Sebastian Hugentobler 2025-07-02 21:09:37 +02:00
parent 1c95f4391f
commit b4a0aadef9
Signed by: shu
SSH key fingerprint: SHA256:ppcx6MlixdNZd5EUM1nkHOKoyQYoJwzuQKXM6J/t66M
73 changed files with 2993 additions and 1632 deletions

Cargo.lock (generated)

@@ -167,6 +167,60 @@ dependencies = [
  "arrayvec",
 ]
 
+[[package]]
+name = "axum"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5"
+dependencies = [
+ "axum-core",
+ "bytes",
+ "form_urlencoded",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-body-util",
+ "hyper",
+ "hyper-util",
+ "itoa",
+ "matchit",
+ "memchr",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustversion",
+ "serde",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "sync_wrapper",
+ "tokio",
+ "tower",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "axum-core"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http",
+ "http-body",
+ "http-body-util",
+ "mime",
+ "pin-project-lite",
+ "rustversion",
+ "sync_wrapper",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
 [[package]]
 name = "backtrace"
 version = "0.3.75"
@@ -257,6 +311,7 @@ dependencies = [
  "r2d2_sqlite",
  "rusqlite",
  "serde",
+ "snafu",
  "tempfile",
  "thiserror 1.0.69",
  "time",
@@ -289,12 +344,6 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
 
-[[package]]
-name = "cfg_aliases"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
-
 [[package]]
 name = "chrono"
 version = "0.4.41"
@@ -507,6 +556,17 @@ dependencies = [
  "crypto-common",
 ]
 
+[[package]]
+name = "displaydoc"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "dyn-clone"
 version = "1.0.19"
@@ -620,17 +680,6 @@ version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
 
-[[package]]
-name = "futures-macro"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
 [[package]]
 name = "futures-sink"
 version = "0.3.31"
@@ -650,12 +699,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
 dependencies = [
  "futures-core",
- "futures-macro",
- "futures-sink",
  "futures-task",
  "pin-project-lite",
  "pin-utils",
- "slab",
 ]
 
 [[package]]
@@ -706,8 +752,8 @@ dependencies = [
  "aho-corasick",
  "bstr",
  "log",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.9",
+ "regex-syntax 0.8.5",
 ]
 
 [[package]]
@@ -764,30 +810,6 @@ dependencies = [
  "hashbrown 0.15.4",
 ]
 
-[[package]]
-name = "headers"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb"
-dependencies = [
- "base64",
- "bytes",
- "headers-core",
- "http",
- "httpdate",
- "mime",
- "sha1",
-]
-
-[[package]]
-name = "headers-core"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
-dependencies = [
- "http",
-]
-
 [[package]]
 name = "heck"
 version = "0.5.0"
@@ -888,6 +910,7 @@ dependencies = [
  "hyper",
  "pin-project-lite",
  "tokio",
+ "tower-service",
 ]
 
 [[package]]
@@ -914,12 +937,119 @@ dependencies = [
  "cc",
 ]
 
+[[package]]
+name = "icu_collections"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
+dependencies = [
+ "displaydoc",
+ "potential_utf",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locale_core"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
+dependencies = [
+ "displaydoc",
+ "litemap",
+ "tinystr",
+ "writeable",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_normalizer_data",
+ "icu_properties",
+ "icu_provider",
+ "smallvec",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer_data"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
+
+[[package]]
+name = "icu_properties"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_locale_core",
+ "icu_properties_data",
+ "icu_provider",
+ "potential_utf",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_properties_data"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
+
+[[package]]
+name = "icu_provider"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
+dependencies = [
+ "displaydoc",
+ "icu_locale_core",
+ "stable_deref_trait",
+ "tinystr",
+ "writeable",
+ "yoke",
+ "zerofrom",
+ "zerotrie",
+ "zerovec",
+]
+
 [[package]]
 name = "ident_case"
 version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
 
+[[package]]
+name = "idna"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
+dependencies = [
+ "idna_adapter",
+ "smallvec",
+ "utf8_iter",
+]
+
+[[package]]
+name = "idna_adapter"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
+dependencies = [
+ "icu_normalizer",
+ "icu_properties",
+]
+
 [[package]]
 name = "ignore"
 version = "0.4.23"
@@ -930,7 +1060,7 @@ dependencies = [
  "globset",
  "log",
  "memchr",
- "regex-automata",
+ "regex-automata 0.4.9",
  "same-file",
  "walkdir",
  "winapi-util",
@@ -1085,31 +1215,42 @@ version = "0.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
 
+[[package]]
+name = "litemap"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
+
 [[package]]
 name = "little-hesinde"
 version = "0.3.1"
 dependencies = [
+ "axum",
  "calibre-db",
  "clap",
  "ignore",
  "image",
+ "mime_guess",
  "once_cell",
- "poem",
  "quick-xml",
  "rust-embed",
  "serde",
  "serde_json",
  "serde_with",
  "sha2",
+ "snafu",
  "tera",
- "thiserror 1.0.69",
  "time",
  "tokio",
  "tokio-util",
+ "tower-http",
  "tracing",
  "tracing-subscriber",
+ "utoipa",
+ "utoipa-axum",
+ "utoipa-swagger-ui",
  "uuid",
- "zip",
+ "zip 4.2.0",
 ]
 
 [[package]]
@@ -1137,6 +1278,21 @@ dependencies = [
  "imgref",
 ]
 
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata 0.1.10",
+]
+
+[[package]]
+name = "matchit"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
+
 [[package]]
 name = "maybe-rayon"
 version = "0.1.1"
@@ -1201,18 +1357,6 @@ version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
 
-[[package]]
-name = "nix"
-version = "0.30.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
-dependencies = [
- "bitflags",
- "cfg-if",
- "cfg_aliases",
- "libc",
-]
-
 [[package]]
 name = "nom"
 version = "7.1.3"
@@ -1467,52 +1611,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
 
 [[package]]
-name = "poem"
-version = "3.1.11"
+name = "potential_utf"
+version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ea63e9302279b1ca262d15342760f8d08f04fb974d4997e8baed7d034b94121"
+checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
 dependencies = [
- "bytes",
- "futures-util",
- "headers",
- "hex",
- "http",
- "http-body-util",
- "httpdate",
- "hyper",
- "hyper-util",
- "mime",
- "mime_guess",
- "nix",
- "parking_lot",
- "percent-encoding",
- "pin-project-lite",
- "poem-derive",
- "regex",
- "rfc7239",
- "rust-embed",
- "serde",
- "serde_json",
- "serde_urlencoded",
- "smallvec",
- "sync_wrapper",
- "thiserror 2.0.12",
- "tokio",
- "tokio-util",
- "tracing",
- "wildmatch",
-]
-
-[[package]]
-name = "poem-derive"
-version = "3.1.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "824e7fe35343b7fe354e5d4ac444ddbe674676ebba4b4e48565835661033d338"
-dependencies = [
- "proc-macro-crate",
- "proc-macro2",
- "quote",
- "syn",
+ "zerovec",
 ]
 
 [[package]]
@@ -1530,15 +1634,6 @@ dependencies = [
  "zerocopy",
 ]
 
-[[package]]
-name = "proc-macro-crate"
-version = "3.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35"
-dependencies = [
- "toml_edit",
-]
-
 [[package]]
 name = "proc-macro2"
 version = "1.0.95"
@@ -1786,8 +1881,17 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.9",
+ "regex-syntax 0.8.5",
 ]
 
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax 0.6.29",
+]
+
 [[package]]
@@ -1798,24 +1902,21 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax",
+ "regex-syntax 0.8.5",
 ]
 
+[[package]]
+name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+
 [[package]]
 name = "regex-syntax"
 version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
 
-[[package]]
-name = "rfc7239"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a82f1d1e38e9a85bb58ffcfadf22ed6f2c94e8cd8581ec2b0f80a2a6858350f"
-dependencies = [
- "uncased",
-]
-
 [[package]]
 name = "rgb"
 version = "0.8.50"
@@ -1982,6 +2083,16 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "serde_path_to_error"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a"
+dependencies = [
+ "itoa",
+ "serde",
+]
+
 [[package]]
 name = "serde_spanned"
 version = "0.6.9"
@@ -2035,17 +2146,6 @@ dependencies = [
  "syn",
 ]
 
-[[package]]
-name = "sha1"
-version = "0.10.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
-dependencies = [
- "cfg-if",
- "cpufeatures",
- "digest",
-]
-
 [[package]]
 name = "sha2"
 version = "0.10.9"
@@ -2124,6 +2224,27 @@ version = "1.15.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
 
+[[package]]
+name = "snafu"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "320b01e011bf8d5d7a4a4a4be966d9160968935849c83b918827f6a435e7f627"
+dependencies = [
+ "snafu-derive",
+]
+
+[[package]]
+name = "snafu-derive"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1961e2ef424c1424204d3a5d6975f934f56b6d50ff5732382d84ebf460e147f7"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "socket2"
 version = "0.5.10"
@@ -2134,6 +2255,12 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+
 [[package]]
 name = "strsim"
 version = "0.11.1"
@@ -2156,8 +2283,16 @@ name = "sync_wrapper"
 version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
-dependencies = [
- "futures-core",
-]
+
+[[package]]
+name = "synstructure"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
 
 [[package]]
@@ -2294,6 +2429,16 @@ dependencies = [
  "time-core",
 ]
 
+[[package]]
+name = "tinystr"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
+dependencies = [
+ "displaydoc",
+ "zerovec",
+]
+
 [[package]]
 name = "tokio"
 version = "1.45.1"
@@ -2369,12 +2514,57 @@ dependencies = [
  "winnow",
 ]
 
+[[package]]
+name = "tower"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "pin-project-lite",
+ "sync_wrapper",
+ "tokio",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower-http"
+version = "0.6.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
+dependencies = [
+ "bitflags",
+ "bytes",
+ "http",
+ "http-body",
+ "pin-project-lite",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower-layer"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
+
+[[package]]
+name = "tower-service"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
+
 [[package]]
 name = "tracing"
 version = "0.1.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
 dependencies = [
+ "log",
  "pin-project-lite",
  "tracing-attributes",
  "tracing-core",
@@ -2418,10 +2608,14 @@ version = "0.3.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
 dependencies = [
+ "matchers",
  "nu-ansi-term",
+ "once_cell",
+ "regex",
  "sharded-slab",
  "smallvec",
  "thread_local",
+ "tracing",
  "tracing-core",
  "tracing-log",
 ]
@@ -2438,15 +2632,6 @@ version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
 
-[[package]]
-name = "uncased"
-version = "0.9.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697"
-dependencies = [
- "version_check",
-]
-
 [[package]]
 name = "unic-char-property"
 version = "0.9.0"
@@ -2509,12 +2694,91 @@ version = "1.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
 
+[[package]]
+name = "url"
+version = "2.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+]
+
+[[package]]
+name = "utf8_iter"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
+
 [[package]]
 name = "utf8parse"
 version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
 
+[[package]]
+name = "utoipa"
+version = "5.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fcc29c80c21c31608227e0912b2d7fddba57ad76b606890627ba8ee7964e993"
+dependencies = [
+ "indexmap 2.10.0",
+ "serde",
+ "serde_json",
+ "utoipa-gen",
+]
+
+[[package]]
+name = "utoipa-axum"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c25bae5bccc842449ec0c5ddc5cbb6a3a1eaeac4503895dc105a1138f8234a0"
+dependencies = [
+ "axum",
+ "paste",
+ "tower-layer",
+ "tower-service",
+ "utoipa",
+]
+
+[[package]]
+name = "utoipa-gen"
+version = "5.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d79d08d92ab8af4c5e8a6da20c47ae3f61a0f1dabc1997cdf2d082b757ca08b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "regex",
+ "syn",
+]
+
+[[package]]
+name = "utoipa-swagger-ui"
+version = "9.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d047458f1b5b65237c2f6dc6db136945667f40a7668627b3490b9513a3d43a55"
+dependencies = [
+ "axum",
+ "base64",
+ "mime_guess",
+ "regex",
+ "rust-embed",
+ "serde",
+ "serde_json",
+ "url",
+ "utoipa",
+ "utoipa-swagger-ui-vendored",
+ "zip 3.0.0",
+]
+
+[[package]]
+name = "utoipa-swagger-ui-vendored"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2eebbbfe4093922c2b6734d7c679ebfebd704a0d7e56dfcb0d05818ce28977d"
+
 [[package]]
 name = "uuid"
 version = "1.17.0"
@@ -2645,12 +2909,6 @@ dependencies = [
  "unicode-ident",
 ]
 
-[[package]]
-name = "wildmatch"
-version = "2.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68ce1ab1f8c62655ebe1350f589c61e505cf94d385bc6a12899442d9081e71fd"
-
 [[package]]
 name = "winapi"
 version = "0.3.9"
@@ -2914,6 +3172,36 @@ dependencies = [
  "bitflags",
 ]
 
+[[package]]
+name = "writeable"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
+
+[[package]]
+name = "yoke"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
+dependencies = [
+ "serde",
+ "stable_deref_trait",
+ "yoke-derive",
+ "zerofrom",
+]
+
+[[package]]
+name = "yoke-derive"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
 [[package]]
 name = "zerocopy"
 version = "0.8.26"
@@ -2934,6 +3222,74 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "zerofrom"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
+dependencies = [
+ "zerofrom-derive",
+]
+
+[[package]]
+name = "zerofrom-derive"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zerotrie"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+]
+
+[[package]]
+name = "zerovec"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428"
+dependencies = [
+ "yoke",
+ "zerofrom",
+ "zerovec-derive",
+]
+
+[[package]]
+name = "zerovec-derive"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "zip"
+version = "3.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12598812502ed0105f607f941c386f43d441e00148fce9dec3ca5ffb0bde9308"
+dependencies = [
+ "arbitrary",
+ "crc32fast",
+ "flate2",
+ "indexmap 2.10.0",
+ "memchr",
+ "zopfli",
+]
+
 [[package]]
 name = "zip"
 version = "4.2.0"


@@ -6,6 +6,7 @@ members = [
 [workspace.dependencies]
 serde = "1.0.219"
+snafu = { version = "0.8.6", features = ["rust_1_81"] }
 thiserror = "1.0.61"
 time = { version = "0.3.41", features = ["macros", "serde", "formatting", "parsing" ] }
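The workspace gains `snafu`, which the rest of this commit uses to replace the catch-all `DataStoreError` with one small error type per operation. A minimal sketch of the pattern (illustrative names, not taken from the diff): `#[derive(Snafu)]` generates a context selector per variant, and `ResultExt::context` wraps the underlying error as that variant's `source`.

```rust
// Illustrative sketch of the snafu pattern this commit migrates to.
use snafu::{ResultExt, Snafu};

#[derive(Debug, Snafu)]
enum ConfigError {
    // snafu derives an `OpenConfigSnafu` context selector from this variant.
    #[snafu(display("Failed to open config file."))]
    OpenConfig { source: std::io::Error },
}

fn read_config(path: &str) -> Result<String, ConfigError> {
    // `.context(...)` attaches the io::Error as the variant's `source`.
    std::fs::read_to_string(path).context(OpenConfigSnafu)
}
```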


@@ -12,6 +12,7 @@ r2d2 = "0.8.10"
 r2d2_sqlite = "0.30.0"
 rusqlite = { version = "0.36.0", features = ["bundled", "time"] }
 serde = { workspace = true }
+snafu = { workspace = true }
 tempfile = "3.20.0"
 thiserror = { workspace = true }
 time = { workspace = true }


@@ -1,17 +1,19 @@
 //! Bundle all functions together.
 
-use std::path::{Path, PathBuf};
+use std::{
+    io,
+    path::{Path, PathBuf},
+};
 
 use r2d2::Pool;
 use r2d2_sqlite::SqliteConnectionManager;
-use tempfile::NamedTempFile;
+use tempfile::{NamedTempFile, PersistError};
 
 use crate::{
-    data::{
-        author::Author, book::Book, error::DataStoreError, pagination::SortOrder, series::Series,
-    },
+    data::{self, author::Author, book::Book, pagination::SortOrder, series::Series},
     search::search,
 };
+use snafu::{ResultExt, Snafu};
 
 /// Top level calibre functions, bundling all sub functions in one place and providing secure access to
 /// the database.
@@ -21,16 +23,195 @@ pub struct Calibre {
     search_db_path: PathBuf,
 }
 
+#[derive(Debug, Snafu)]
+pub enum LoadError {
+    #[snafu(display("Failed to create database connection pool."))]
+    DbPool { source: r2d2::Error },
+    #[snafu(display("Failed to create temporary database view."))]
+    TmpDb { source: io::Error },
+    #[snafu(display("Failed to keep temporary database from deletion."))]
+    PersistTmpDb { source: PersistError },
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to search."))]
+pub struct SearchError {
+    source: crate::search::SearchError,
+}
+
+#[derive(Debug, Snafu)]
+pub enum BooksError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    BooksDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch multiple books."))]
+    FetchBooks {
+        source: data::book::MultipleBooksError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum AuthorsError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    AuthorsDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch multiple authors."))]
+    FetchAuthors {
+        source: data::author::MultipleAuthorsError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum AuthorBooksError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    AuthorBooksDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch an author's books."))]
+    FetchAuthorBooks {
+        source: data::book::AuthorBooksError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum RecentBooksError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    RecentBooksDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch recent books."))]
+    FetchRecentBooks {
+        source: data::book::RecentBooksError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarBookError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    ScalarBookDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch book."))]
+    FetchScalarBook { source: data::book::ScalarBookError },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarAuthorError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    ScalarAuthorDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch author."))]
+    FetchScalarAuthor {
+        source: data::author::ScalarAuthorError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarSeriesError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    ScalarSeriesDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch series."))]
+    FetchScalarSeries {
+        source: data::series::ScalarSeriesError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum BookAuthorError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    BookAuthorDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch book's author."))]
+    FetchBookAuthor {
+        source: data::author::BookAuthorError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum MultipleSeriesError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    MultipleSeriesDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to fetch multiple series."))]
+    FetchMultipleSeries {
+        source: data::series::MultiplSeriesError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum BookSeriesError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    BookSeriesDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to get the series a book belongs to."))]
+    FetchBookSeries {
+        source: data::series::SeriesBooksError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum SeriesBooksError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    SeriesBooksDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to get a series' books."))]
+    FetchSeriesBooks { source: data::book::SeriesBookError },
+}
+
+#[derive(Debug, Snafu)]
+pub enum HasPreviousAuthorsError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    HasPreviousAuthorsDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to check if there are previous authors."))]
+    FetchHasPreviousAuthors {
+        source: data::author::PreviousAuthorsError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum HasMoreAuthorsError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    HasMoreAuthorsDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to check if there are more authors."))]
+    FetchHasMoreAuthors {
+        source: data::author::MoreAuthorsError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum HasPreviousBooksError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    HasPreviousBooksDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to check if there are previous books."))]
+    FetchHasPreviousBooks {
+        source: data::book::PreviousBooksError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum HasMoreBooksError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    HasMoreBooksDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to check if there are more books."))]
+    FetchHasMoreBooks { source: data::book::MoreBooksError },
+}
+
+#[derive(Debug, Snafu)]
+pub enum HasPreviousSeriesError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    HasPreviousSeriesDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to check if there are previous series."))]
+    FetchHasPreviousSeries {
+        source: data::series::PreviousSeriesError,
+    },
+}
+
+#[derive(Debug, Snafu)]
+pub enum HasMoreSeriesError {
+    #[snafu(display("Failed to get database connection from pool."))]
+    HasMoreSeriesDbPool { source: r2d2::Error },
+    #[snafu(display("Failed to check if there are more series."))]
+    FetchHasMoreSeries {
+        source: data::series::MoreSeriesError,
+    },
+}
+
 impl Calibre {
     /// Open a connection to the calibre database.
     ///
     /// Fail if the database file cannot be opened or found.
-    pub fn load(path: &Path) -> Result<Self, DataStoreError> {
+    pub fn load(path: &Path) -> Result<Self, LoadError> {
         let manager = SqliteConnectionManager::file(path);
-        let pool = r2d2::Pool::new(manager)?;
+        let pool = r2d2::Pool::new(manager).context(DbPoolSnafu)?;
 
-        let tmpfile = NamedTempFile::new()?;
-        let (_, search_db_path) = tmpfile.keep()?;
+        let tmpfile = NamedTempFile::new().context(TmpDbSnafu)?;
+        let (_, search_db_path) = tmpfile.keep().context(PersistTmpDbSnafu)?;
 
         Ok(Self {
             pool,
@@ -41,8 +222,8 @@ impl Calibre {
     /// Full text search with a query.
     ///
     /// See https://www.sqlite.org/fts5.html#full_text_query_syntax for syntax.
-    pub fn search(&self, query: &str) -> Result<Vec<Book>, DataStoreError> {
-        search(query, &self.pool, &self.search_db_path)
+    pub fn search(&self, query: &str) -> Result<Vec<Book>, SearchError> {
+        search(query, &self.pool, &self.search_db_path).context(SearchSnafu)
     }
 
     /// Fetch book data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -52,9 +233,9 @@ impl Calibre {
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Book>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::multiple(&conn, limit, cursor, sort_order)
+    ) -> Result<Vec<Book>, BooksError> {
+        let conn = self.pool.get().context(BooksDbPoolSnafu)?;
+        Book::multiple(&conn, limit, cursor, sort_order).context(FetchBooksSnafu)
     }
 
     /// Fetch author data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -64,9 +245,9 @@ impl Calibre {
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Author>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::multiple(&conn, limit, cursor, sort_order)
+    ) -> Result<Vec<Author>, AuthorsError> {
+        let conn = self.pool.get().context(AuthorsDbPoolSnafu)?;
+        Author::multiple(&conn, limit, cursor, sort_order).context(FetchAuthorsSnafu)
     }
 
     /// Fetch books for an author specified by `author_id`, paginate the books by starting at `cursor`,
@@ -77,27 +258,28 @@ impl Calibre {
         limit: u64,
         cursor: Option<&str>,
         sort_order: SortOrder,
-    ) -> Result<Vec<Book>, DataStoreError> {
-        let conn = self.pool.get()?;
+    ) -> Result<Vec<Book>, AuthorBooksError> {
+        let conn = self.pool.get().context(AuthorBooksDbPoolSnafu)?;
         Book::author_books(&conn, author_id, limit, cursor, sort_order)
+            .context(FetchAuthorBooksSnafu)
     }
 
     /// Get recent books up to a limit of `limit`.
-    pub fn recent_books(&self, limit: u64) -> Result<Vec<Book>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::recents(&conn, limit)
+    pub fn recent_books(&self, limit: u64) -> Result<Vec<Book>, RecentBooksError> {
+        let conn = self.pool.get().context(RecentBooksDbPoolSnafu)?;
+        Book::recents(&conn, limit).context(FetchRecentBooksSnafu)
     }
 
     /// Get a single book, specified by `id`.
-    pub fn scalar_book(&self, id: u64) -> Result<Book, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::scalar_book(&conn, id)
+    pub fn scalar_book(&self, id: u64) -> Result<Book, ScalarBookError> {
+        let conn = self.pool.get().context(ScalarBookDbPoolSnafu)?;
+        Book::scalar_book(&conn, id).context(FetchScalarBookSnafu)
     }
 
     /// Get the author of a book with id `id`.
-    pub fn book_author(&self, id: u64) -> Result<Author, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::book_author(&conn, id)
+    pub fn book_author(&self, id: u64) -> Result<Author, BookAuthorError> {
+        let conn = self.pool.get().context(BookAuthorDbPoolSnafu)?;
+        Author::book_author(&conn, id).context(FetchBookAuthorSnafu)
     }
 
     /// Fetch series data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -107,69 +289,69 @@ impl Calibre {
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Series>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::multiple(&conn, limit, cursor, sort_order)
+    ) -> Result<Vec<Series>, MultipleSeriesError> {
+        let conn = self.pool.get().context(MultipleSeriesDbPoolSnafu)?;
+        Series::multiple(&conn, limit, cursor, sort_order).context(FetchMultipleSeriesSnafu)
     }
 
     /// Get the series a book with id `id` is in, as well as the book's position within the series.
-    pub fn book_series(&self, id: u64) -> Result<Option<(Series, f64)>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::book_series(&conn, id)
+    pub fn book_series(&self, id: u64) -> Result<Option<(Series, f64)>, BookSeriesError> {
+        let conn = self.pool.get().context(BookSeriesDbPoolSnafu)?;
+        Series::book_series(&conn, id).context(FetchBookSeriesSnafu)
     }
 
     /// Get all books belonging to the series with id `id`.
-    pub fn series_books(&self, id: u64) -> Result<Vec<Book>, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::series_books(&conn, id)
+    pub fn series_books(&self, id: u64) -> Result<Vec<Book>, SeriesBooksError> {
+        let conn = self.pool.get().context(SeriesBooksDbPoolSnafu)?;
+        Book::series_books(&conn, id).context(FetchSeriesBooksSnafu)
    }
 
     /// Check if there are more authors before the specified cursor.
-    pub fn has_previous_authors(&self, author_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::has_previous_authors(&conn, author_sort)
+    pub fn has_previous_authors(&self, author_sort: &str) -> Result<bool, HasPreviousAuthorsError> {
+        let conn = self.pool.get().context(HasPreviousAuthorsDbPoolSnafu)?;
+        Author::has_previous_authors(&conn, author_sort).context(FetchHasPreviousAuthorsSnafu)
     }
 
     /// Check if there are more authors after the specified cursor.
-    pub fn has_more_authors(&self, author_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::has_more_authors(&conn, author_sort)
+    pub fn has_more_authors(&self, author_sort: &str) -> Result<bool, HasMoreAuthorsError> {
+        let conn = self.pool.get().context(HasMoreAuthorsDbPoolSnafu)?;
+        Author::has_more_authors(&conn, author_sort).context(FetchHasMoreAuthorsSnafu)
     }
 
     /// Check if there are more books before the specified cursor.
-    pub fn has_previous_books(&self, book_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::has_previous_books(&conn, book_sort)
+    pub fn has_previous_books(&self, book_sort: &str) -> Result<bool, HasPreviousBooksError> {
+        let conn = self.pool.get().context(HasPreviousBooksDbPoolSnafu)?;
+        Book::has_previous_books(&conn, book_sort).context(FetchHasPreviousBooksSnafu)
     }
 
     /// Check if there are more books after the specified cursor.
-    pub fn has_more_books(&self, book_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Book::has_more_books(&conn, book_sort)
+    pub fn has_more_books(&self, book_sort: &str) -> Result<bool, HasMoreBooksError> {
+        let conn = self.pool.get().context(HasMoreBooksDbPoolSnafu)?;
+        Book::has_more_books(&conn, book_sort).context(FetchHasMoreBooksSnafu)
     }
 
     /// Check if there are more series before the specified cursor.
-    pub fn has_previous_series(&self, series_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::has_previous_series(&conn, series_sort)
+    pub fn has_previous_series(&self, series_sort: &str) -> Result<bool, HasPreviousSeriesError> {
+        let conn = self.pool.get().context(HasPreviousSeriesDbPoolSnafu)?;
+        Series::has_previous_series(&conn, series_sort).context(FetchHasPreviousSeriesSnafu)
    }
 
     /// Check if there are more series after the specified cursor.
-    pub fn has_more_series(&self, series_sort: &str) -> Result<bool, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::has_more_series(&conn, series_sort)
+    pub fn has_more_series(&self, series_sort: &str) -> Result<bool, HasMoreSeriesError> {
+        let conn = self.pool.get().context(HasMoreSeriesDbPoolSnafu)?;
+        Series::has_more_series(&conn, series_sort).context(FetchHasMoreSeriesSnafu)
     }
 
     /// Fetch a single author with id `id`.
-    pub fn scalar_author(&self, id: u64) -> Result<Author, DataStoreError> {
-        let conn = self.pool.get()?;
-        Author::scalar_author(&conn, id)
+    pub fn scalar_author(&self, id: u64) -> Result<Author, ScalarAuthorError> {
+        let conn = self.pool.get().context(ScalarAuthorDbPoolSnafu)?;
+        Author::scalar_author(&conn, id).context(FetchScalarAuthorSnafu)
     }
 
     /// Fetch a single series with id `id`.
-    pub fn scalar_series(&self, id: u64) -> Result<Series, DataStoreError> {
-        let conn = self.pool.get()?;
-        Series::scalar_series(&conn, id)
+    pub fn scalar_series(&self, id: u64) -> Result<Series, ScalarSeriesError> {
+        let conn = self.pool.get().context(ScalarSeriesDbPoolSnafu)?;
+        Series::scalar_series(&conn, id).context(FetchScalarSeriesSnafu)
     }
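Every `Calibre` method now returns its own error enum, so callers can tell exactly which step failed. A hedged caller-side sketch (the module path `calibre_db::calibre` is an assumption; the diff does not show the crate's re-exports):

```rust
// Assumed import path; adjust to wherever Calibre and LoadError are exported.
use calibre_db::calibre::{Calibre, LoadError};
use std::path::Path;

fn open_library(path: &Path) -> Option<Calibre> {
    match Calibre::load(path) {
        Ok(calibre) => Some(calibre),
        // Each variant names the step that failed and carries its source error.
        Err(LoadError::DbPool { source }) => {
            eprintln!("could not create the connection pool: {source}");
            None
        }
        Err(LoadError::TmpDb { source }) => {
            eprintln!("could not create the temporary search database: {source}");
            None
        }
        Err(LoadError::PersistTmpDb { source }) => {
            eprintln!("could not persist the temporary search database: {source}");
            None
        }
    }
}
```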


@@ -1,12 +1,9 @@
 //! Author data.
 
-use rusqlite::{named_params, Connection, Row};
+use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
+use rusqlite::{Connection, Row, named_params};
 use serde::Serialize;
-
-use super::{
-    error::DataStoreError,
-    pagination::{Pagination, SortOrder},
-};
+use snafu::{ResultExt, Snafu};
 
 /// Author in calibre.
 #[derive(Debug, Clone, Serialize)]
@@ -19,6 +16,40 @@ pub struct Author {
     pub sort: String,
 }
 
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to fetch multiple authors."))]
+pub struct MultipleAuthorsError {
+    source: PaginationError,
+}
+
+#[derive(Debug, Snafu)]
+pub enum BookAuthorError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareBookAuthor { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteBookAuthor { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarAuthorError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareScalarAuthor { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteScalarAuthor { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for previous authors."))]
+pub struct PreviousAuthorsError {
+    source: HasPrevOrMoreError,
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for more authors."))]
+pub struct MoreAuthorsError {
+    source: HasPrevOrMoreError,
+}
+
 impl Author {
     fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
         Ok(Self {
@@ -35,44 +66,54 @@ impl Author {
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Self>, DataStoreError> {
+    ) -> Result<Vec<Self>, MultipleAuthorsError> {
         let pagination = Pagination::new("sort", cursor, limit, *sort_order);
-        pagination.paginate(
-            conn,
-            "SELECT id, name, sort FROM authors",
-            &[],
-            Self::from_row,
-        )
+        pagination
+            .paginate(
+                conn,
+                "SELECT id, name, sort FROM authors",
+                &[],
+                Self::from_row,
+            )
+            .context(MultipleAuthorsSnafu)
     }
 
     /// Get the author of a book with id `id`.
-    pub fn book_author(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
-        let mut stmt = conn.prepare(
-            "SELECT authors.id, authors.name, authors.sort FROM authors \
+    pub fn book_author(conn: &Connection, id: u64) -> Result<Self, BookAuthorError> {
+        let mut stmt = conn
+            .prepare(
+                "SELECT authors.id, authors.name, authors.sort FROM authors \
                 INNER JOIN books_authors_link ON authors.id = books_authors_link.author \
                 WHERE books_authors_link.book = (:id)",
-        )?;
+            )
+            .context(PrepareBookAuthorSnafu)?;
         let params = named_params! { ":id": id };
-        Ok(stmt.query_row(params, Self::from_row)?)
+        stmt.query_row(params, Self::from_row)
+            .context(ExecuteBookAuthorSnafu)
     }
 
     /// Fetch a single author with id `id`.
-    pub fn scalar_author(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
-        let mut stmt = conn.prepare("SELECT id, name, sort FROM authors WHERE id = (:id)")?;
+    pub fn scalar_author(conn: &Connection, id: u64) -> Result<Self, ScalarAuthorError> {
+        let mut stmt = conn
+            .prepare("SELECT id, name, sort FROM authors WHERE id = (:id)")
+            .context(PrepareScalarAuthorSnafu)?;
         let params = named_params! { ":id": id };
-        Ok(stmt.query_row(params, Self::from_row)?)
+        stmt.query_row(params, Self::from_row)
+            .context(ExecuteScalarAuthorSnafu)
     }
 
     /// Check if there are more authors before the specified cursor.
     pub fn has_previous_authors(
         conn: &Connection,
         sort_name: &str,
-    ) -> Result<bool, DataStoreError> {
+    ) -> Result<bool, PreviousAuthorsError> {
         Pagination::has_prev_or_more(conn, "authors", sort_name, &SortOrder::DESC)
+            .context(PreviousAuthorsSnafu)
     }
 
     /// Check if there are more authors after the specified cursor.
-    pub fn has_more_authors(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
+    pub fn has_more_authors(conn: &Connection, sort_name: &str) -> Result<bool, MoreAuthorsError> {
         Pagination::has_prev_or_more(conn, "authors", sort_name, &SortOrder::ASC)
+            .context(MoreAuthorsSnafu)
     }
 }


@@ -1,13 +1,11 @@
 //! Book data.
 
-use rusqlite::{named_params, Connection, Row};
+use rusqlite::{Connection, Row, named_params};
 use serde::Serialize;
 use time::OffsetDateTime;
 
-use super::{
-    error::DataStoreError,
-    pagination::{Pagination, SortOrder},
-};
+use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
+use snafu::{ResultExt, Snafu};
 
 /// Book in calibre.
 #[derive(Debug, Clone, Serialize)]
@@ -28,6 +26,54 @@ pub struct Book {
     pub description: Option<String>,
 }
 
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to fetch multiple books."))]
+pub struct MultipleBooksError {
+    source: PaginationError,
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to fetch an author's books."))]
+pub struct AuthorBooksError {
+    source: PaginationError,
+}
+
+#[derive(Debug, Snafu)]
+pub enum SeriesBookError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareSeriesBook { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteSeriesBook { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum RecentBooksError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareRecentBooks { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteRecentBooks { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarBookError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareScalarBook { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteScalarBook { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for previous books."))]
+pub struct PreviousBooksError {
+    source: HasPrevOrMoreError,
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for more books."))]
+pub struct MoreBooksError {
+    source: HasPrevOrMoreError,
+}
+
 impl Book {
     fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
         Ok(Self {
@@ -48,7 +94,7 @@ impl Book {
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Self>, DataStoreError> {
+    ) -> Result<Vec<Self>, MultipleBooksError> {
         let pagination = Pagination::new("sort", cursor, limit, *sort_order);
         pagination.paginate(
             conn,
@@ -56,7 +102,7 @@ impl Book {
             FROM books LEFT JOIN comments ON books.id = comments.book",
             &[],
             Self::from_row,
-        )
+        ).context(MultipleBooksSnafu)
     }
 
     /// Fetch books for an author specified by `author_id`, paginate the books by starting at `cursor`,
@@ -67,7 +113,7 @@ impl Book {
         limit: u64,
         cursor: Option<&str>,
         sort_order: SortOrder,
-    ) -> Result<Vec<Self>, DataStoreError> {
+    ) -> Result<Vec<Self>, AuthorBooksError> {
         let pagination = Pagination::new("books.sort", cursor, limit, sort_order);
         pagination.paginate(
             conn,
@@ -77,11 +123,11 @@ impl Book {
             WHERE books_authors_link.author = (:author_id) AND",
             &[(":author_id", &author_id)],
             Self::from_row,
-        )
+        ).context(AuthorBooksSnafu)
     }
 
     /// Get all books belonging to the series with id `id`.
-    pub fn series_books(conn: &Connection, id: u64) -> Result<Vec<Book>, DataStoreError> {
+    pub fn series_books(conn: &Connection, id: u64) -> Result<Vec<Book>, SeriesBookError> {
         let mut stmt = conn.prepare(
             "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text FROM series \
             INNER JOIN books_series_link ON series.id = books_series_link.series \
@@ -89,40 +135,50 @@ impl Book {
             LEFT JOIN comments ON books.id = comments.book \
             WHERE books_series_link.series = (:id) \
             ORDER BY books.series_index",
-        )?;
+        ).context(PrepareSeriesBookSnafu)?;
         let params = named_params! { ":id": id };
-        let iter = stmt.query_map(params, Self::from_row)?;
+        let iter = stmt
+            .query_map(params, Self::from_row)
+            .context(ExecuteSeriesBookSnafu)?;
 
         Ok(iter.filter_map(Result::ok).collect())
     }
 
     /// Get recent books up to a limit of `limit`.
-    pub fn recents(conn: &Connection, limit: u64) -> Result<Vec<Self>, DataStoreError> {
+    pub fn recents(conn: &Connection, limit: u64) -> Result<Vec<Self>, RecentBooksError> {
        let mut stmt = conn.prepare(
             "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
             FROM books LEFT JOIN comments ON books.id = comments.book ORDER BY books.timestamp DESC LIMIT (:limit)"
-        )?;
+        ).context(PrepareRecentBooksSnafu)?;
         let params = named_params! { ":limit": limit };
-        let iter = stmt.query_map(params, Self::from_row)?;
+        let iter = stmt
+            .query_map(params, Self::from_row)
+            .context(ExecuteRecentBooksSnafu)?;
 
         Ok(iter.filter_map(Result::ok).collect())
     }
 
     /// Get a single book, specified by `id`.
-    pub fn scalar_book(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
+    pub fn scalar_book(conn: &Connection, id: u64) -> Result<Self, ScalarBookError> {
         let mut stmt = conn.prepare(
             "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
             FROM books LEFT JOIN comments WHERE books.id = (:id)",
-        )?;
+        ).context(PrepareScalarBookSnafu)?;
         let params = named_params! { ":id": id };
-        Ok(stmt.query_row(params, Self::from_row)?)
+        stmt.query_row(params, Self::from_row)
+            .context(ExecuteScalarBookSnafu)
     }
 
     /// Check if there are more books before the specified cursor.
-    pub fn has_previous_books(conn: &Connection, sort_title: &str) -> Result<bool, DataStoreError> {
+    pub fn has_previous_books(
+        conn: &Connection,
+        sort_title: &str,
+    ) -> Result<bool, PreviousBooksError> {
         Pagination::has_prev_or_more(conn, "books", sort_title, &SortOrder::DESC)
+            .context(PreviousBooksSnafu)
     }
 
     /// Check if there are more books after the specified cursor.
-    pub fn has_more_books(conn: &Connection, sort_title: &str) -> Result<bool, DataStoreError> {
+    pub fn has_more_books(conn: &Connection, sort_title: &str) -> Result<bool, MoreBooksError> {
         Pagination::has_prev_or_more(conn, "books", sort_title, &SortOrder::ASC)
+            .context(MoreBooksSnafu)
     }
 }
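The `Prepare*`/`Execute*` split used throughout this file separates SQL compilation failures from query execution failures on the same `rusqlite` calls. A hedged, self-contained sketch of that shape (hypothetical names, not from the diff):

```rust
use rusqlite::Connection;
use snafu::{ResultExt, Snafu};

#[derive(Debug, Snafu)]
enum CountBooksError {
    // Raised when the SQL fails to compile into a statement.
    #[snafu(display("Failed to prepare statement."))]
    PrepareCountBooks { source: rusqlite::Error },
    // Raised when running the prepared statement or mapping the row fails.
    #[snafu(display("Failed to execute statement."))]
    ExecuteCountBooks { source: rusqlite::Error },
}

fn count_books(conn: &Connection) -> Result<u64, CountBooksError> {
    let mut stmt = conn
        .prepare("SELECT count(1) FROM books")
        .context(PrepareCountBooksSnafu)?;
    stmt.query_row([], |row| row.get(0))
        .context(ExecuteCountBooksSnafu)
}
```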


@@ -1,42 +0,0 @@
-//! Error handling for calibre database access.
-
-use std::io;
-
-use tempfile::PersistError;
-use thiserror::Error;
-use time::error::Parse;
-
-/// Errors from accessing the calibre database.
-#[derive(Error, Debug)]
-#[error("data store error")]
-pub enum DataStoreError {
-    /// Found no entries for the query.
-    #[error("no results")]
-    NoResults(rusqlite::Error),
-    /// Error with SQLite.
-    #[error("sqlite error")]
-    SqliteError(rusqlite::Error),
-    /// Error connecting to the database.
-    #[error("connection error")]
-    ConnectionError(#[from] r2d2::Error),
-    /// Error parsing a datetime from the database.
-    #[error("failed to parse datetime")]
-    DateTimeError(#[from] Parse),
-    /// Error creating the search database.
-    #[error("failed to create search database")]
-    SearchDbError(#[from] io::Error),
-    /// Error marking the search database as persistent.
-    #[error("failed to persist search database")]
-    PersistSearchDbError(#[from] PersistError),
-}
-
-/// Convert an SQLite error into a proper NoResults one if the query
-/// returned no rows, return others as is.
-impl From<rusqlite::Error> for DataStoreError {
-    fn from(error: rusqlite::Error) -> Self {
-        match error {
-            rusqlite::Error::QueryReturnedNoRows => DataStoreError::NoResults(error),
-            _ => DataStoreError::SqliteError(error),
-        }
-    }
-}

View file

@ -1,9 +1,8 @@
 //! Cursor pagination handling.
-use rusqlite::{named_params, Connection, Row, ToSql};
+use rusqlite::{Connection, Row, ToSql, named_params};
 use serde::{Deserialize, Serialize};
-use super::error::DataStoreError;
+use snafu::{ResultExt, Snafu};

 /// How to sort query results, signifying whether we are paginating forwards or backwards.
 #[derive(Debug, Copy, Clone, PartialEq, Deserialize, Serialize)]
@ -26,6 +25,22 @@ pub struct Pagination<'a> {
     pub sort_order: SortOrder,
 }

+#[derive(Debug, Snafu)]
+pub enum HasPrevOrMoreError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareHasPrevOrMore { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteHasPrevOrMore { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum PaginationError {
+    #[snafu(display("Failed to prepare statement."))]
+    PreparePagination { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecutePagination { source: rusqlite::Error },
+}
+
 impl<'a> Pagination<'a> {
     /// Create a new pagination.
     pub fn new(
@ -57,14 +72,16 @@ impl<'a> Pagination<'a> {
         table: &str,
         sort: &str,
         sort_order: &SortOrder,
-    ) -> Result<bool, DataStoreError> {
+    ) -> Result<bool, HasPrevOrMoreError> {
         let comparison = Pagination::sort_order_to_sql(sort_order);
         let mut stmt = conn.prepare(&format!(
             "SELECT Count(1) FROM {table} WHERE sort {comparison} (:sort) ORDER BY sort {sort_order:?}"
-        ))?;
+        )).context(PrepareHasPrevOrMoreSnafu)?;
         let params = named_params! { ":sort": sort };
-        let count: u64 = stmt.query_row(params, |x| x.get(0))?;
+        let count: u64 = stmt
+            .query_row(params, |x| x.get(0))
+            .context(ExecuteHasPrevOrMoreSnafu)?;
         Ok(count > 0)
     }
@ -76,7 +93,7 @@ impl<'a> Pagination<'a> {
         statement: &str,
         params: &[(&str, &dyn ToSql)],
         processor: F,
-    ) -> Result<Vec<T>, DataStoreError>
+    ) -> Result<Vec<T>, PaginationError>
     where
         F: FnMut(&Row<'_>) -> Result<T, rusqlite::Error>,
     {
@ -102,7 +119,7 @@ impl<'a> Pagination<'a> {
         // DANGER: vulnerable to SQL injection if statement or sort_col variable is influenced by user input
         let mut stmt = conn.prepare(&format!(
             "SELECT * FROM ({statement} {where_sql} {sort_col} {comparison} (:cursor) ORDER BY {sort_col} {sort_order:?} LIMIT (:limit)) AS t ORDER BY {sort_col_wrapped} ASC"
-        ))?;
+        )).context(PreparePaginationSnafu)?;
         let params = [
             &[
                 (":cursor", &cursor as &dyn ToSql),
@ -111,7 +128,9 @@ impl<'a> Pagination<'a> {
             params,
         ]
         .concat();
-        let iter = stmt.query_map(params.as_slice(), processor)?;
+        let iter = stmt
+            .query_map(params.as_slice(), processor)
+            .context(ExecutePaginationSnafu)?;
         Ok(iter.filter_map(Result::ok).collect())
     }
 }
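
A condensed sketch of how `paginate` is meant to be driven (the cursor value is illustrative; per the DANGER comment, the statement and sort column must stay hard-coded, and only `:cursor` and `:limit` are bound at run time):

fn first_page(conn: &rusqlite::Connection) -> Result<Vec<String>, PaginationError> {
    let pagination = Pagination::new("sort", Some("Dune"), 25, SortOrder::ASC);
    // Keyset pagination: rows are filtered relative to the cursor's sort key,
    // so no OFFSET scan is needed; the closure maps each row to a value.
    pagination.paginate(
        conn,
        "SELECT id, name, sort FROM series",
        &[],
        |row| row.get(1),
    )
}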

View file

@ -1,12 +1,10 @@
 //! Series data.
-use rusqlite::{named_params, Connection, Row};
+use rusqlite::{Connection, Row, named_params};
 use serde::Serialize;

-use super::{
-    error::DataStoreError,
-    pagination::{Pagination, SortOrder},
-};
+use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
+use snafu::{ResultExt, Snafu};

 /// Series in calibre.
 #[derive(Debug, Clone, Serialize)]
@ -19,6 +17,40 @@ pub struct Series {
     pub sort: String,
 }

+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to fetch multiple series."))]
+pub struct MultipleSeriesError {
+    source: PaginationError,
+}
+
+#[derive(Debug, Snafu)]
+pub enum SeriesBooksError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareSeriesBooks { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteSeriesBooks { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum ScalarSeriesError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareScalarSeries { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteScalarSeries { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for previous series."))]
+pub struct PreviousSeriesError {
+    source: HasPrevOrMoreError,
+}
+
+#[derive(Debug, Snafu)]
+#[snafu(display("Failed to check for more series."))]
+pub struct MoreSeriesError {
+    source: HasPrevOrMoreError,
+}
+
 impl Series {
     fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
         Ok(Self {
@ -35,34 +67,41 @@ impl Series {
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Self>, DataStoreError> {
+    ) -> Result<Vec<Self>, MultipleSeriesError> {
         let pagination = Pagination::new("sort", cursor, limit, *sort_order);
-        pagination.paginate(
-            conn,
-            "SELECT id, name, sort FROM series",
-            &[],
-            Self::from_row,
-        )
+        pagination
+            .paginate(
+                conn,
+                "SELECT id, name, sort FROM series",
+                &[],
+                Self::from_row,
+            )
+            .context(MultipleSeriesSnafu)
     }

     /// Fetch a single series with id `id`.
-    pub fn scalar_series(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
-        let mut stmt = conn.prepare("SELECT id, name, sort FROM series WHERE id = (:id)")?;
+    pub fn scalar_series(conn: &Connection, id: u64) -> Result<Self, ScalarSeriesError> {
+        let mut stmt = conn
+            .prepare("SELECT id, name, sort FROM series WHERE id = (:id)")
+            .context(PrepareScalarSeriesSnafu)?;
         let params = named_params! { ":id": id };
-        Ok(stmt.query_row(params, Self::from_row)?)
+        stmt.query_row(params, Self::from_row)
+            .context(ExecuteScalarSeriesSnafu)
     }

     /// Get the series a book with id `id` is in, as well as the book's position within the series.
     pub fn book_series(
         conn: &Connection,
         book_id: u64,
-    ) -> Result<Option<(Self, f64)>, DataStoreError> {
-        let mut stmt = conn.prepare(
-            "SELECT series.id, series.name, series.sort, books.series_index FROM series \
+    ) -> Result<Option<(Self, f64)>, SeriesBooksError> {
+        let mut stmt = conn
+            .prepare(
+                "SELECT series.id, series.name, series.sort, books.series_index FROM series \
             INNER JOIN books_series_link ON series.id = books_series_link.series \
             INNER JOIN books ON books.id = books_series_link.book \
             WHERE books_series_link.book = (:id)",
-        )?;
+            )
+            .context(PrepareSeriesBooksSnafu)?;
         let params = named_params! { ":id": book_id };
         let from_row = |row: &Row<'_>| {
@ -74,17 +113,22 @@ impl Series {
         match stmt.query_row(params, from_row) {
             Ok(series) => Ok(Some(series)),
             Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
-            Err(e) => Err(DataStoreError::SqliteError(e)),
+            Err(e) => Err(e).context(ExecuteSeriesBooksSnafu),
         }
     }

     /// Check if there are more series before the specified cursor.
-    pub fn has_previous_series(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
+    pub fn has_previous_series(
+        conn: &Connection,
+        sort_name: &str,
+    ) -> Result<bool, PreviousSeriesError> {
         Pagination::has_prev_or_more(conn, "series", sort_name, &SortOrder::DESC)
+            .context(PreviousSeriesSnafu)
     }

     /// Check if there are more series after the specified cursor.
-    pub fn has_more_series(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
+    pub fn has_more_series(conn: &Connection, sort_name: &str) -> Result<bool, MoreSeriesError> {
         Pagination::has_prev_or_more(conn, "series", sort_name, &SortOrder::ASC)
+            .context(MoreSeriesSnafu)
     }
 }
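
The wrapper structs above let callers tell the previous-page check apart from the next-page check while sharing `HasPrevOrMoreError` underneath. A sketch of the two-cursor dance, mirroring what the HTML layer's `paginated::render` does with `sort_field` (assumed helper, not part of this commit):

fn page_flags(
    conn: &rusqlite::Connection,
    page: &[Series],
) -> Result<(bool, bool), Box<dyn std::error::Error>> {
    let (first, last) = match (page.first(), page.last()) {
        (Some(first), Some(last)) => (first, last),
        _ => return Ok((false, false)),
    };
    // Backward cursor = first item's sort key, forward cursor = last item's.
    let has_previous = Series::has_previous_series(conn, &first.sort)?;
    let has_more = Series::has_more_series(conn, &last.sort)?;
    Ok((has_previous, has_more))
}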

View file

@ -10,8 +10,9 @@ use std::path::Path;
 use r2d2::{Pool, PooledConnection};
 use r2d2_sqlite::SqliteConnectionManager;
 use rusqlite::named_params;
+use snafu::{ResultExt, Snafu};

-use crate::data::{book::Book, error::DataStoreError};
+use crate::data::book::Book;

 /// A lot of joins but only run once at startup.
 const SEARCH_INIT_QUERY: &str = "INSERT INTO search.fts(book_id, data)
@ -33,20 +34,61 @@ const SEARCH_INIT_QUERY: &str = "INSERT INTO search.fts(book_id, data)
     LEFT JOIN main.series AS s ON b2s.series = s.id
     GROUP BY b.id";

+#[derive(Debug, Snafu)]
+pub enum EnsureSearchDbError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareEnsureSearch { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteEnsureSearch { source: rusqlite::Error },
+    #[snafu(display("Failed to attach database."))]
+    Attach { source: AttachError },
+    #[snafu(display("Failed to initialize database."))]
+    Init { source: InitError },
+}
+
+#[derive(Debug, Snafu)]
+pub enum AttachError {
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteAttach { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum InitError {
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareInit { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteInit { source: rusqlite::Error },
+}
+
+#[derive(Debug, Snafu)]
+pub enum SearchError {
+    #[snafu(display("Failed to ensure the search db is initialized."))]
+    EnsureDb { source: EnsureSearchDbError },
+    #[snafu(display("Failed to get connection from pool."))]
+    Connection { source: r2d2::Error },
+    #[snafu(display("Failed to prepare statement."))]
+    PrepareSearch { source: rusqlite::Error },
+    #[snafu(display("Failed to execute statement."))]
+    ExecuteSearch { source: rusqlite::Error },
+}
+
 /// Ensure the search database is attached to the connection and
-/// initialize the data if needed.
+/// initialize the data if needed.
 fn ensure_search_db(
     conn: &PooledConnection<SqliteConnectionManager>,
     db_path: &Path,
-) -> Result<(), DataStoreError> {
-    let mut stmt =
-        conn.prepare("SELECT COUNT() FROM pragma_database_list WHERE name = 'search'")?;
-    let count: u64 = stmt.query_row([], |x| x.get(0))?;
+) -> Result<(), EnsureSearchDbError> {
+    let mut stmt = conn
+        .prepare("SELECT COUNT() FROM pragma_database_list WHERE name = 'search'")
+        .context(PrepareEnsureSearchSnafu)?;
+    let count: u64 = stmt
+        .query_row([], |x| x.get(0))
+        .context(ExecuteEnsureSearchSnafu)?;
     let need_attachment = count == 0;

     if need_attachment {
-        attach(conn, db_path)?;
-        init(conn)?;
+        attach(conn, db_path).context(AttachSnafu)?;
+        init(conn).context(InitSnafu)?;
     }

     Ok(())
@ -56,29 +98,32 @@ fn ensure_search_db(
 fn attach(
     conn: &PooledConnection<SqliteConnectionManager>,
     db_path: &Path,
-) -> Result<(), DataStoreError> {
+) -> Result<(), AttachError> {
     conn.execute(
         &format!("ATTACH DATABASE '{}' AS search", db_path.to_string_lossy()),
         [],
-    )?;
-    init(conn)?;
+    )
+    .context(ExecuteAttachSnafu)?;

     Ok(())
 }

 /// Initialise the fts virtual table.
-fn init(conn: &PooledConnection<SqliteConnectionManager>) -> Result<(), DataStoreError> {
+fn init(conn: &PooledConnection<SqliteConnectionManager>) -> Result<(), InitError> {
     let mut stmt = conn
-        .prepare("SELECT COUNT() FROM search.sqlite_master WHERE type='table' AND name = 'fts'")?;
-    let count: u64 = stmt.query_row([], |x| x.get(0))?;
+        .prepare("SELECT COUNT() FROM search.sqlite_master WHERE type='table' AND name = 'fts'")
+        .context(PrepareInitSnafu)?;
+    let count: u64 = stmt.query_row([], |x| x.get(0)).context(ExecuteInitSnafu)?;
     let need_init = count == 0;

     if need_init {
         conn.execute(
             "CREATE VIRTUAL TABLE search.fts USING fts5(book_id, data)",
             [],
-        )?;
-        conn.execute(SEARCH_INIT_QUERY, [])?;
+        )
+        .context(ExecuteInitSnafu)?;
+        conn.execute(SEARCH_INIT_QUERY, [])
+            .context(ExecuteInitSnafu)?;
     }

     Ok(())
@ -89,15 +134,17 @@ pub(crate) fn search(
     query: &str,
     pool: &Pool<SqliteConnectionManager>,
     search_db_path: &Path,
-) -> Result<Vec<Book>, DataStoreError> {
-    let conn = pool.get()?;
-    ensure_search_db(&conn, search_db_path)?;
+) -> Result<Vec<Book>, SearchError> {
+    let conn = pool.get().context(ConnectionSnafu)?;
+    ensure_search_db(&conn, search_db_path).context(EnsureDbSnafu)?;

-    let mut stmt =
-        conn.prepare("SELECT book_id FROM search.fts WHERE data MATCH (:query) ORDER BY rank")?;
+    let mut stmt = conn
+        .prepare("SELECT book_id FROM search.fts WHERE data MATCH (:query) ORDER BY rank")
+        .context(PrepareSearchSnafu)?;
     let params = named_params! { ":query": query };
     let books = stmt
-        .query_map(params, |r| -> Result<u64, rusqlite::Error> { r.get(0) })?
+        .query_map(params, |r| -> Result<u64, rusqlite::Error> { r.get(0) })
+        .context(ExecuteSearchSnafu)?
        .filter_map(Result::ok)
        .filter_map(|id| Book::scalar_book(&conn, id).ok())
        .collect();
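
A sketch of a caller for the crate-internal `search` entry point (the owning facade is not part of this hunk). Every call re-runs `ensure_search_db`, so only the first query on a fresh connection pays the `SEARCH_INIT_QUERY` back-fill cost:

// `pool` is the r2d2 SQLite pool used throughout the crate; `search_db`
// is a writable path for the side-car FTS database (assumed inputs).
fn find_books(
    pool: &Pool<SqliteConnectionManager>,
    search_db: &Path,
) -> Result<Vec<Book>, SearchError> {
    // The query string uses SQLite FTS5 MATCH syntax, e.g. bare terms
    // or a quoted phrase.
    search("\"paul atreides\"", pool, search_db)
}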

View file

@ -8,23 +8,28 @@ repository = { workspace = true }
 description = "A very simple ebook server for a calibre library, providing an HTML interface as well as an OPDS feed."

 [dependencies]
+axum = { version = "0.8.4", features = ["http2", "tracing"] }
 calibre-db = { path = "../calibre-db/", version = "0.1.0" }
 clap = { version = "4.5.40", features = ["derive", "env"] }
 image = { version = "0.25.6", default-features = false, features = ["jpeg", "rayon"] }
+mime_guess = "2.0.5"
 once_cell = "1.21.3"
-poem = { version = "3.0.1", features = ["embed", "static-files"] }
 rust-embed = "8.7.2"
 sha2 = "0.10.9"
 serde = { workspace = true }
 serde_json = "1.0.140"
 serde_with = "3.14.0"
+snafu = { workspace = true }
 tera = "1.20.0"
-thiserror = { workspace = true }
 time = { workspace = true }
-tokio = { version = "1.45.1", features = ["signal", "rt-multi-thread", "macros"] }
+tokio = { version = "1.45.1", features = ["signal", "fs", "rt-multi-thread", "macros"] }
 tokio-util = "0.7.15"
+tower-http = { version = "0.6.6", features = ["trace"] }
 tracing = "0.1.41"
-tracing-subscriber = "0.3.19"
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
+utoipa = { version = "5.4.0", features = ["axum_extras"] }
+utoipa-axum = "0.2.0"
+utoipa-swagger-ui = { version = "9.0.2", features = ["axum", "vendored"] }
 uuid = { version = "1.17.0", features = ["v4", "fast-rng"] }
 quick-xml = { version = "0.38.0", features = ["serialize"] }

86
little-hesinde/src/api.rs Normal file
View file

@ -0,0 +1,86 @@
use std::{io, net::SocketAddr};
use serde::Deserialize;
use snafu::{ResultExt, Snafu};
use tokio::net::TcpListener;
use utoipa::{OpenApi, ToSchema};
use utoipa_axum::router::OpenApiRouter;
use utoipa_swagger_ui::SwaggerUi;
use crate::app_state::AppState;
pub mod authors;
pub mod books;
pub mod download;
pub mod error;
pub mod html;
pub mod opds;
pub mod paginated;
pub mod routes;
pub mod search;
pub mod series;
pub mod static_files;
/// How to sort query results.
#[derive(Debug, Copy, Clone, PartialEq, Deserialize, ToSchema)]
#[serde(rename_all = "UPPERCASE")]
pub enum SortOrder {
ASC,
DESC,
}
impl From<SortOrder> for calibre_db::data::pagination::SortOrder {
fn from(val: SortOrder) -> Self {
match val {
SortOrder::ASC => calibre_db::data::pagination::SortOrder::ASC,
SortOrder::DESC => calibre_db::data::pagination::SortOrder::DESC,
}
}
}
/// OpenAPI tag for all endpoints.
const TAG: &str = "little-hesinde";
/// OpenAPI documentation configuration.
#[derive(OpenApi)]
#[openapi(
components(
schemas(
SortOrder
)
),
tags(
(name = TAG, description = "Browser and OPDS access to a calibre library.")
)
)]
struct ApiDoc;
/// Errors that occur when starting the HTTP server.
#[derive(Debug, Snafu)]
pub enum ServeError {
#[snafu(display("Failed to bind to {address}."))]
Bind {
source: io::Error,
address: SocketAddr,
},
#[snafu(display("Failed to run http server."))]
Serve { source: io::Error },
}
/// Start the HTTP API server with the given configuration.
pub async fn serve(address: SocketAddr, state: AppState) -> Result<(), ServeError> {
let (router, api) = OpenApiRouter::with_openapi(ApiDoc::openapi())
.merge(routes::router(state))
.split_for_parts();
let router =
router.merge(SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", api.clone()));
let listener = TcpListener::bind(&address)
.await
.context(BindSnafu { address })?;
axum::serve(listener, router.into_make_service())
.await
.context(ServeSnafu)
}
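
A sketch of the binary-side wiring (how `AppState` is constructed is outside this file, hence the `todo!`):

#[tokio::main]
async fn main() -> Result<(), ServeError> {
    let address: std::net::SocketAddr = "127.0.0.1:3000".parse().expect("static address");
    // Hypothetical: built from CLI config and the calibre library elsewhere.
    let state: AppState = todo!();
    serve(address, state).await
}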

View file

@ -0,0 +1,36 @@
use std::path::Path;
use calibre_db::{
calibre::{AuthorBooksError, Calibre, ScalarAuthorError},
data::author::Author,
};
use crate::data::book::Book;
use snafu::{ResultExt, Snafu};
use super::SortOrder;
#[derive(Debug, Snafu)]
pub enum SingleAuthorError {
#[snafu(display("Failed to fetch author data."))]
AuthorData { source: ScalarAuthorError },
#[snafu(display("Failed to fetch books from author."))]
BookData { source: AuthorBooksError },
}
pub async fn single(
id: u64,
calibre: &Calibre,
library_path: &Path,
) -> Result<(Author, Vec<Book>), SingleAuthorError> {
let author = calibre.scalar_author(id).context(AuthorDataSnafu)?;
let books = calibre
.author_books(id, u32::MAX.into(), None, SortOrder::ASC.into())
.context(BookDataSnafu)?;
let books = books
.iter()
.filter_map(|x| Book::full_book(x, calibre, library_path))
.collect::<Vec<Book>>();
Ok((author, books))
}

View file

@ -0,0 +1,22 @@
use std::path::Path;
use calibre_db::calibre::{Calibre, RecentBooksError as DbRecentBooksError};
use snafu::{ResultExt, Snafu};
use crate::data::book::Book;
#[derive(Debug, Snafu)]
pub enum RecentBooksError {
#[snafu(display("Failed to fetch recent books."))]
RecentBooks { source: DbRecentBooksError },
}
pub async fn recent(calibre: &Calibre, library_path: &Path) -> Result<Vec<Book>, RecentBooksError> {
let recent_books = calibre.recent_books(25).context(RecentBooksSnafu)?;
let recent_books = recent_books
.iter()
.filter_map(|x| Book::full_book(x, calibre, library_path))
.collect::<Vec<Book>>();
Ok(recent_books)
}

View file

@ -0,0 +1,38 @@
use axum::{
body::Body,
http::{self, StatusCode, header},
response::Response,
};
use snafu::{ResultExt, Snafu};
use tokio::io::AsyncRead;
use tokio_util::io::ReaderStream;
#[derive(Debug, Snafu)]
pub enum DownloadError {
#[snafu(display("Failed to fetch cover."))]
Body { source: http::Error },
}
/// Handle a request for a file.
///
/// Must not be used directly from a route as that makes it vulnerable to path traversal attacks.
pub async fn handler<A: AsyncRead + Send + Unpin + 'static>(
file_name: &str,
reader: A,
content_type: &str,
) -> Result<Response, DownloadError> {
let stream = ReaderStream::new(reader);
let body = Body::from_stream(stream);
let response = Response::builder()
.status(StatusCode::OK)
.header(
header::CONTENT_DISPOSITION,
format!("filename=\"{file_name}\""),
)
.header(header::CONTENT_TYPE, content_type)
.body(body)
.context(BodySnafu)?;
Ok(response)
}
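
A sketch of a trusting caller (path and media type are illustrative; the real book route later in this commit resolves the path from database values, never raw user input):

async fn stream_epub() -> Result<Response, DownloadError> {
    let file = tokio::fs::File::open("/library/example.epub")
        .await
        .expect("illustrative; real callers map this to their own error");
    // Any AsyncRead works: a tokio File, an in-memory &[u8], etc.
    handler("example.epub", file, "application/epub+zip").await
}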

View file

@ -0,0 +1,41 @@
//! HTTP error handling and response formatting.
use axum::http::StatusCode;
use serde::Serialize;
use utoipa::ToSchema;
/// Standard error response format for API endpoints.
#[derive(Serialize, ToSchema)]
pub struct ErrorResponse {
/// Unique identifier for tracking this error instance.
pub id: String,
/// Human-readable error message.
pub error: String,
}
/// Map error types to HTTP status codes.
pub trait HttpStatus {
/// Return the appropriate HTTP status code for this error.
fn status_code(&self) -> StatusCode;
}
/// Generate IntoResponse implementation for error types with JSON formatting.
#[macro_export]
macro_rules! http_error {
($error_type:ty) => {
impl axum::response::IntoResponse for $error_type {
fn into_response(self) -> axum::response::Response {
let status = self.status_code();
let id = uuid::Uuid::new_v4().to_string();
tracing::error!("{}: {}", &id, snafu::Report::from_error(&self));
let error_response = $crate::api::error::ErrorResponse {
id,
error: self.to_string(),
};
(status, axum::Json(error_response)).into_response()
}
}
};
}
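
Every route module in this commit repeats the same three steps: derive a snafu error, map its variants to status codes via `HttpStatus`, then invoke the macro. A condensed sketch with an invented error type:

use axum::http::StatusCode;
use snafu::Snafu;

#[derive(Debug, Snafu)]
pub enum ExampleError {
    #[snafu(display("Failed to load the thing."))]
    Load { source: std::io::Error },
}

impl HttpStatus for ExampleError {
    fn status_code(&self) -> StatusCode {
        match self {
            ExampleError::Load { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}

// Expands to an IntoResponse impl that logs the snafu Report under a fresh
// UUID and answers with (status, Json(ErrorResponse { id, error })).
http_error!(ExampleError);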

View file

@ -0,0 +1,7 @@
pub mod archive;
pub mod authors;
pub mod books;
pub mod cover;
pub mod recent;
pub mod search;
pub mod series;

View file

@ -0,0 +1,45 @@
use axum::{http::StatusCode, response::Response};
use snafu::{ResultExt, Snafu};
use crate::{
APP_NAME, APP_VERSION,
api::{
TAG,
download::{self, DownloadError},
error::{ErrorResponse, HttpStatus},
},
http_error,
};
const SOURCE_ARCHIVE: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/archive.zip"));
#[derive(Debug, Snafu)]
pub enum ArchiveError {
#[snafu(display("Failed to stream source code archive."))]
Download { source: DownloadError },
}
impl HttpStatus for ArchiveError {
fn status_code(&self) -> StatusCode {
match self {
ArchiveError::Download { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(ArchiveError);
/// Handle a request for the server's source code.
#[utoipa::path(
get,
path = "/archive",
tag = TAG,
responses(
(status = OK, content_type = "application/zip"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler() -> Result<Response, ArchiveError> {
let file_name = format!("{APP_NAME}-{APP_VERSION}.zip");
download::handler(&file_name, SOURCE_ARCHIVE, "application/zip")
.await
.context(DownloadSnafu)
}

View file

@ -0,0 +1,134 @@
use std::sync::Arc;
use crate::{
api::{
SortOrder, TAG,
authors::{self, SingleAuthorError},
error::{ErrorResponse, HttpStatus},
paginated::{self, PaginationError},
},
app_state::AppState,
http_error,
templates::TEMPLATES,
};
use axum::{
extract::{Path, State},
http::StatusCode,
response::{Html, IntoResponse, Response},
};
use snafu::{ResultExt, Snafu};
use tera::Context;
#[derive(Debug, Snafu)]
pub enum RetrieveError {
#[snafu(display("Failed to fetch pagination data."))]
Authors { source: AuthorError },
}
impl HttpStatus for RetrieveError {
fn status_code(&self) -> StatusCode {
match self {
RetrieveError::Authors { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(RetrieveError);
#[utoipa::path(
get,
path = "/authors",
tag = TAG,
responses(
(status = OK, content_type = "text/html"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler_init(State(state): State<Arc<AppState>>) -> Result<Response, RetrieveError> {
authors(&state, None, SortOrder::ASC)
.await
.context(AuthorsSnafu)
}
#[utoipa::path(
get,
path = "/authors/{cursor}/{sort_order}",
tag = TAG,
responses(
(status = OK, content_type = "text/html"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler(
Path((cursor, sort_order)): Path<(String, SortOrder)>,
State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
authors(&state, Some(&cursor), sort_order)
.await
.context(AuthorsSnafu)
}
#[derive(Debug, Snafu)]
pub enum AuthorError {
#[snafu(display("Failed to fetch pagination data."))]
Pagination { source: PaginationError },
}
async fn authors(
state: &Arc<AppState>,
cursor: Option<&str>,
sort_order: SortOrder,
) -> Result<Response, AuthorError> {
paginated::render(
"authors",
|| state.calibre.authors(25, cursor, &sort_order.into()),
|author| author.sort.clone(),
|cursor| state.calibre.has_previous_authors(cursor),
|cursor| state.calibre.has_more_authors(cursor),
)
.context(PaginationSnafu)
}
#[derive(Debug, Snafu)]
pub enum SingleError {
#[snafu(display("Failed to fetch author data."))]
Data { source: SingleAuthorError },
#[snafu(display("Failed to render template."))]
Render { source: tera::Error },
}
impl HttpStatus for SingleError {
fn status_code(&self) -> StatusCode {
match self {
SingleError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
SingleError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(SingleError);
#[utoipa::path(
get,
path = "/authors/{id}",
tag = TAG,
responses(
(status = OK, content_type = "text/html"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn single(
Path(id): Path<u64>,
State(state): State<Arc<AppState>>,
) -> Result<Response, SingleError> {
let (author, books) = authors::single(id, &state.calibre, &state.config.library_path)
.await
.context(DataSnafu)?;
let mut context = Context::new();
context.insert("title", &author.name);
context.insert("nav", "authors");
context.insert("books", &books);
Ok(TEMPLATES
.render("book_list", &context)
.context(RenderSnafu)
.map(Html)?
.into_response())
}

View file

@ -0,0 +1,155 @@
use std::{io, sync::Arc};
use crate::{
api::{
SortOrder, TAG, download,
error::{ErrorResponse, HttpStatus},
paginated::{self, PaginationError},
},
app_state::AppState,
data::book::{Book, Format},
http_error,
opds::media_type::MediaType,
};
use axum::{
extract::{Path, State},
http::StatusCode,
response::Response,
};
use calibre_db::calibre::ScalarBookError;
use snafu::{ResultExt, Snafu};
use tokio::fs::File;
#[derive(Debug, Snafu)]
pub enum RetrieveError {
#[snafu(display("Failed to fetch pagination data."))]
Books { source: BookError },
}
impl HttpStatus for RetrieveError {
fn status_code(&self) -> StatusCode {
match self {
RetrieveError::Books { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(RetrieveError);
#[utoipa::path(
get,
path = "/books",
tag = TAG,
responses(
(status = OK, content_type = "text/html"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler_init(State(state): State<Arc<AppState>>) -> Result<Response, RetrieveError> {
books(&state, None, SortOrder::ASC)
.await
.context(BooksSnafu)
}
#[utoipa::path(
get,
path = "/books/{cursor}/{sort_order}",
tag = TAG,
responses(
(status = OK, content_type = "text/html"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler(
Path((cursor, sort_order)): Path<(String, SortOrder)>,
State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
books(&state, Some(&cursor), sort_order)
.await
.context(BooksSnafu)
}
#[derive(Debug, Snafu)]
pub enum BookError {
#[snafu(display("Failed to fetch pagination data."))]
Pagination { source: PaginationError },
}
async fn books(
state: &Arc<AppState>,
cursor: Option<&str>,
sort_order: SortOrder,
) -> Result<Response, BookError> {
paginated::render(
"books",
|| {
state
.calibre
.books(25, cursor, &sort_order.into())
.map(|x| {
x.iter()
.filter_map(|y| {
Book::full_book(y, &state.calibre, &state.config.library_path)
})
.collect()
})
},
|book| book.data.sort.clone(),
|cursor| state.calibre.has_previous_books(cursor),
|cursor| state.calibre.has_more_books(cursor),
)
.context(PaginationSnafu)
}
#[derive(Debug, Snafu)]
pub enum DownloadError {
#[snafu(display("Failed to fetch book data."))]
BookData { source: ScalarBookError },
#[snafu(display("No such book."))]
NotFound,
#[snafu(display("No such book."))]
FileNotFound { source: io::Error },
#[snafu(display("Failed to stream book file."))]
Stream { source: download::DownloadError },
}
impl HttpStatus for DownloadError {
fn status_code(&self) -> StatusCode {
match self {
DownloadError::BookData { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
DownloadError::NotFound => StatusCode::NOT_FOUND,
DownloadError::FileNotFound { source: _ } => StatusCode::NOT_FOUND,
DownloadError::Stream { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(DownloadError);
#[utoipa::path(
get,
path = "/book/{id}/{format}",
tag = TAG,
responses(
(status = OK, content_type = "application/*"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn download(
Path((id, format)): Path<(u64, String)>,
State(state): State<Arc<AppState>>,
) -> Result<Response, DownloadError> {
let book = state.calibre.scalar_book(id).context(BookDataSnafu)?;
let book = Book::full_book(&book, &state.calibre, &state.config.library_path)
.ok_or(NotFoundSnafu.build())?;
let format = Format(format);
let file_name = book.formats.get(&format).ok_or(NotFoundSnafu.build())?;
let file_path = state
.config
.library_path
.join(book.data.path)
.join(file_name);
let file = File::open(file_path).await.context(FileNotFoundSnafu)?;
let content_type: MediaType = format.into();
let content_type = format!("{content_type}");
download::handler(file_name, file, &content_type)
.await
.context(StreamSnafu)
}

View file

@ -0,0 +1,125 @@
use axum::{
extract::{Path, State},
http::StatusCode,
response::Response,
};
use calibre_db::calibre::{Calibre, ScalarBookError};
use snafu::{ResultExt, Snafu};
use std::{fs::File, io, path::Path as FilePath, sync::Arc};
use tokio::fs::File as AsyncFile;
use crate::{
api::{
TAG,
download::{self, DownloadError},
error::{ErrorResponse, HttpStatus},
},
app_state::AppState,
cache::{self, RetrieveThumbnailError},
http_error,
};
#[derive(Debug, Snafu)]
pub enum RetrieveError {
#[snafu(display("Failed to fetch cover."))]
Cover { source: CoverError },
#[snafu(display("Failed to open cover."))]
CoverOpen { source: io::Error },
}
impl HttpStatus for RetrieveError {
fn status_code(&self) -> StatusCode {
match self {
RetrieveError::Cover { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
RetrieveError::CoverOpen { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(RetrieveError);
#[utoipa::path(
get,
path = "/cover/{id}/thumbnail",
tag = TAG,
responses(
(status = OK, content_type = "image/jpeg"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn thumbnail(
Path(id): Path<u64>,
State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
cover(
&state.calibre,
&state.config.library_path,
&state.config.cache_path,
id,
|cover_path, cache_path| {
cache::get_thumbnail(cover_path, cache_path).context(ThumbnailSnafu)
},
)
.await
.context(CoverSnafu)
}
#[utoipa::path(
get,
path = "/cover/{id}",
tag = TAG,
responses(
(status = OK, content_type = "image/jpeg"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn full(
Path(id): Path<u64>,
State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
cover(
&state.calibre,
&state.config.library_path,
&state.config.cache_path,
id,
|cover_path, _| File::open(cover_path).context(FileOpenSnafu),
)
.await
.context(CoverSnafu)
}
#[derive(Debug, Snafu)]
pub enum CoverError {
#[snafu(display("Failed to fetch book data."))]
BookData { source: ScalarBookError },
#[snafu(display("No such cover"))]
NotFound { source: CoverFetchError },
#[snafu(display("Failed to fetch cover thumbnail."))]
StreamCover { source: DownloadError },
}
#[derive(Debug, Snafu)]
pub enum CoverFetchError {
#[snafu(display("Failed to fetch cover thumbnail."))]
Thumbnail { source: RetrieveThumbnailError },
#[snafu(display("Failed to open cover file."))]
FileOpen { source: io::Error },
}
async fn cover<F>(
calibre: &Calibre,
library_path: &FilePath,
cache_path: &FilePath,
id: u64,
f: F,
) -> Result<Response, CoverError>
where
F: Fn(&FilePath, &FilePath) -> Result<File, CoverFetchError>,
{
let book = calibre.scalar_book(id).context(BookDataSnafu)?;
let cover_path = library_path.join(book.path).join("cover.jpg");
let cover = f(&cover_path, cache_path).context(NotFoundSnafu)?;
let cover = AsyncFile::from_std(cover);
download::handler("cover.jpg", cover, "image/jpeg")
.await
.context(StreamCoverSnafu)
}

View file

@ -0,0 +1,70 @@
//! Handle requests for recent books.
use std::sync::Arc;
use axum::{
extract::State,
http::StatusCode,
response::{Html, IntoResponse, Response},
};
use snafu::{ResultExt, Snafu};
use tera::Context;
use crate::{
api::{
TAG,
books::{self, RecentBooksError},
error::{ErrorResponse, HttpStatus},
},
app_state::AppState,
http_error,
templates::TEMPLATES,
};
/// Errors that occur during query processing.
#[derive(Debug, Snafu)]
pub enum RecentError {
#[snafu(display("Failed to fetch recent books."))]
RecentBooks { source: RecentBooksError },
#[snafu(display("Failed to render template."))]
Template { source: tera::Error },
}
impl HttpStatus for RecentError {
fn status_code(&self) -> StatusCode {
match self {
RecentError::RecentBooks { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
RecentError::Template { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(RecentError);
/// Get recently added books.
///
/// Provides a list of the 25 most recently added books.
/// The format can be either HTML or an OPDS feed, depending on the `Accept` header.
#[utoipa::path(
get,
path = "/recent",
tag = TAG,
responses(
(status = 200, description = "List of recent books", content_type = "text/html"),
(status = 500, description = "Error retrieving books from database", body = ErrorResponse)
)
)]
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, RecentError> {
let recent_books = books::recent(&state.calibre, &state.config.library_path)
.await
.context(RecentBooksSnafu)?;
let mut context = Context::new();
context.insert("title", "");
context.insert("nav", "recent");
context.insert("books", &recent_books);
Ok(TEMPLATES
.render("book_list", &context)
.map(Html)
.context(TemplateSnafu)?
.into_response())
}

View file

@ -0,0 +1,73 @@
use std::sync::Arc;
use axum::{
extract::{Query, State},
http::StatusCode,
response::{Html, IntoResponse, Response},
};
use serde::Deserialize;
use snafu::{ResultExt, Snafu};
use tera::Context;
use crate::{
api::{
TAG,
error::{ErrorResponse, HttpStatus},
search::{self, SearchQueryError},
},
app_state::AppState,
http_error,
templates::TEMPLATES,
};
/// Errors that occur during query processing.
#[derive(Debug, Snafu)]
pub enum SearchError {
#[snafu(display("Failed to search for books."))]
Query { source: SearchQueryError },
#[snafu(display("Failed to render template."))]
Template { source: tera::Error },
}
impl HttpStatus for SearchError {
fn status_code(&self) -> StatusCode {
match self {
SearchError::Query { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
SearchError::Template { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(SearchError);
#[derive(Deserialize)]
pub struct Params {
/// Query for a search request.
query: String,
}
#[utoipa::path(
get,
path = "/search",
tag = TAG,
responses(
(status = 200, content_type = "text/html"),
(status = 500, description = "Error retrieving books from database", body = ErrorResponse)
)
)]
pub async fn handler(
Query(params): Query<Params>,
State(state): State<Arc<AppState>>,
) -> Result<Response, SearchError> {
let books = search::query(&params.query, &state.calibre, &state.config.library_path)
.await
.context(QuerySnafu)?;
let mut context = Context::new();
context.insert("title", "Search Results");
context.insert("nav", "search");
context.insert("books", &books);
Ok(TEMPLATES
.render("book_list", &context)
.context(TemplateSnafu)
.map(Html)?
.into_response())
}

View file

@ -0,0 +1,134 @@
use std::sync::Arc;
use crate::{
api::{
SortOrder, TAG,
error::{ErrorResponse, HttpStatus},
paginated::{self, PaginationError},
series::{self, SingleSeriesError},
},
app_state::AppState,
http_error,
templates::TEMPLATES,
};
use axum::{
extract::{Path, State},
http::StatusCode,
response::{Html, IntoResponse, Response},
};
use snafu::{ResultExt, Snafu};
use tera::Context;
#[derive(Debug, Snafu)]
pub enum RetrieveError {
#[snafu(display("Failed to fetch series data."))]
Series { source: SeriesError },
}
impl HttpStatus for RetrieveError {
fn status_code(&self) -> StatusCode {
match self {
RetrieveError::Series { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(RetrieveError);
#[utoipa::path(
get,
path = "/series",
tag = TAG,
responses(
(status = OK, content_type = "text/html"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler_init(State(state): State<Arc<AppState>>) -> Result<Response, RetrieveError> {
series(&state, None, SortOrder::ASC)
.await
.context(SeriesSnafu)
}
#[utoipa::path(
get,
path = "/series/{cursor}/{sort_order}",
tag = TAG,
responses(
(status = OK, content_type = "text/html"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler(
Path((cursor, sort_order)): Path<(String, SortOrder)>,
State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
series(&state, Some(&cursor), sort_order)
.await
.context(SeriesSnafu)
}
#[derive(Debug, Snafu)]
pub enum SeriesError {
#[snafu(display("Failed to fetch pagination data."))]
Pagination { source: PaginationError },
}
async fn series(
state: &Arc<AppState>,
cursor: Option<&str>,
sort_order: SortOrder,
) -> Result<Response, SeriesError> {
paginated::render(
"series",
|| state.calibre.series(25, cursor, &sort_order.into()),
|series| series.sort.clone(),
|cursor| state.calibre.has_previous_series(cursor),
|cursor| state.calibre.has_more_series(cursor),
)
.context(PaginationSnafu)
}
#[derive(Debug, Snafu)]
pub enum SingleError {
#[snafu(display("Failed to fetch series data."))]
Data { source: SingleSeriesError },
#[snafu(display("Failed to render template."))]
Render { source: tera::Error },
}
impl HttpStatus for SingleError {
fn status_code(&self) -> StatusCode {
match self {
SingleError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
SingleError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(SingleError);
#[utoipa::path(
get,
path = "/series/{id}",
tag = TAG,
responses(
(status = OK, content_type = "text/html"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn single(
Path(id): Path<u64>,
State(state): State<Arc<AppState>>,
) -> Result<Response, SingleError> {
let (series, books) = series::single(id, &state.calibre, &state.config.library_path)
.await
.context(DataSnafu)?;
let mut context = Context::new();
context.insert("title", &series.name);
context.insert("nav", "series");
context.insert("books", &books);
Ok(TEMPLATES
.render("book_list", &context)
.context(RenderSnafu)
.map(Html)?
.into_response())
}

View file

@ -0,0 +1,5 @@
pub mod authors;
pub mod books;
pub mod recent;
pub mod search;
pub mod series;

View file

@ -0,0 +1,138 @@
use std::sync::Arc;
use crate::{
APP_NAME,
api::{
SortOrder, TAG,
authors::{self, SingleAuthorError},
error::{ErrorResponse, HttpStatus},
},
app_state::AppState,
http_error,
opds::{
entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
relation::Relation,
},
};
use axum::{
extract::{Path, State},
http::{StatusCode, header},
response::{IntoResponse, Response},
};
use calibre_db::{calibre::AuthorsError as DbAuthorsError, data::author::Author as DbAuthor};
use snafu::{ResultExt, Snafu};
use time::OffsetDateTime;
#[derive(Debug, Snafu)]
pub enum AuthorsError {
#[snafu(display("Failed to fetch author data."))]
Data { source: DbAuthorsError },
#[snafu(display("Failed to render author data."))]
Render { source: AsXmlError },
}
impl HttpStatus for AuthorsError {
fn status_code(&self) -> StatusCode {
match self {
AuthorsError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
AuthorsError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(AuthorsError);
/// Render all authors as OPDS entries embedded in a feed.
#[utoipa::path(
get,
path = "/authors",
tag = TAG,
responses(
(status = OK, content_type = "application/atom+xml"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, AuthorsError> {
let authors: Vec<DbAuthor> = state
.calibre
.authors(u32::MAX.into(), None, &SortOrder::ASC.into())
.context(DataSnafu)?;
let entries: Vec<Entry> = authors.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: "/opds/authors".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:authors"),
"All Authors",
self_link,
vec![],
entries,
);
let xml = feed.as_xml().context(RenderSnafu)?;
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
}
#[derive(Debug, Snafu)]
pub enum SingleError {
#[snafu(display("Failed to fetch author data."))]
AuthorData { source: SingleAuthorError },
#[snafu(display("Failed to render feed."))]
FeedRender { source: AsXmlError },
}
impl HttpStatus for SingleError {
fn status_code(&self) -> StatusCode {
match self {
SingleError::AuthorData { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
SingleError::FeedRender { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(SingleError);
/// Render a single author as an OPDS entry embedded in a feed.
#[utoipa::path(
get,
path = "/authors/{id}",
tag = TAG,
responses(
(status = OK, content_type = "application/atom+xml"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn single(
Path(id): Path<u64>,
State(state): State<Arc<AppState>>,
) -> Result<Response, SingleError> {
let (author, books) = authors::single(id, &state.calibre, &state.config.library_path)
.await
.context(AuthorDataSnafu)?;
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: format!("/opds/authors/{}", author.id),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}author:{}", author.id),
&author.name,
self_link,
vec![],
entries,
);
let xml = feed.as_xml().context(FeedRenderSnafu)?;
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
}

View file

@ -0,0 +1,95 @@
use std::sync::Arc;
use crate::{
APP_NAME,
api::{
SortOrder, TAG,
error::{ErrorResponse, HttpStatus},
},
app_state::AppState,
data::book::Book,
http_error,
opds::{
entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
relation::Relation,
},
};
use axum::{
extract::State,
http::{StatusCode, header},
response::{IntoResponse, Response},
};
use calibre_db::calibre::BooksError;
use snafu::{ResultExt, Snafu};
use time::OffsetDateTime;
#[derive(Debug, Snafu)]
pub enum OdpsBooksError {
#[snafu(display("Failed to fetch book data."))]
Data { source: BooksError },
#[snafu(display("Failed to render book data."))]
Render { source: RenderError },
}
impl HttpStatus for OdpsBooksError {
fn status_code(&self) -> StatusCode {
match self {
OdpsBooksError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
OdpsBooksError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(OdpsBooksError);
/// Render all books as OPDS entries embedded in a feed.
#[utoipa::path(
get,
path = "/books",
tag = TAG,
responses(
(status = OK, content_type = "application/atom+xml"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, OdpsBooksError> {
let books: Vec<Book> = state
.calibre
.books(u32::MAX.into(), None, &SortOrder::ASC.into())
.map(|x| {
x.iter()
.filter_map(|y| Book::full_book(y, &state.calibre, &state.config.library_path))
.collect()
})
.context(DataSnafu)?;
render_books(books).await.context(RenderSnafu)
}
#[derive(Debug, Snafu)]
pub enum RenderError {
#[snafu(display("Failed to create opds feed."))]
Feed { source: AsXmlError },
}
/// Render a list of books as OPDS entries in a feed.
pub(crate) async fn render_books(books: Vec<Book>) -> Result<Response, RenderError> {
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: "/opds/books".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:books"),
"All Books",
self_link,
vec![],
entries,
);
let xml = feed.as_xml().context(FeedSnafu)?;
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
}

View file

@ -0,0 +1,76 @@
use std::sync::Arc;
use crate::{
APP_NAME,
api::{
TAG,
books::{self, RecentBooksError},
error::{ErrorResponse, HttpStatus},
},
app_state::AppState,
http_error,
opds::{
entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
relation::Relation,
},
};
use axum::{
extract::State,
http::{StatusCode, header},
response::{IntoResponse, Response},
};
use snafu::{ResultExt, Snafu};
use time::OffsetDateTime;
#[derive(Debug, Snafu)]
pub enum RecentError {
#[snafu(display("Failed to fetch recent books."))]
Data { source: RecentBooksError },
#[snafu(display("Failed to render feed."))]
Render { source: AsXmlError },
}
impl HttpStatus for RecentError {
fn status_code(&self) -> StatusCode {
match self {
RecentError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
RecentError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(RecentError);
#[utoipa::path(
get,
path = "/recent",
tag = TAG,
responses(
(status = OK, content_type = "application/atom+xml"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, RecentError> {
let recent_books = books::recent(&state.calibre, &state.config.library_path)
.await
.context(DataSnafu)?;
let entries: Vec<Entry> = recent_books.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: "/opds/recent".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:recentbooks"),
"Recent Books",
self_link,
vec![],
entries,
);
let xml = feed.as_xml().context(RenderSnafu)?;
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
}

View file

@ -0,0 +1,107 @@
use std::sync::Arc;
use crate::{
APP_NAME,
api::{
TAG,
error::{ErrorResponse, HttpStatus},
search::{self, SearchQueryError},
},
app_state::AppState,
http_error,
opds::{
error::AsXmlError,
search::{OpenSearchDescription, Url},
},
};
use axum::{
extract::{Query, State},
http::{StatusCode, header},
response::{IntoResponse, Response},
};
use serde::Deserialize;
use snafu::{ResultExt, Snafu};
use super::books::{RenderError, render_books};
#[derive(Debug, Snafu)]
pub enum SearchError {
#[snafu(display("Failed to query books."))]
Query { source: SearchQueryError },
#[snafu(display("Failed to render feed."))]
Render { source: RenderError },
}
impl HttpStatus for SearchError {
fn status_code(&self) -> StatusCode {
match self {
SearchError::Query { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
SearchError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(SearchError);
#[derive(Deserialize)]
pub struct Params {
/// Query for a search request.
query: String,
}
#[utoipa::path(
get,
path = "/search",
tag = TAG,
responses(
(status = 200, content_type = "application/atom+xml"),
(status = 500, description = "Error retrieving books from database", body = ErrorResponse)
)
)]
pub async fn handler(
Query(params): Query<Params>,
State(state): State<Arc<AppState>>,
) -> Result<Response, SearchError> {
let books = search::query(&params.query, &state.calibre, &state.config.library_path)
.await
.context(QuerySnafu)?;
render_books(books).await.context(RenderSnafu)
}
#[derive(Debug, Snafu)]
pub enum InfoError {
#[snafu(display("Failed to render feed."))]
FeedRender { source: AsXmlError },
}
impl HttpStatus for InfoError {
fn status_code(&self) -> StatusCode {
match self {
InfoError::FeedRender { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(InfoError);
#[utoipa::path(
get,
path = "/search/info",
tag = TAG,
responses(
(status = 200, content_type = "application/atom+xml"),
(status = 500, description = "Internal error", body = ErrorResponse)
)
)]
pub async fn info() -> Result<Response, InfoError> {
let search = OpenSearchDescription {
short_name: APP_NAME.to_string(),
description: "Search for ebooks".to_string(),
input_encoding: "UTF-8".to_string(),
output_encoding: "UTF-8".to_string(),
url: Url {
type_name: "application/atom+xml".to_string(),
template: "/opds/search?query={searchTerms}".to_string(),
},
};
let xml = search.as_xml().context(FeedRenderSnafu)?;
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
}

View file

@ -0,0 +1,138 @@
use std::sync::Arc;
use crate::{
APP_NAME,
api::{
SortOrder, TAG,
error::{ErrorResponse, HttpStatus},
series::{self, SingleSeriesError},
},
app_state::AppState,
http_error,
opds::{
entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
relation::Relation,
},
};
use axum::{
extract::{Path, State},
http::{StatusCode, header},
response::{IntoResponse, Response},
};
use calibre_db::calibre::MultipleSeriesError;
use snafu::{ResultExt, Snafu};
use time::OffsetDateTime;
#[derive(Debug, Snafu)]
pub enum SeriesError {
#[snafu(display("Failed to fetch series data."))]
Data { source: MultipleSeriesError },
#[snafu(display("Failed to render series data."))]
Render { source: AsXmlError },
}
impl HttpStatus for SeriesError {
fn status_code(&self) -> StatusCode {
match self {
SeriesError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
SeriesError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(SeriesError);
/// Render all series as OPDS entries embedded in a feed.
#[utoipa::path(
get,
path = "/series",
tag = TAG,
responses(
(status = OK, content_type = "application/atom+xml"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, SeriesError> {
let series = state
.calibre
.series(u32::MAX.into(), None, &SortOrder::ASC.into())
.context(DataSnafu)?;
let entries: Vec<Entry> = series.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: "/opds/series".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:series"),
"All Series",
self_link,
vec![],
entries,
);
let xml = feed.as_xml().context(RenderSnafu)?;
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
}
#[derive(Debug, Snafu)]
pub enum SingleError {
#[snafu(display("Failed to fetch series data."))]
SeriesData { source: SingleSeriesError },
#[snafu(display("Failed to render feed."))]
FeedRender { source: AsXmlError },
}
impl HttpStatus for SingleError {
fn status_code(&self) -> StatusCode {
match self {
SingleError::SeriesData { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
SingleError::FeedRender { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
http_error!(SingleError);
/// Render a single series as an OPDS entry embedded in a feed.
#[utoipa::path(
get,
path = "/series/{id}",
tag = TAG,
responses(
(status = OK, content_type = "application/atom+xml"),
(status = 500, description = "Server failure.", body = ErrorResponse)
)
)]
pub async fn single(
Path(id): Path<u64>,
State(state): State<Arc<AppState>>,
) -> Result<Response, SingleError> {
let (series, books) = series::single(id, &state.calibre, &state.config.library_path)
.await
.context(SeriesDataSnafu)?;
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: format!("/opds/series/{}", series.id),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:series:{}", series.id),
&series.name,
self_link,
vec![],
entries,
);
let xml = feed.as_xml().context(FeedRenderSnafu)?;
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
}

View file

@ -1,13 +1,25 @@
//! Deal with cursor pagination. //! Deal with cursor pagination.
use super::error::HandlerError;
use crate::templates::TEMPLATES; use crate::templates::TEMPLATES;
use axum::response::{Html, IntoResponse, Response};
use calibre_db::data::error::DataStoreError; use calibre_db::data::error::DataStoreError;
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
use serde::Serialize; use serde::Serialize;
use snafu::{ResultExt, Snafu};
use std::fmt::Debug; use std::fmt::Debug;
use tera::Context; use tera::Context;
#[derive(Debug, Snafu)]
pub enum PaginationError {
#[snafu(display("Failed to fetch pagination data."))]
Fetch { source: DataStoreError },
#[snafu(display("Failed to render template."))]
Template { source: tera::Error },
#[snafu(display("Failed to fetch previous items."))]
Previous { source: DataStoreError },
#[snafu(display("Failed to fetch more items."))]
More { source: DataStoreError },
}
/// Render a tera template with paginated items and generate back and forth links. /// Render a tera template with paginated items and generate back and forth links.
pub fn render<T: Serialize + Debug, F, S, P, M>( pub fn render<T: Serialize + Debug, F, S, P, M>(
template: &str, template: &str,
@ -15,7 +27,7 @@ pub fn render<T: Serialize + Debug, F, S, P, M>(
sort_field: S, sort_field: S,
has_previous: P, has_previous: P,
has_more: M, has_more: M,
) -> Result<Response, poem::Error> ) -> Result<Response, PaginationError>
where where
F: Fn() -> Result<Vec<T>, DataStoreError>, F: Fn() -> Result<Vec<T>, DataStoreError>,
S: Fn(&T) -> String, S: Fn(&T) -> String,
@ -25,11 +37,11 @@ where
let mut context = Context::new(); let mut context = Context::new();
context.insert("nav", template); context.insert("nav", template);
let items = fetcher().map_err(HandlerError::DataError)?; let items = fetcher().context(FetchSnafu)?;
if items.is_empty() { if items.is_empty() {
return Ok(TEMPLATES return Ok(TEMPLATES
.render("empty", &context) .render("empty", &context)
.map_err(InternalServerError) .context(TemplateSnafu)
.map(Html)? .map(Html)?
.into_response()); .into_response());
} }
@ -39,8 +51,8 @@ where
let (backward_cursor, forward_cursor) = (sort_field(first_item), sort_field(last_item)); let (backward_cursor, forward_cursor) = (sort_field(first_item), sort_field(last_item));
let has_previous = has_previous(&backward_cursor).map_err(HandlerError::DataError)?; let has_previous = has_previous(&backward_cursor).context(PreviousSnafu)?;
let has_more = has_more(&forward_cursor).map_err(HandlerError::DataError)?; let has_more = has_more(&forward_cursor).context(MoreSnafu)?;
context.insert("has_previous", &has_previous); context.insert("has_previous", &has_previous);
context.insert("has_more", &has_more); context.insert("has_more", &has_more);
@@ -50,7 +62,7 @@ where
Ok(TEMPLATES Ok(TEMPLATES
.render(template, &context) .render(template, &context)
.map_err(InternalServerError) .context(TemplateSnafu)
.map(Html)? .map(Html)?
.into_response()) .into_response())
} }
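
For orientation, a minimal sketch of a caller ported to the new signature, modeled on the author listing handlers elsewhere in this diff (the helper names and the page size of 25 are taken from those call sites):

use axum::response::Response;
use calibre_db::{calibre::Calibre, data::pagination::SortOrder};

fn authors_page(
    calibre: &Calibre,
    cursor: Option<&str>,
    sort_order: &SortOrder,
) -> Result<Response, PaginationError> {
    render(
        "authors",
        // Fetch one page of authors, 25 at a time.
        || calibre.authors(25, cursor, sort_order),
        // Cursor value for an item: its sort string.
        |author| author.sort.clone(),
        |cursor| calibre.has_previous_authors(cursor),
        |cursor| calibre.has_more_authors(cursor),
    )
}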

View file

View file

@@ -0,0 +1,53 @@
//! HTTP route definitions and router configuration.
use std::sync::Arc;
use axum::{response::Redirect, routing::get};
use tower_http::trace::TraceLayer;
use utoipa_axum::{router::OpenApiRouter, routes};
use crate::{
api::{html, opds, static_files},
app_state::AppState,
};
/// Create the main API router with all endpoints and middleware.
pub fn router(state: AppState) -> OpenApiRouter {
let store = Arc::new(state);
let opds_routes = OpenApiRouter::new()
.routes(routes!(opds::books::handler))
.routes(routes!(opds::recent::handler))
.routes(routes!(opds::series::handler))
.routes(routes!(opds::series::single))
.routes(routes!(opds::authors::handler))
.routes(routes!(opds::authors::single))
.routes(routes!(opds::search::handler))
.routes(routes!(opds::search::info))
.layer(TraceLayer::new_for_http())
.with_state(store.clone());
let html_routes = OpenApiRouter::new()
.route("/", get(|| async { Redirect::permanent("/recent") }))
.routes(routes!(html::recent::handler))
.routes(routes!(html::books::handler_init))
.routes(routes!(html::books::handler))
.routes(routes!(html::books::download))
.routes(routes!(html::series::handler_init))
.routes(routes!(html::series::handler))
.routes(routes!(html::series::single))
.routes(routes!(html::authors::handler_init))
.routes(routes!(html::authors::handler))
.routes(routes!(html::authors::single))
.routes(routes!(html::cover::thumbnail))
.routes(routes!(html::cover::full))
.routes(routes!(html::search::handler))
.routes(routes!(html::archive::handler))
.routes(routes!(static_files::handler))
.layer(TraceLayer::new_for_http())
.with_state(store.clone());
OpenApiRouter::new()
.merge(html_routes)
.nest("/opds", opds_routes)
}
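
A hypothetical sketch of how this router might be wired into a server (the commit's real api::serve is not shown in this hunk; split_for_parts() is the utoipa-axum call that separates the plain axum Router from the generated OpenAPI document):

use std::net::SocketAddr;

pub async fn serve(addr: SocketAddr, state: AppState) -> std::io::Result<()> {
    // Drop the OpenAPI document here; it could also be served, e.g. as JSON.
    let (router, _openapi) = router(state).split_for_parts();
    let listener = tokio::net::TcpListener::bind(addr).await?;
    axum::serve(listener, router).await
}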

View file

@@ -0,0 +1,28 @@
use std::path::Path;
use calibre_db::{calibre::Calibre, data::error::DataStoreError};
use snafu::{ResultExt, Snafu};
use crate::data::book::Book;
#[derive(Debug, Snafu)]
pub enum SearchQueryError {
#[snafu(display("Failed to search for books."))]
Db { source: DataStoreError },
}
pub async fn query(
query: &str,
calibre: &Calibre,
library_path: &Path,
) -> Result<Vec<Book>, SearchQueryError> {
let books = calibre
.search(query)
.context(DbSnafu)?
.iter()
.filter_map(|book| Book::full_book(book, calibre, library_path))
.collect();
Ok(books)
}
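
A sketch of the intended call shape from a handler; the module path and error selector are assumptions, while the state fields match the AppState usage elsewhere in this commit:

// Hypothetical handler fragment; SearchSnafu is an assumed context selector.
let books = search::query(&params.query, &state.calibre, &state.config.library_path)
    .await
    .context(SearchSnafu)?;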

View file

@@ -0,0 +1,32 @@
use std::path::Path;
use calibre_db::{
calibre::Calibre,
data::{error::DataStoreError, series::Series},
};
use snafu::{ResultExt, Snafu};
use crate::data::book::Book;
#[derive(Debug, Snafu)]
pub enum SingleSeriesError {
#[snafu(display("Failed to fetch series data."))]
SeriesData { source: DataStoreError },
#[snafu(display("Failed to fetch books in series."))]
BookData { source: DataStoreError },
}
pub async fn single(
id: u64,
calibre: &Calibre,
library_path: &Path,
) -> Result<(Series, Vec<Book>), SingleSeriesError> {
let series = calibre.scalar_series(id).context(SeriesDataSnafu)?;
let books = calibre.series_books(id).context(BookDataSnafu)?;
let books = books
.iter()
.filter_map(|x| Book::full_book(x, calibre, library_path))
.collect::<Vec<Book>>();
Ok((series, books))
}

View file

@@ -0,0 +1,48 @@
use axum::{
http::{StatusCode, Uri, header},
response::{IntoResponse, Response},
};
use rust_embed::RustEmbed;
/// Embed static files.
#[derive(RustEmbed)]
#[folder = "static"]
pub struct Files;
/// Get static file from the 'static' folder.
#[utoipa::path(
get,
path = "/static/{*file}",
responses(
(status = 200, description = "Static file"),
(status = 404, description = "No such file within 'static'", body = String)
)
)]
pub async fn handler(uri: Uri) -> impl IntoResponse {
let mut path = uri.path().trim_start_matches('/').to_string();
if path.starts_with("static/") {
path = path.replace("static/", "");
}
StaticFile(path)
}
pub struct StaticFile<T>(pub T);
impl<T> IntoResponse for StaticFile<T>
where
T: Into<String>,
{
fn into_response(self) -> Response {
let path = self.0.into();
match Files::get(path.as_str()) {
Some(content) => {
let mime = mime_guess::from_path(path).first_or_octet_stream();
([(header::CONTENT_TYPE, mime.as_ref())], content.data).into_response()
}
None => (StatusCode::NOT_FOUND, "404 Not Found").into_response(),
}
}
}
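
The wrapper can also be exercised directly; "style.css" here is an illustrative asset name, not necessarily one that ships in static/:

// 200 with a guessed Content-Type if the asset is embedded, else a plain 404.
let response = StaticFile("style.css").into_response();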

View file

@@ -1,43 +1,38 @@
//! Handle caching of files, specifically book covers. //! Handle caching of files, specifically book covers.
use std::{ use std::{
fmt,
fs::{self, File}, fs::{self, File},
io,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use sha2::{ use sha2::{
digest::{generic_array::GenericArray, typenum::U32},
Digest, Sha256, Digest, Sha256,
digest::{generic_array::GenericArray, typenum::U32},
}; };
use snafu::{ResultExt, Snafu};
use std::fmt::Write; use std::fmt::Write;
use thiserror::Error;
use tracing::debug; use tracing::debug;
/// Errors from dealing with file caching. /// Errors from converting a hash to its string representation.
#[derive(Error, Debug)] #[derive(Debug, Snafu)]
pub enum CacheError { pub enum HashToPathError {
/// Error converting a hash to its string representation. #[snafu(display("Failed to generate string representation of hash."))]
#[error("failed to access thumbnail")] ToString { source: fmt::Error },
HashError(#[from] std::fmt::Error),
/// Error creating a thumbnail for an image.
#[error("failed to create thumbnail")]
ImageError(#[from] image::ImageError),
/// Error accessing a thumbnail.
#[error("failed to access thumbnail")]
ThumbnailAccessError(#[from] std::io::Error),
/// Error accessing thumbnail directories.
#[error("failed to access thumbnail directory")]
ThumbnailPathError(PathBuf),
} }
/// Convert a hash into its path representation inside the cache directory. /// Convert a hash into its path representation inside the cache directory.
/// ///
/// First hash character is the top folder, second character the second level folder and the rest /// First hash character is the top folder, second character the second level folder and the rest
/// is the filename. /// is the filename.
fn hash_to_path(hash: GenericArray<u8, U32>, cache_path: &Path) -> Result<PathBuf, CacheError> { fn hash_to_path(
hash: GenericArray<u8, U32>,
cache_path: &Path,
) -> Result<PathBuf, HashToPathError> {
let mut hash_string = String::new(); let mut hash_string = String::new();
for byte in hash { for byte in hash {
write!(&mut hash_string, "{:02x}", byte)?; write!(&mut hash_string, "{:02x}", byte).context(ToStringSnafu)?;
} }
let hash = hash_string; let hash = hash_string;
@@ -51,37 +46,78 @@ fn hash_to_path(hash: GenericArray<u8, U32>, cache_path: &Path) -> Result<PathBu
.join(remaining_segment)) .join(remaining_segment))
} }
/// Errors from creating a thumbnail.
#[derive(Debug, Snafu)]
pub enum CreateThumbnailError {
#[snafu(display("No parent for {path}."))]
ParentDir { path: String },
#[snafu(display("Failed to create thumbnail path {path}."))]
ThumbnailDir { source: io::Error, path: String },
#[snafu(display("Failed to open image {path}."))]
ImageOpen {
source: image::ImageError,
path: String,
},
#[snafu(display("Failed to save image to path {path}."))]
ImageSave {
source: image::ImageError,
path: String,
},
}
/// Create a thumbnail for `cover_path` at `thumbnail_path`. /// Create a thumbnail for `cover_path` at `thumbnail_path`.
fn create_thumbnail(cover_path: &Path, thumbnail_path: &Path) -> Result<(), CacheError> { fn create_thumbnail(cover_path: &Path, thumbnail_path: &Path) -> Result<(), CreateThumbnailError> {
debug!("creating thumbnail for {}", cover_path.to_string_lossy()); debug!("creating thumbnail for {}", cover_path.to_string_lossy());
let folders = thumbnail_path let folders = thumbnail_path.parent().ok_or_else(|| {
.parent() ParentDirSnafu {
.ok_or_else(|| CacheError::ThumbnailPathError(thumbnail_path.to_path_buf()))?; path: thumbnail_path.to_string_lossy(),
fs::create_dir_all(folders)?; }
.build()
})?;
fs::create_dir_all(folders).context(ThumbnailDirSnafu {
path: folders.to_string_lossy(),
})?;
const THUMBNAIL_SIZE: u32 = 512; const THUMBNAIL_SIZE: u32 = 512;
let img = image::open(cover_path)?; let img = image::open(cover_path).context(ImageOpenSnafu {
path: cover_path.to_string_lossy(),
})?;
let thumbnail = img.thumbnail(THUMBNAIL_SIZE, THUMBNAIL_SIZE); let thumbnail = img.thumbnail(THUMBNAIL_SIZE, THUMBNAIL_SIZE);
thumbnail.save_with_format(thumbnail_path, image::ImageFormat::Jpeg)?; thumbnail
.save_with_format(thumbnail_path, image::ImageFormat::Jpeg)
.context(ImageSaveSnafu {
path: thumbnail_path.to_string_lossy(),
})?;
debug!("saved thumbnail to {}", thumbnail_path.to_string_lossy()); debug!("saved thumbnail to {}", thumbnail_path.to_string_lossy());
Ok(()) Ok(())
} }
/// Errors from retrieving a thumbnail.
#[derive(Debug, Snafu)]
pub enum RetrieveThumbnailError {
#[snafu(display("Failed to convert hash to string."))]
HashToPath { source: HashToPathError },
#[snafu(display("Failed to create not yet existing thumbnail."))]
CreateThumbnail { source: CreateThumbnailError },
#[snafu(display("Failed to open thumbnail."))]
OpenThumbnail { source: io::Error },
}
/// Get the thumbnail for a book cover. /// Get the thumbnail for a book cover.
/// ///
/// If a thumbnail does not yet exist, create it. /// If a thumbnail does not yet exist, create it.
pub fn get_thumbnail(cover_path: &Path, cache_path: &Path) -> Result<File, CacheError> { pub fn get_thumbnail(cover_path: &Path, cache_path: &Path) -> Result<File, RetrieveThumbnailError> {
let path_str = cover_path.to_string_lossy(); let path_str = cover_path.to_string_lossy();
let mut hasher = Sha256::new(); let mut hasher = Sha256::new();
hasher.update(path_str.as_bytes()); hasher.update(path_str.as_bytes());
let hash = hasher.finalize(); let hash = hasher.finalize();
let thumbnail_path = hash_to_path(hash, cache_path)?; let thumbnail_path = hash_to_path(hash, cache_path).context(HashToPathSnafu)?;
if !thumbnail_path.exists() { if !thumbnail_path.exists() {
create_thumbnail(cover_path, &thumbnail_path)?; create_thumbnail(cover_path, &thumbnail_path).context(CreateThumbnailSnafu)?;
} }
Ok(File::open(thumbnail_path)?) File::open(thumbnail_path).context(OpenThumbnailSnafu)
} }
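
To make the cache layout concrete, a small sketch of where a thumbnail lands (the cover path is hypothetical; sha2 is the crate already in use above):

use sha2::{Digest, Sha256};
use std::path::Path;

let hash = Sha256::digest(b"/library/Some Author/Some Book (42)/cover.jpg");
let hex: String = hash.iter().map(|byte| format!("{byte:02x}")).collect();
let (top, rest) = hex.split_at(1);
let (second, file) = rest.split_at(1);
// First hex char, second hex char, then the remaining 62 chars as file name,
// e.g. <cache>/d/e/adbeef...
let thumbnail = Path::new("/cache").join(top).join(second).join(file);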

View file

@@ -7,26 +7,27 @@ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use thiserror::Error;
use tracing::info; use tracing::info;
use crate::cli::Cli; use crate::cli::Cli;
use snafu::{ResultExt, Snafu};
/// Errors when dealing with application configuration. /// Errors from loading application configuration.
#[derive(Error, Debug)] #[derive(Debug, Snafu)]
pub enum ConfigError { pub enum LoadError {
/// Calibre library path does not exist. #[snafu(display("{path} is not a calibre library."))]
#[error("no folder at {0}")] LibraryPath { path: String },
LibraryPathNotFound(String), #[snafu(display("Could not find calibre metadata at {path}."))]
/// Calibre database does not exist. MetadataPath { path: String },
#[error("no metadata.db in {0}")] #[snafu(display("Invalid listening address {listen_address}."))]
MetadataNotFound(String), ListeningAddressParse {
/// Error converting a string to a listening address. source: io::Error,
#[error("failed to convert into listening address")] listen_address: String,
ListeningAddressError(String), },
/// Error accessing the configured cache path. #[snafu(display("Invalid listening address {listen_address}."))]
#[error("failed to access cache path")] ListeningAddress { listen_address: String },
CachePathError(#[from] io::Error), #[snafu(display("Failed to create cache directory at {path}."))]
CacheDir { source: io::Error, path: String },
} }
/// Application configuration. /// Application configuration.
@@ -44,7 +45,7 @@ pub struct Config {
impl Config { impl Config {
/// Check if the calibre library from `args` exists and if the calibre database can be found. /// Check if the calibre library from `args` exists and if the calibre database can be found.
pub fn load(args: &Cli) -> Result<Self, ConfigError> { pub fn load(args: &Cli) -> Result<Self, LoadError> {
let library_path = Path::new(&args.library_path).to_path_buf(); let library_path = Path::new(&args.library_path).to_path_buf();
if !library_path.exists() { if !library_path.exists() {
@@ -53,7 +54,7 @@ impl Config {
.to_str() .to_str()
.unwrap_or("<failed to parse path>") .unwrap_or("<failed to parse path>")
.to_string(); .to_string();
return Err(ConfigError::LibraryPathNotFound(library_path)); return LibraryPathSnafu { path: library_path }.fail();
} }
let metadata_path = library_path.join("metadata.db"); let metadata_path = library_path.join("metadata.db");
@@ -63,18 +64,24 @@ impl Config {
.to_str() .to_str()
.unwrap_or("<failed to parse path>") .unwrap_or("<failed to parse path>")
.to_string(); .to_string();
return Err(ConfigError::MetadataNotFound(metadata_path)); return MetadataPathSnafu {
path: metadata_path,
}
.fail();
} }
let listen_address = args let listen_address = args
.listen_address .listen_address
.to_socket_addrs() .to_socket_addrs()
.map_err(|e| { .context(ListeningAddressParseSnafu {
ConfigError::ListeningAddressError(format!("{}: {e:?}", args.listen_address)) listen_address: args.listen_address.clone(),
})? })?
.next() .next()
.ok_or(ConfigError::ListeningAddressError( .ok_or(
args.listen_address.clone(), ListeningAddressSnafu {
))?; listen_address: args.listen_address.clone(),
}
.build(),
)?;
let cache_path = if args.cache_path.starts_with("$TMP") { let cache_path = if args.cache_path.starts_with("$TMP") {
let cache_base = env::var("XDG_CACHE_HOME") let cache_base = env::var("XDG_CACHE_HOME")
@@ -83,7 +90,9 @@ impl Config {
} else { } else {
PathBuf::from(&args.cache_path) PathBuf::from(&args.cache_path)
}; };
fs::create_dir_all(&cache_path)?; fs::create_dir_all(&cache_path).context(CacheDirSnafu {
path: cache_path.to_string_lossy(),
})?;
info!("Using {} for cache", cache_path.to_string_lossy()); info!("Using {} for cache", cache_path.to_string_lossy());
Ok(Self { Ok(Self {
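
For reference, the address resolution step in isolation (the address string is illustrative):

use std::net::{SocketAddr, ToSocketAddrs};

// "localhost:3000" may yield both IPv4 and IPv6 entries; Config::load keeps
// the first and treats an empty candidate list as a configuration error.
fn first_addr(spec: &str) -> std::io::Result<Option<SocketAddr>> {
    Ok(spec.to_socket_addrs()?.next())
}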

View file

@@ -0,0 +1 @@
pub mod book;

View file

@@ -2,13 +2,12 @@
use std::{collections::HashMap, fmt::Display, path::Path}; use std::{collections::HashMap, fmt::Display, path::Path};
use calibre_db::data::{ use calibre_db::{
author::Author as DbAuthor, book::Book as DbBook, series::Series as DbSeries, calibre::Calibre,
data::{author::Author as DbAuthor, book::Book as DbBook, series::Series as DbSeries},
}; };
use serde::Serialize; use serde::Serialize;
use crate::app_state::AppState;
/// Wrapper type for a file format string (must be a struct in order to implement traits). /// Wrapper type for a file format string (must be a struct in order to implement traits).
#[derive(Debug, Clone, Serialize, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Serialize, Eq, PartialEq, Hash)]
pub struct Format(pub String); pub struct Format(pub String);
@@ -92,10 +91,10 @@ impl Book {
/// Wrap a [`DbBook`](struct@calibre_db::data::book::Book) in a [`Book`](struct@Book) by /// Wrap a [`DbBook`](struct@calibre_db::data::book::Book) in a [`Book`](struct@Book) by
/// fetching additional information about author, formats and series. /// fetching additional information about author, formats and series.
pub fn full_book(book: &DbBook, state: &AppState) -> Option<Book> { pub fn full_book(book: &DbBook, calibre: &Calibre, library_path: &Path) -> Option<Book> {
let formats = Book::formats(book, &state.config.library_path); let formats = Book::formats(book, library_path);
let author = state.calibre.book_author(book.id).ok()?; let author = calibre.book_author(book.id).ok()?;
let series = state.calibre.book_series(book.id).ok()?; let series = calibre.book_series(book.id).ok()?;
Some(Book::from_db_book(book, series, author, formats)) Some(Book::from_db_book(book, series, author, formats))
} }
} }

View file

@@ -1,38 +0,0 @@
//! Handle requests for a single author.
use std::sync::Arc;
use calibre_db::data::pagination::SortOrder;
use poem::{
handler,
web::{Data, Path},
Response,
};
use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept};
/// Handle a request for an author with `id` and decide whether to render to html or OPDS.
#[handler]
pub async fn handler(
id: Path<u64>,
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
let author = state
.calibre
.scalar_author(*id)
.map_err(HandlerError::DataError)?;
let books = state
.calibre
.author_books(*id, u32::MAX.into(), None, SortOrder::ASC)
.map_err(HandlerError::DataError)?;
let books = books
.iter()
.filter_map(|x| Book::full_book(x, &state))
.collect::<Vec<Book>>();
match accept.0 {
Accept::Html => crate::handlers::html::author::handler(author, books).await,
Accept::Opds => crate::handlers::opds::author::handler(author, books).await,
}
}

View file

@@ -1,44 +0,0 @@
//! Handle requests for multiple authors.
use std::sync::Arc;
use calibre_db::{calibre::Calibre, data::pagination::SortOrder};
use poem::{
handler,
web::{Data, Path},
Response,
};
use crate::{app_state::AppState, Accept};
/// Handle a request for multiple authors, starting at the first.
#[handler]
pub async fn handler_init(
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
authors(&accept, &state.calibre, None, &SortOrder::ASC).await
}
/// Handle a request for multiple authors, starting at the `cursor` and going in the direction of
/// `sort_order`.
#[handler]
pub async fn handler(
Path((cursor, sort_order)): Path<(String, SortOrder)>,
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
authors(&accept, &state.calibre, Some(&cursor), &sort_order).await
}
async fn authors(
accept: &Accept,
calibre: &Calibre,
cursor: Option<&str>,
sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
match accept {
Accept::Html => crate::handlers::html::authors::handler(calibre, cursor, sort_order).await,
Accept::Opds => crate::handlers::opds::authors::handler(calibre, cursor, sort_order).await,
}
}

View file

@@ -1,77 +0,0 @@
//! Handle requests for multiple books.
use std::sync::Arc;
use calibre_db::data::pagination::SortOrder;
use poem::{
error::NotFoundError,
handler,
web::{Data, Path},
Response,
};
use tokio::fs::File;
use crate::{
app_state::AppState,
data::book::{Book, Format},
handlers::error::HandlerError,
opds::media_type::MediaType,
Accept,
};
/// Handle a request for multiple books, starting at the first.
#[handler]
pub async fn handler_init(
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
books(&accept, &state, None, &SortOrder::ASC).await
}
/// Handle a request for multiple books, starting at the `cursor` and going in the direction of
/// `sort_order`.
#[handler]
pub async fn handler(
Path((cursor, sort_order)): Path<(String, SortOrder)>,
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
books(&accept, &state, Some(&cursor), &sort_order).await
}
/// Handle a request for a book with id `id` in format `format`.
#[handler]
pub async fn handler_download(
Path((id, format)): Path<(u64, String)>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
let book = state
.calibre
.scalar_book(id)
.map_err(HandlerError::DataError)?;
let book = Book::full_book(&book, &state).ok_or(NotFoundError)?;
let format = Format(format);
let file_name = book.formats.get(&format).ok_or(NotFoundError)?;
let file_path = state
.config
.library_path
.join(book.data.path)
.join(file_name);
let mut file = File::open(file_path).await.map_err(|_| NotFoundError)?;
let content_type: MediaType = format.into();
let content_type = format!("{content_type}");
crate::handlers::download::handler(file_name, file, &content_type).await
}
async fn books(
accept: &Accept,
state: &Arc<AppState>,
cursor: Option<&str>,
sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
match accept {
Accept::Html => crate::handlers::html::books::handler(state, cursor, sort_order).await,
Accept::Opds => crate::handlers::opds::books::handler(state, cursor, sort_order).await,
}
}

View file

@@ -1,74 +0,0 @@
//! Handle requests for cover images.
use std::{fs::File, path::Path as FilePath, sync::Arc};
use crate::{
app_state::AppState,
cache::{self, CacheError},
config::Config,
handlers::error::HandlerError,
};
use calibre_db::calibre::Calibre;
use poem::{
error::NotFoundError,
handler,
web::{headers::ContentType, Data, Path},
Response,
};
use thiserror::Error;
use tokio::fs::File as AsyncFile;
/// Errors from fetching cover images.
#[derive(Error, Debug)]
pub enum CoverError {
/// Error fetching a cover thumbnail.
#[error("failed to access thumbnail")]
ThumbnailError(#[from] CacheError),
/// Error fetching a full cover.
#[error("failed access cover")]
FullCoverError(#[from] std::io::Error),
}
/// Handle a request for the cover thumbnail of book with id `id`.
#[handler]
pub async fn handler_thumbnail(
id: Path<u64>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
cover(
&state.calibre,
&state.config,
*id,
|cover_path, cache_path| Ok(cache::get_thumbnail(cover_path, cache_path)?),
)
.await
}
/// Handle a request for the cover image of book with id `id`.
#[handler]
pub async fn handler_full(
id: Path<u64>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
cover(&state.calibre, &state.config, *id, |cover_path, _| {
Ok(File::open(cover_path)?)
})
.await
}
async fn cover<F>(
calibre: &Calibre,
config: &Config,
id: u64,
f: F,
) -> Result<Response, poem::Error>
where
F: Fn(&FilePath, &FilePath) -> Result<File, CoverError>,
{
let book = calibre.scalar_book(id).map_err(HandlerError::DataError)?;
let cover_path = config.library_path.join(book.path).join("cover.jpg");
let cover = f(&cover_path, &config.cache_path).map_err(|_| NotFoundError)?;
let cover = AsyncFile::from_std(cover);
crate::handlers::download::handler("cover.jpg", cover, &ContentType::jpeg().to_string()).await
}

View file

@@ -1,23 +0,0 @@
//! Handle requests for specific formats of a book.
use tokio::io::AsyncRead;
use poem::{Body, IntoResponse, Response};
use tokio_util::io::ReaderStream;
/// Handle a request for a file.
///
/// Must not be used directly from a route as that makes it vulnerable to path traversal attacks.
pub async fn handler<A: AsyncRead + Send + 'static>(
file_name: &str,
reader: A,
content_type: &str,
) -> Result<Response, poem::Error> {
let stream = ReaderStream::new(reader);
let body = Body::from_bytes_stream(stream);
Ok(body
.with_content_type(content_type)
.with_header("Content-Disposition", format!("filename=\"{file_name}\""))
.into_response())
}

View file

@@ -1,57 +0,0 @@
//! Error handling for requests handlers.
use calibre_db::data::error::DataStoreError;
use poem::{error::ResponseError, http::StatusCode, Body, Response};
use thiserror::Error;
use tracing::error;
use uuid::Uuid;
use crate::opds::error::OpdsError;
/// Errors happening during handling of requests.
#[derive(Error, Debug)]
#[error("opds error")]
pub enum HandlerError {
/// Error rendering OPDS.
#[error("opds error")]
OpdsError(#[from] OpdsError),
/// Error fetching data from calibre.
#[error("data error")]
DataError(#[from] DataStoreError),
}
/// Convert a [`HandlerError`](enum@HandlerError) into a suitable response error.
///
/// Log the real error (internal) with an uuid and send a suitable error message to the user with
/// the same uuid (for correlation purposes).
impl ResponseError for HandlerError {
fn status(&self) -> StatusCode {
match &self {
HandlerError::OpdsError(_) => StatusCode::INTERNAL_SERVER_ERROR,
HandlerError::DataError(e) => match e {
DataStoreError::NoResults(_) => StatusCode::NOT_FOUND,
_ => StatusCode::INTERNAL_SERVER_ERROR,
},
}
}
fn as_response(&self) -> Response {
let id = Uuid::new_v4();
let internal_msg = format!("{:?}", self);
let external_msg = match &self {
HandlerError::OpdsError(_) => "internal server error",
HandlerError::DataError(e) => match e {
DataStoreError::NoResults(_) => "item not found",
_ => "internal server error",
},
};
error!("{id}: {internal_msg}");
let body = Body::from_json(serde_json::json!({
"id": id.to_string(),
"message": external_msg,
}))
.unwrap();
Response::builder().status(self.status()).body(body)
}
}

View file

@@ -1,21 +0,0 @@
//! Handle a single author for html.
use calibre_db::data::author::Author;
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
use tera::Context;
use crate::{data::book::Book, templates::TEMPLATES};
/// Render a single author in html.
pub async fn handler(author: Author, books: Vec<Book>) -> Result<Response, poem::Error> {
let mut context = Context::new();
context.insert("title", &author.name);
context.insert("nav", "authors");
context.insert("books", &books);
Ok(TEMPLATES
.render("book_list", &context)
.map_err(InternalServerError)
.map(Html)?
.into_response())
}

View file

@@ -1,21 +0,0 @@
//! Handle multiple authors in html.
use calibre_db::{calibre::Calibre, data::pagination::SortOrder};
use poem::Response;
use crate::handlers::paginated;
/// Render all authors paginated by cursor in html.
pub async fn handler(
calibre: &Calibre,
cursor: Option<&str>,
sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
paginated::render(
"authors",
|| calibre.authors(25, cursor, sort_order),
|author| author.sort.clone(),
|cursor| calibre.has_previous_authors(cursor),
|cursor| calibre.has_more_authors(cursor),
)
}

View file

@@ -1,26 +0,0 @@
//! Handle multiple books in html.
use calibre_db::data::pagination::SortOrder;
use poem::Response;
use crate::{app_state::AppState, data::book::Book, handlers::paginated};
/// Render all books paginated by cursor in html.
pub async fn handler(
state: &AppState,
cursor: Option<&str>,
sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
paginated::render(
"books",
|| {
state
.calibre
.books(25, cursor, sort_order)
.map(|x| x.iter().filter_map(|y| Book::full_book(y, state)).collect())
},
|book| book.data.sort.clone(),
|cursor| state.calibre.has_previous_books(cursor),
|cursor| state.calibre.has_more_books(cursor),
)
}

View file

@@ -1,20 +0,0 @@
//! Handle recent books in html.
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
use tera::Context;
use crate::{data::book::Book, templates::TEMPLATES};
/// Render recent books as html.
pub async fn handler(recent_books: Vec<Book>) -> Result<Response, poem::Error> {
let mut context = Context::new();
context.insert("title", "");
context.insert("nav", "recent");
context.insert("books", &recent_books);
Ok(TEMPLATES
.render("book_list", &context)
.map_err(InternalServerError)
.map(Html)?
.into_response())
}

View file

@@ -1,20 +0,0 @@
//! Handle search results in html.
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
use tera::Context;
use crate::{data::book::Book, templates::TEMPLATES};
/// Render all search results as html.
pub async fn handler(books: Vec<Book>) -> Result<Response, poem::Error> {
let mut context = Context::new();
context.insert("title", "Search Results");
context.insert("nav", "search");
context.insert("books", &books);
Ok(TEMPLATES
.render("book_list", &context)
.map_err(InternalServerError)
.map(Html)?
.into_response())
}

View file

@@ -1,21 +0,0 @@
//! Handle multiple series in html.
use calibre_db::{calibre::Calibre, data::pagination::SortOrder};
use poem::Response;
use crate::handlers::paginated;
/// Render all series paginated by cursor as html.
pub async fn handler(
calibre: &Calibre,
cursor: Option<&str>,
sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
paginated::render(
"series",
|| calibre.series(25, cursor, sort_order),
|series| series.sort.clone(),
|cursor| calibre.has_previous_series(cursor),
|cursor| calibre.has_more_series(cursor),
)
}

View file

@@ -1,21 +0,0 @@
//! Handle a single series in html.
use calibre_db::data::series::Series;
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
use tera::Context;
use crate::{data::book::Book, templates::TEMPLATES};
/// Render a single series as html.
pub async fn handler(series: Series, books: Vec<Book>) -> Result<Response, poem::Error> {
let mut context = Context::new();
context.insert("title", &series.name);
context.insert("nav", "series");
context.insert("books", &books);
Ok(TEMPLATES
.render("book_list", &context)
.map_err(InternalServerError)
.map(Html)?
.into_response())
}

View file

@@ -1,39 +0,0 @@
//! Handle a single author for opds.
use calibre_db::data::author::Author;
use poem::{IntoResponse, Response};
use time::OffsetDateTime;
use crate::{
data::book::Book,
handlers::error::HandlerError,
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
APP_NAME,
};
/// Render a single author as an OPDS entry embedded in a feed.
pub async fn handler(author: Author, books: Vec<Book>) -> Result<Response, poem::Error> {
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: format!("/opds/authors/{}", author.id),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}author:{}", author.id),
&author.name,
self_link,
vec![],
entries,
);
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
Ok(xml
.with_content_type("application/atom+xml")
.into_response())
}

View file

@@ -1,49 +0,0 @@
//! Handle multiple authors for opds.
use calibre_db::{
calibre::Calibre,
data::{author::Author as DbAuthor, pagination::SortOrder},
};
use poem::{IntoResponse, Response};
use time::OffsetDateTime;
use crate::{
handlers::error::HandlerError,
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
APP_NAME,
};
/// Render all authors as OPDS entries embedded in a feed.
pub async fn handler(
calibre: &Calibre,
_cursor: Option<&str>,
_sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
let authors: Vec<DbAuthor> = calibre
.authors(u32::MAX.into(), None, &SortOrder::ASC)
.map_err(HandlerError::DataError)?;
let entries: Vec<Entry> = authors.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: "/opds/authors".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:authors"),
"All Authors",
self_link,
vec![],
entries,
);
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
Ok(xml
.with_content_type("application/atom+xml")
.into_response())
}

View file

@@ -1,54 +0,0 @@
//! Handle multiple books for opds.
use calibre_db::data::pagination::SortOrder;
use poem::{IntoResponse, Response};
use time::OffsetDateTime;
use crate::{
app_state::AppState,
data::book::Book,
handlers::error::HandlerError,
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
APP_NAME,
};
/// Render all books as OPDS entries embedded in a feed.
pub async fn handler(
state: &AppState,
_cursor: Option<&str>,
_sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
let books: Vec<Book> = state
.calibre
.books(u32::MAX.into(), None, &SortOrder::ASC)
.map(|x| x.iter().filter_map(|y| Book::full_book(y, state)).collect())
.map_err(HandlerError::DataError)?;
render_books(books).await
}
/// Render a list of books as OPDS entries in a feed.
pub(crate) async fn render_books(books: Vec<Book>) -> Result<Response, poem::Error> {
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: "/opds/books".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:books"),
"All Books",
self_link,
vec![],
entries,
);
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
Ok(xml
.with_content_type("application/atom+xml")
.into_response())
}

View file

@@ -1,110 +0,0 @@
//! Handle the OPDS root feed.
use poem::{handler, web::WithContentType, IntoResponse};
use time::OffsetDateTime;
use crate::{
handlers::error::HandlerError,
opds::{
content::Content, entry::Entry, feed::Feed, link::Link, media_type::MediaType,
relation::Relation,
},
APP_NAME,
};
/// Render a root OPDS feed with links to the subsections (authors, books, series and recent).
#[handler]
pub async fn handler() -> Result<WithContentType<String>, poem::Error> {
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: "/opds".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let books_entry = Entry {
title: "Books".to_string(),
id: format!("{APP_NAME}:books"),
updated: now,
content: Some(Content {
media_type: MediaType::Text,
content: "Index of all books".to_string(),
}),
author: None,
links: vec![Link {
href: "/opds/books".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Subsection,
title: None,
count: None,
}],
};
let authors_entry = Entry {
title: "Authors".to_string(),
id: format!("{APP_NAME}:authors"),
updated: now,
content: Some(Content {
media_type: MediaType::Text,
content: "Index of all authors".to_string(),
}),
author: None,
links: vec![Link {
href: "/opds/authors".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Subsection,
title: None,
count: None,
}],
};
let series_entry = Entry {
title: "Series".to_string(),
id: format!("{APP_NAME}:series"),
updated: now,
content: Some(Content {
media_type: MediaType::Text,
content: "Index of all series".to_string(),
}),
author: None,
links: vec![Link {
href: "/opds/series".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Subsection,
title: None,
count: None,
}],
};
let recents_entry = Entry {
title: "Recent Additions".to_string(),
id: format!("{APP_NAME}:recentbooks"),
updated: now,
content: Some(Content {
media_type: MediaType::Text,
content: "Recently added books".to_string(),
}),
author: None,
links: vec![Link {
href: "/opds/recent".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Subsection,
title: None,
count: None,
}],
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:catalog"),
"Little Hesinde",
self_link,
vec![],
vec![authors_entry, series_entry, books_entry, recents_entry],
);
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
Ok(xml.with_content_type("application/atom+xml"))
}

View file

@@ -1,38 +0,0 @@
//! Handle recent books for OPDS.
use poem::{IntoResponse, Response};
use time::OffsetDateTime;
use crate::{
data::book::Book,
handlers::error::HandlerError,
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
APP_NAME,
};
/// Render recent books as OPDS entries embedded in a feed.
pub async fn handler(recent_books: Vec<Book>) -> Result<Response, poem::Error> {
let entries: Vec<Entry> = recent_books.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: "/opds/recent".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:recentbooks"),
"Recent Books",
self_link,
vec![],
entries,
);
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
Ok(xml
.with_content_type("application/atom+xml")
.into_response())
}

View file

@@ -1,12 +0,0 @@
//! Handle search results in opds.
use poem::Response;
use crate::data::book::Book;
use super::books::render_books;
/// Render search results as OPDS entries in a feed.
pub async fn handler(books: Vec<Book>) -> Result<Response, poem::Error> {
render_books(books).await
}

View file

@@ -1,27 +0,0 @@
//! Handle open search description.
use crate::{
handlers::error::HandlerError,
opds::search::{OpenSearchDescription, Url},
APP_NAME,
};
use poem::{handler, IntoResponse, Response};
/// Render search information as open search description.
#[handler]
pub async fn handler() -> Result<Response, poem::Error> {
let search = OpenSearchDescription {
short_name: APP_NAME.to_string(),
description: "Search for ebooks".to_string(),
input_encoding: "UTF-8".to_string(),
output_encoding: "UTF-8".to_string(),
url: Url {
type_name: "application/atom+xml".to_string(),
template: "/opds/search?query={searchTerms}".to_string(),
},
};
let xml = search.as_xml().map_err(HandlerError::OpdsError)?;
Ok(xml
.with_content_type("application/atom+xml")
.into_response())
}

View file

@@ -1,46 +0,0 @@
//! Handle multiple series for OPDS.
use calibre_db::{calibre::Calibre, data::pagination::SortOrder};
use poem::{IntoResponse, Response};
use time::OffsetDateTime;
use crate::{
handlers::error::HandlerError,
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
APP_NAME,
};
/// Render all series as OPDS entries embedded in a feed.
pub async fn handler(
calibre: &Calibre,
_cursor: Option<&str>,
_sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
let series = calibre
.series(u32::MAX.into(), None, &SortOrder::ASC)
.map_err(HandlerError::DataError)?;
let entries: Vec<Entry> = series.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: "/opds/series".to_string(),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:series"),
"All Series",
self_link,
vec![],
entries,
);
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
Ok(xml
.with_content_type("application/atom+xml")
.into_response())
}

View file

@@ -1,39 +0,0 @@
//! Handle a single series for opds.
use calibre_db::data::series::Series;
use poem::{IntoResponse, Response};
use time::OffsetDateTime;
use crate::{
data::book::Book,
handlers::error::HandlerError,
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
APP_NAME,
};
/// Render a single series as an OPDS entry embedded in a feed.
pub async fn handler(series: Series, books: Vec<Book>) -> Result<Response, poem::Error> {
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
let now = OffsetDateTime::now_utc();
let self_link = Link {
href: format!("/opds/series/{}", series.id),
media_type: MediaType::Navigation,
rel: Relation::Myself,
title: None,
count: None,
};
let feed = Feed::create(
now,
&format!("{APP_NAME}:series:{}", series.id),
&series.name,
self_link,
vec![],
entries,
);
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
Ok(xml
.with_content_type("application/atom+xml")
.into_response())
}

View file

@@ -1,28 +0,0 @@
//! Handle requests for recent books.
use std::sync::Arc;
use poem::{handler, web::Data, Response};
use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept};
/// Handle a request for recent books and decide whether to render to html or OPDS.
#[handler]
pub async fn handler(
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
let recent_books = state
.calibre
.recent_books(25)
.map_err(HandlerError::DataError)?;
let recent_books = recent_books
.iter()
.filter_map(|x| Book::full_book(x, &state))
.collect::<Vec<Book>>();
match accept.0 {
Accept::Html => crate::handlers::html::recent::handler(recent_books).await,
Accept::Opds => crate::handlers::opds::recent::handler(recent_books).await,
}
}

View file

@@ -1,38 +0,0 @@
//! Handle search requests.
use std::sync::Arc;
use poem::{
handler,
web::{Data, Query},
Response,
};
use serde::Deserialize;
use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept};
#[derive(Deserialize)]
struct Params {
/// Query for a search request.
query: String,
}
/// Handle a search request with query parameter `query`.
#[handler]
pub async fn handler(
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
Query(params): Query<Params>,
) -> Result<Response, poem::Error> {
let books = state
.calibre
.search(&params.query)
.map_err(HandlerError::DataError)?
.iter()
.filter_map(|book| Book::full_book(book, *state))
.collect();
match *accept {
Accept::Html => crate::handlers::html::search::handler(books).await,
Accept::Opds => crate::handlers::opds::search::handler(books).await,
}
}

View file

@@ -1,48 +0,0 @@
//! Handle requests for multiple series.
use std::sync::Arc;
use calibre_db::data::pagination::SortOrder;
use poem::{
handler,
web::{Data, Path},
Response,
};
use crate::{app_state::AppState, Accept};
/// Handle a request for multiple series, starting at the first.
#[handler]
pub async fn handler_init(
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
series(&accept, &state, None, &SortOrder::ASC).await
}
/// Handle a request for multiple series, starting at the `cursor` and going in the direction of
/// `sort_order`.
#[handler]
pub async fn handler(
Path((cursor, sort_order)): Path<(String, SortOrder)>,
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
series(&accept, &state, Some(&cursor), &sort_order).await
}
async fn series(
accept: &Accept,
state: &Arc<AppState>,
cursor: Option<&str>,
sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
match accept {
Accept::Html => {
crate::handlers::html::series::handler(&state.calibre, cursor, sort_order).await
}
Accept::Opds => {
crate::handlers::opds::series::handler(&state.calibre, cursor, sort_order).await
}
}
}

View file

@@ -1,37 +0,0 @@
//! Handle requests for a single series.
use std::sync::Arc;
use poem::{
handler,
web::{Data, Path},
Response,
};
use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept};
/// Handle a request for a series with `id` and decide whether to render to html or OPDS.
#[handler]
pub async fn handler(
id: Path<u64>,
accept: Data<&Accept>,
state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
let series = state
.calibre
.scalar_series(*id)
.map_err(HandlerError::DataError)?;
let books = state
.calibre
.series_books(*id)
.map_err(HandlerError::DataError)?;
let books = books
.iter()
.filter_map(|x| Book::full_book(x, &state))
.collect::<Vec<Book>>();
match accept.0 {
Accept::Html => crate::handlers::html::series_single::handler(series, books).await,
Accept::Opds => crate::handlers::opds::series_single::handler(series, books).await,
}
}

View file

@@ -1,11 +0,0 @@
use crate::{APP_NAME, VERSION};
use poem::{handler, Response};
const SOURCE_ARCHIVE: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/archive.zip"));
/// Handle a request for the source code of the server.
#[handler]
pub async fn handler() -> Result<Response, poem::Error> {
let file_name = format!("{APP_NAME}-{VERSION}.zip");
crate::handlers::download::handler(&file_name, SOURCE_ARCHIVE, "application/zip").await
}

View file

@@ -2,82 +2,29 @@
//! //!
//! Shamelessly written to scratch my own itches. //! Shamelessly written to scratch my own itches.
use std::sync::Arc; use api::ServeError;
use app_state::AppState; use app_state::AppState;
use calibre_db::calibre::Calibre; use calibre_db::calibre::{Calibre, LoadError};
use config::Config; use config::Config;
use poem::{ use snafu::{ResultExt, Snafu};
endpoint::EmbeddedFilesEndpoint, get, listener::TcpListener, middleware::Tracing, EndpointExt,
Route, Server,
};
use rust_embed::RustEmbed;
use tokio::signal; use tokio::signal;
use tracing::info; use tracing::info;
pub mod api;
pub mod app_state; pub mod app_state;
pub mod cache; pub mod cache;
pub mod cli; pub mod cli;
pub mod config; pub mod config;
/// Data structs and their functions. /// Data structs and their functions.
pub mod data { pub mod data;
pub mod book;
}
/// Request handlers. Because it cannot be guaranteed that a proper accept header is sent, the
/// routes are doubled and the decision on whether to render html or OPDS is made with internal
/// data on the respective routes.
pub mod handlers {
/// Handle requests for html.
pub mod html {
pub mod author;
pub mod authors;
pub mod books;
pub mod recent;
pub mod search;
pub mod series;
pub mod series_single;
}
/// Handle requests for OPDS.
pub mod opds {
pub mod author;
pub mod authors;
pub mod books;
pub mod feed;
pub mod recent;
pub mod search;
pub mod search_info;
pub mod series;
pub mod series_single;
}
pub mod author;
pub mod authors;
pub mod books;
pub mod cover;
pub mod download;
pub mod error;
pub mod paginated;
pub mod recent;
pub mod search;
pub mod series;
pub mod series_single;
pub mod source_archive;
}
/// OPDS data structs. /// OPDS data structs.
pub mod opds { pub mod opds;
pub mod author;
pub mod content;
pub mod entry;
pub mod error;
pub mod feed;
pub mod link;
pub mod media_type;
pub mod relation;
pub mod search;
}
pub mod templates; pub mod templates;
pub const APP_NAME: &str = "little-hesinde"; // App name from Cargo.toml
pub const VERSION: &str = "0.3.1"; const APP_NAME: &str = env!("CARGO_PKG_NAME");
// Version from Cargo.toml
const APP_VERSION: &str = env!("CARGO_PKG_VERSION");
/// Internal marker data in lieu of a proper `Accept` header. /// Internal marker data in lieu of a proper `Accept` header.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
@@ -88,73 +35,32 @@ pub enum Accept {
Opds, Opds,
} }
/// Embedd static files. /// Errors from running little-hesinde.
#[derive(RustEmbed)] #[derive(Debug, Snafu)]
#[folder = "static"] pub enum RunError {
pub struct Files; #[snafu(display("Failed to load calibre database."))]
LoadCalibre { source: LoadError },
#[snafu(display("Failed to run http server."))]
Serve { source: ServeError },
}
/// Main entry point to run the ebook server with a calibre library specified in `config`. /// Main entry point to run the ebook server with a calibre library specified in `config`.
pub async fn run(config: Config) -> Result<(), std::io::Error> { pub async fn run(config: Config) -> Result<(), RunError> {
let calibre = Calibre::load(&config.metadata_path).expect("failed to load calibre database"); let calibre = Calibre::load(&config.metadata_path).context(LoadCalibreSnafu)?;
let app_state = Arc::new(AppState { let app_state = AppState {
calibre, calibre,
config: config.clone(), config: config.clone(),
}); };
let html_routes = Route::new() let server = api::serve(config.listen_address, app_state);
.at("/", get(handlers::recent::handler))
.at("/books", get(handlers::books::handler_init))
.at("/books/:cursor/:sort_order", get(handlers::books::handler))
.at("/series", get(handlers::series::handler_init))
.at(
"/series/:cursor/:sort_order",
get(handlers::series::handler),
)
.at("/series/:id", get(handlers::series_single::handler))
.at("/authors", get(handlers::authors::handler_init))
.at("/authors/:id", get(handlers::author::handler))
.at(
"/authors/:cursor/:sort_order",
get(handlers::authors::handler),
)
.at("/cover/:id", get(handlers::cover::handler_full))
.at(
"/cover/:id/thumbnail",
get(handlers::cover::handler_thumbnail),
)
.at("/book/:id/:format", get(handlers::books::handler_download))
.at("/archive", get(handlers::source_archive::handler))
.at("/search", get(handlers::search::handler))
.nest("/static", EmbeddedFilesEndpoint::<Files>::new())
.data(Accept::Html);
let opds_routes = Route::new()
.at("/", get(handlers::opds::feed::handler))
.at("/recent", get(handlers::recent::handler))
.at("/books", get(handlers::books::handler_init))
.at("/authors", get(handlers::authors::handler_init))
.at("/authors/:id", get(handlers::author::handler))
.at("/series", get(handlers::series::handler_init))
.at("/series/:id", get(handlers::series_single::handler))
.at("/search/info", get(handlers::opds::search_info::handler))
.at("/search", get(handlers::search::handler))
.data(Accept::Opds);
let app = Route::new()
.nest("/", html_routes)
.nest("/opds", opds_routes)
.data(app_state)
.with(Tracing);
let server = Server::new(TcpListener::bind(config.listen_address))
.name("little-hesinde")
.run(app);
tokio::select! { tokio::select! {
_ = server => {}, res = server => {
res.context(ServeSnafu)
},
_ = signal::ctrl_c() => { _ = signal::ctrl_c() => {
info!("Received Ctrl+C, shutting down..."); info!("Received Ctrl+C, shutting down...");
Ok(())
}, },
} }
Ok(())
} }

View file

@@ -1,12 +1,39 @@
use clap::Parser; use clap::Parser;
use little_hesinde::{cli::Cli, config::Config}; use little_hesinde::{
RunError,
cli::Cli,
config::{Config, LoadError},
};
use snafu::{ResultExt, Snafu};
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
/// Top-level application errors.
#[derive(Debug, Snafu)]
pub enum Error {
#[snafu(display("Failed to load config."))]
Config { source: LoadError },
#[snafu(display("Failed to run little-hesinde."))]
Run { source: RunError },
}
#[tokio::main] #[tokio::main]
async fn main() -> Result<(), std::io::Error> { #[snafu::report]
tracing_subscriber::fmt::init(); async fn main() -> Result<(), Error> {
tracing_subscriber::registry()
.with(
tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| {
format!(
"{}=debug,tower_http=debug,axum::rejection=trace",
env!("CARGO_CRATE_NAME")
)
.into()
}),
)
.with(tracing_subscriber::fmt::layer())
.init();
let args = Cli::parse(); let args = Cli::parse();
let config = Config::load(&args).expect("failed to load configuration"); let config = Config::load(&args).context(ConfigSnafu)?;
little_hesinde::run(config).await little_hesinde::run(config).await.context(RunSnafu)
} }

View file

@@ -0,0 +1,9 @@
pub mod author;
pub mod content;
pub mod entry;
pub mod error;
pub mod feed;
pub mod link;
pub mod media_type;
pub mod relation;
pub mod search;

View file

@@ -2,26 +2,18 @@
use std::{io, string::FromUtf8Error}; use std::{io, string::FromUtf8Error};
use quick_xml::DeError; use quick_xml::SeError;
use thiserror::Error; use snafu::Snafu;
/// Errors happening during handling OPDS data. /// Errors happening during handling OPDS data.
#[derive(Error, Debug)] #[derive(Debug, Snafu)]
#[error("opds error")] pub enum AsXmlError {
pub enum OpdsError { #[snafu(display("Failed to serialize object."), visibility(pub))]
/// Error serializing OPDS data. ToString { source: SeError },
#[error("failed to serialize struct")] #[snafu(display("Failed to write xml event."), visibility(pub))]
SerializingError(#[from] DeError), WriteXmlEvent { source: io::Error },
/// Error parsing OPDS xml structure. #[snafu(display("Failed to read xml event."), visibility(pub))]
#[error("xml failure")] ReadXmlEvent { source: quick_xml::Error },
XmlError(#[from] quick_xml::Error), #[snafu(display("Failed to read bytes as utf8 string."), visibility(pub))]
/// Error decoding xml as UTF-8. BytesToUtf8 { source: FromUtf8Error },
#[error("failed to decode as utf-8")]
Utf8Error(#[from] FromUtf8Error),
/// Error parsing OPDS xml structure.
#[error("xml serialization failure")]
XmlSerializationError(#[from] quick_xml::SeError),
/// Error parsing OPDS xml structure.
#[error("xml io failure")]
XmlIoError(#[from] io::Error),
} }

View file

@@ -8,10 +8,15 @@ use quick_xml::{
se::to_string, se::to_string,
}; };
use serde::Serialize; use serde::Serialize;
use snafu::ResultExt;
use time::OffsetDateTime; use time::OffsetDateTime;
use super::{ use super::{
author::Author, entry::Entry, error::OpdsError, link::Link, media_type::MediaType, author::Author,
entry::Entry,
error::{AsXmlError, BytesToUtf8Snafu, ReadXmlEventSnafu, ToStringSnafu, WriteXmlEventSnafu},
link::Link,
media_type::MediaType,
relation::Relation, relation::Relation,
}; };
@@ -84,14 +89,16 @@ impl Feed {
} }
/// Serialize a feed to OPDS xml. /// Serialize a feed to OPDS xml.
pub fn as_xml(&self) -> Result<String, OpdsError> { pub fn as_xml(&self) -> Result<String, AsXmlError> {
let xml = to_string(&self)?; let xml = to_string(&self).context(ToStringSnafu)?;
let mut reader = Reader::from_str(&xml); let mut reader = Reader::from_str(&xml);
reader.config_mut().trim_text(true); reader.config_mut().trim_text(true);
let declaration = BytesDecl::new("1.0", Some("UTF-8"), None); let declaration = BytesDecl::new("1.0", Some("UTF-8"), None);
let mut writer = Writer::new(Cursor::new(Vec::new())); let mut writer = Writer::new(Cursor::new(Vec::new()));
writer.write_event(Event::Decl(declaration))?; writer
.write_event(Event::Decl(declaration))
.context(WriteXmlEventSnafu)?;
let mut feed_start = BytesStart::new("feed"); let mut feed_start = BytesStart::new("feed");
feed_start.push_attribute(("xmlns", "http://www.w3.org/2005/Atom")); feed_start.push_attribute(("xmlns", "http://www.w3.org/2005/Atom"));
@@ -103,15 +110,15 @@ impl Feed {
loop { loop {
match reader.read_event() { match reader.read_event() {
Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => { Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => writer
writer.write_event(Event::Start(feed_start.clone()))? .write_event(Event::Start(feed_start.clone()))
} .context(WriteXmlEventSnafu)?,
Ok(Event::Eof) => break, Ok(Event::Eof) => break,
Ok(e) => writer.write_event(e)?, Ok(e) => writer.write_event(e).context(WriteXmlEventSnafu)?,
Err(e) => return Err(e)?, Err(e) => return Err(e).context(ReadXmlEventSnafu)?,
} }
} }
let result = writer.into_inner().into_inner(); let result = writer.into_inner().into_inner();
Ok(String::from_utf8(result)?) String::from_utf8(result).context(BytesToUtf8Snafu)
} }
} }

View file

@@ -3,13 +3,16 @@
use std::io::Cursor; use std::io::Cursor;
use quick_xml::{ use quick_xml::{
Reader, Writer,
events::{BytesDecl, BytesStart, Event}, events::{BytesDecl, BytesStart, Event},
se::to_string, se::to_string,
Reader, Writer,
}; };
use serde::Serialize; use serde::Serialize;
use super::error::OpdsError; use super::error::{
AsXmlError, BytesToUtf8Snafu, ReadXmlEventSnafu, ToStringSnafu, WriteXmlEventSnafu,
};
use snafu::ResultExt;
/// Url pointing to a location. /// Url pointing to a location.
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
@@ -37,29 +40,31 @@ pub struct OpenSearchDescription {
impl OpenSearchDescription { impl OpenSearchDescription {
/// Serialize search information to an open search description xml. /// Serialize search information to an open search description xml.
pub fn as_xml(&self) -> Result<String, OpdsError> { pub fn as_xml(&self) -> Result<String, AsXmlError> {
let xml = to_string(&self)?; let xml = to_string(&self).context(ToStringSnafu)?;
let mut reader = Reader::from_str(&xml); let mut reader = Reader::from_str(&xml);
reader.config_mut().trim_text(true); reader.config_mut().trim_text(true);
let declaration = BytesDecl::new("1.0", Some("UTF-8"), None); let declaration = BytesDecl::new("1.0", Some("UTF-8"), None);
let mut writer = Writer::new(Cursor::new(Vec::new())); let mut writer = Writer::new(Cursor::new(Vec::new()));
writer.write_event(Event::Decl(declaration))?; writer
.write_event(Event::Decl(declaration))
.context(WriteXmlEventSnafu)?;
let mut search_start = BytesStart::new("OpenSearchDescription"); let mut search_start = BytesStart::new("OpenSearchDescription");
search_start.push_attribute(("xmlns", "http://a9.com/-/spec/opensearch/1.1/")); search_start.push_attribute(("xmlns", "http://a9.com/-/spec/opensearch/1.1/"));
loop { loop {
match reader.read_event() { match reader.read_event() {
Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => { Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => writer
writer.write_event(Event::Start(search_start.clone()))? .write_event(Event::Start(search_start.clone()))
} .context(WriteXmlEventSnafu)?,
Ok(Event::Eof) => break, Ok(Event::Eof) => break,
Ok(e) => writer.write_event(e)?, Ok(e) => writer.write_event(e).context(WriteXmlEventSnafu)?,
Err(e) => return Err(e)?, Err(e) => return Err(e).context(ReadXmlEventSnafu)?,
} }
} }
let result = writer.into_inner().into_inner(); let result = writer.into_inner().into_inner();
Ok(String::from_utf8(result)?) String::from_utf8(result).context(BytesToUtf8Snafu)
} }
} }