Compare commits
No commits in common. "57c0fe706ede7eacf5e097bc9e484d1a050dd5ee" and "1c95f4391faae3eff0f15b85a92689a05dd4991b" have entirely different histories.
57c0fe706e ... 1c95f4391f
79 changed files with 1644 additions and 3108 deletions
Cargo.lock (generated, 665 lines changed)

@@ -167,60 +167,6 @@ dependencies = [
 "arrayvec",
]

[[package]]
name = "axum"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5"
dependencies = [
 "axum-core",
 "bytes",
 "form_urlencoded",
 "futures-util",
 "http",
 "http-body",
 "http-body-util",
 "hyper",
 "hyper-util",
 "itoa",
 "matchit",
 "memchr",
 "mime",
 "percent-encoding",
 "pin-project-lite",
 "rustversion",
 "serde",
 "serde_json",
 "serde_path_to_error",
 "serde_urlencoded",
 "sync_wrapper",
 "tokio",
 "tower",
 "tower-layer",
 "tower-service",
 "tracing",
]

[[package]]
name = "axum-core"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6"
dependencies = [
 "bytes",
 "futures-core",
 "http",
 "http-body",
 "http-body-util",
 "mime",
 "pin-project-lite",
 "rustversion",
 "sync_wrapper",
 "tower-layer",
 "tower-service",
 "tracing",
]

[[package]]
name = "backtrace"
version = "0.3.75"
@@ -311,8 +257,8 @@ dependencies = [
 "r2d2_sqlite",
 "rusqlite",
 "serde",
 "snafu",
 "tempfile",
 "thiserror 1.0.69",
 "time",
]
@@ -343,6 +289,12 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"

[[package]]
name = "cfg_aliases"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"

[[package]]
name = "chrono"
version = "0.4.41"
@@ -555,17 +507,6 @@ dependencies = [
 "crypto-common",
]

[[package]]
name = "displaydoc"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "dyn-clone"
version = "1.0.19"
@@ -679,6 +620,17 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"

[[package]]
name = "futures-macro"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "futures-sink"
version = "0.3.31"
@@ -698,9 +650,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
dependencies = [
 "futures-core",
 "futures-macro",
 "futures-sink",
 "futures-task",
 "pin-project-lite",
 "pin-utils",
 "slab",
]

[[package]]
@@ -751,8 +706,8 @@ dependencies = [
 "aho-corasick",
 "bstr",
 "log",
 "regex-automata 0.4.9",
 "regex-syntax 0.8.5",
 "regex-automata",
 "regex-syntax",
]

[[package]]
@@ -809,6 +764,30 @@ dependencies = [
 "hashbrown 0.15.4",
]

[[package]]
name = "headers"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb"
dependencies = [
 "base64",
 "bytes",
 "headers-core",
 "http",
 "httpdate",
 "mime",
 "sha1",
]

[[package]]
name = "headers-core"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
dependencies = [
 "http",
]

[[package]]
name = "heck"
version = "0.5.0"
@@ -909,7 +888,6 @@ dependencies = [
 "hyper",
 "pin-project-lite",
 "tokio",
 "tower-service",
]

[[package]]
@@ -936,119 +914,12 @@ dependencies = [
 "cc",
]

[[package]]
name = "icu_collections"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
dependencies = [
 "displaydoc",
 "potential_utf",
 "yoke",
 "zerofrom",
 "zerovec",
]

[[package]]
name = "icu_locale_core"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
dependencies = [
 "displaydoc",
 "litemap",
 "tinystr",
 "writeable",
 "zerovec",
]

[[package]]
name = "icu_normalizer"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
dependencies = [
 "displaydoc",
 "icu_collections",
 "icu_normalizer_data",
 "icu_properties",
 "icu_provider",
 "smallvec",
 "zerovec",
]

[[package]]
name = "icu_normalizer_data"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"

[[package]]
name = "icu_properties"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
dependencies = [
 "displaydoc",
 "icu_collections",
 "icu_locale_core",
 "icu_properties_data",
 "icu_provider",
 "potential_utf",
 "zerotrie",
 "zerovec",
]

[[package]]
name = "icu_properties_data"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"

[[package]]
name = "icu_provider"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
dependencies = [
 "displaydoc",
 "icu_locale_core",
 "stable_deref_trait",
 "tinystr",
 "writeable",
 "yoke",
 "zerofrom",
 "zerotrie",
 "zerovec",
]

[[package]]
name = "ident_case"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"

[[package]]
name = "idna"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
dependencies = [
 "idna_adapter",
 "smallvec",
 "utf8_iter",
]

[[package]]
name = "idna_adapter"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
dependencies = [
 "icu_normalizer",
 "icu_properties",
]

[[package]]
name = "ignore"
version = "0.4.23"
@@ -1059,7 +930,7 @@ dependencies = [
 "globset",
 "log",
 "memchr",
 "regex-automata 0.4.9",
 "regex-automata",
 "same-file",
 "walkdir",
 "winapi-util",
@@ -1214,42 +1085,31 @@ version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"

[[package]]
name = "litemap"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"

[[package]]
name = "little-hesinde"
version = "0.3.1"
dependencies = [
 "axum",
 "calibre-db",
 "clap",
 "ignore",
 "image",
 "mime_guess",
 "once_cell",
 "poem",
 "quick-xml",
 "rust-embed",
 "serde",
 "serde_json",
 "serde_with",
 "sha2",
 "snafu",
 "tera",
 "thiserror 1.0.69",
 "time",
 "tokio",
 "tokio-util",
 "tower-http",
 "tracing",
 "tracing-subscriber",
 "utoipa",
 "utoipa-axum",
 "utoipa-swagger-ui",
 "uuid",
 "zip 4.2.0",
 "zip",
]

[[package]]
@@ -1277,21 +1137,6 @@ dependencies = [
 "imgref",
]

[[package]]
name = "matchers"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
dependencies = [
 "regex-automata 0.1.10",
]

[[package]]
name = "matchit"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"

[[package]]
name = "maybe-rayon"
version = "0.1.1"
@@ -1356,6 +1201,18 @@ version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"

[[package]]
name = "nix"
version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
dependencies = [
 "bitflags",
 "cfg-if",
 "cfg_aliases",
 "libc",
]

[[package]]
name = "nom"
version = "7.1.3"
@@ -1610,12 +1467,52 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"

[[package]]
name = "potential_utf"
version = "0.1.2"
name = "poem"
version = "3.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
checksum = "3ea63e9302279b1ca262d15342760f8d08f04fb974d4997e8baed7d034b94121"
dependencies = [
 "zerovec",
 "bytes",
 "futures-util",
 "headers",
 "hex",
 "http",
 "http-body-util",
 "httpdate",
 "hyper",
 "hyper-util",
 "mime",
 "mime_guess",
 "nix",
 "parking_lot",
 "percent-encoding",
 "pin-project-lite",
 "poem-derive",
 "regex",
 "rfc7239",
 "rust-embed",
 "serde",
 "serde_json",
 "serde_urlencoded",
 "smallvec",
 "sync_wrapper",
 "thiserror 2.0.12",
 "tokio",
 "tokio-util",
 "tracing",
 "wildmatch",
]

[[package]]
name = "poem-derive"
version = "3.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "824e7fe35343b7fe354e5d4ac444ddbe674676ebba4b4e48565835661033d338"
dependencies = [
 "proc-macro-crate",
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
@@ -1633,6 +1530,15 @@ dependencies = [
 "zerocopy",
]

[[package]]
name = "proc-macro-crate"
version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35"
dependencies = [
 "toml_edit",
]

[[package]]
name = "proc-macro2"
version = "1.0.95"
@@ -1880,17 +1786,8 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
 "aho-corasick",
 "memchr",
 "regex-automata 0.4.9",
 "regex-syntax 0.8.5",
]

[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
 "regex-syntax 0.6.29",
 "regex-automata",
 "regex-syntax",
]

[[package]]
@@ -1901,21 +1798,24 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
 "aho-corasick",
 "memchr",
 "regex-syntax 0.8.5",
 "regex-syntax",
]

[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"

[[package]]
name = "regex-syntax"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"

[[package]]
name = "rfc7239"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a82f1d1e38e9a85bb58ffcfadf22ed6f2c94e8cd8581ec2b0f80a2a6858350f"
dependencies = [
 "uncased",
]

[[package]]
name = "rgb"
version = "0.8.50"
@@ -2082,16 +1982,6 @@ dependencies = [
 "serde",
]

[[package]]
name = "serde_path_to_error"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a"
dependencies = [
 "itoa",
 "serde",
]

[[package]]
name = "serde_spanned"
version = "0.6.9"
@@ -2145,6 +2035,17 @@ dependencies = [
 "syn",
]

[[package]]
name = "sha1"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
 "cfg-if",
 "cpufeatures",
 "digest",
]

[[package]]
name = "sha2"
version = "0.10.9"
@@ -2223,27 +2124,6 @@ version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"

[[package]]
name = "snafu"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320b01e011bf8d5d7a4a4a4be966d9160968935849c83b918827f6a435e7f627"
dependencies = [
 "snafu-derive",
]

[[package]]
name = "snafu-derive"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1961e2ef424c1424204d3a5d6975f934f56b6d50ff5732382d84ebf460e147f7"
dependencies = [
 "heck",
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "socket2"
version = "0.5.10"
@@ -2254,12 +2134,6 @@ dependencies = [
 "windows-sys 0.52.0",
]

[[package]]
name = "stable_deref_trait"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"

[[package]]
name = "strsim"
version = "0.11.1"
@@ -2282,16 +2156,8 @@ name = "sync_wrapper"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"

[[package]]
name = "synstructure"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
 "futures-core",
]

[[package]]
@@ -2428,16 +2294,6 @@ dependencies = [
 "time-core",
]

[[package]]
name = "tinystr"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
dependencies = [
 "displaydoc",
 "zerovec",
]

[[package]]
name = "tokio"
version = "1.45.1"
@@ -2513,57 +2369,12 @@ dependencies = [
 "winnow",
]

[[package]]
name = "tower"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
dependencies = [
 "futures-core",
 "futures-util",
 "pin-project-lite",
 "sync_wrapper",
 "tokio",
 "tower-layer",
 "tower-service",
 "tracing",
]

[[package]]
name = "tower-http"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
dependencies = [
 "bitflags",
 "bytes",
 "http",
 "http-body",
 "pin-project-lite",
 "tower-layer",
 "tower-service",
 "tracing",
]

[[package]]
name = "tower-layer"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"

[[package]]
name = "tower-service"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"

[[package]]
name = "tracing"
version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
dependencies = [
 "log",
 "pin-project-lite",
 "tracing-attributes",
 "tracing-core",
@@ -2607,14 +2418,10 @@ version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
dependencies = [
 "matchers",
 "nu-ansi-term",
 "once_cell",
 "regex",
 "sharded-slab",
 "smallvec",
 "thread_local",
 "tracing",
 "tracing-core",
 "tracing-log",
]
@@ -2631,6 +2438,15 @@ version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"

[[package]]
name = "uncased"
version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697"
dependencies = [
 "version_check",
]

[[package]]
name = "unic-char-property"
version = "0.9.0"
@@ -2693,91 +2509,12 @@ version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"

[[package]]
name = "url"
version = "2.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60"
dependencies = [
 "form_urlencoded",
 "idna",
 "percent-encoding",
]

[[package]]
name = "utf8_iter"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"

[[package]]
name = "utf8parse"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"

[[package]]
name = "utoipa"
version = "5.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fcc29c80c21c31608227e0912b2d7fddba57ad76b606890627ba8ee7964e993"
dependencies = [
 "indexmap 2.10.0",
 "serde",
 "serde_json",
 "utoipa-gen",
]

[[package]]
name = "utoipa-axum"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c25bae5bccc842449ec0c5ddc5cbb6a3a1eaeac4503895dc105a1138f8234a0"
dependencies = [
 "axum",
 "paste",
 "tower-layer",
 "tower-service",
 "utoipa",
]

[[package]]
name = "utoipa-gen"
version = "5.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d79d08d92ab8af4c5e8a6da20c47ae3f61a0f1dabc1997cdf2d082b757ca08b"
dependencies = [
 "proc-macro2",
 "quote",
 "regex",
 "syn",
]

[[package]]
name = "utoipa-swagger-ui"
version = "9.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d047458f1b5b65237c2f6dc6db136945667f40a7668627b3490b9513a3d43a55"
dependencies = [
 "axum",
 "base64",
 "mime_guess",
 "regex",
 "rust-embed",
 "serde",
 "serde_json",
 "url",
 "utoipa",
 "utoipa-swagger-ui-vendored",
 "zip 3.0.0",
]

[[package]]
name = "utoipa-swagger-ui-vendored"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2eebbbfe4093922c2b6734d7c679ebfebd704a0d7e56dfcb0d05818ce28977d"

[[package]]
name = "uuid"
version = "1.17.0"
@@ -2908,6 +2645,12 @@ dependencies = [
 "unicode-ident",
]

[[package]]
name = "wildmatch"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68ce1ab1f8c62655ebe1350f589c61e505cf94d385bc6a12899442d9081e71fd"

[[package]]
name = "winapi"
version = "0.3.9"
@@ -3171,36 +2914,6 @@ dependencies = [
 "bitflags",
]

[[package]]
name = "writeable"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"

[[package]]
name = "yoke"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
dependencies = [
 "serde",
 "stable_deref_trait",
 "yoke-derive",
 "zerofrom",
]

[[package]]
name = "yoke-derive"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
 "synstructure",
]

[[package]]
name = "zerocopy"
version = "0.8.26"
@@ -3221,74 +2934,6 @@ dependencies = [
 "syn",
]

[[package]]
name = "zerofrom"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
dependencies = [
 "zerofrom-derive",
]

[[package]]
name = "zerofrom-derive"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
 "synstructure",
]

[[package]]
name = "zerotrie"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
dependencies = [
 "displaydoc",
 "yoke",
 "zerofrom",
]

[[package]]
name = "zerovec"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428"
dependencies = [
 "yoke",
 "zerofrom",
 "zerovec-derive",
]

[[package]]
name = "zerovec-derive"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "zip"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12598812502ed0105f607f941c386f43d441e00148fce9dec3ca5ffb0bde9308"
dependencies = [
 "arbitrary",
 "crc32fast",
 "flate2",
 "indexmap 2.10.0",
 "memchr",
 "zopfli",
]

[[package]]
name = "zip"
version = "4.2.0"

@@ -6,7 +6,7 @@ members = [

[workspace.dependencies]
serde = "1.0.219"
snafu = { version = "0.8.6", features = ["rust_1_81"] }
thiserror = "1.0.61"
time = { version = "0.3.41", features = ["macros", "serde", "formatting", "parsing" ] }

[workspace.package]

@@ -12,6 +12,6 @@ r2d2 = "0.8.10"
r2d2_sqlite = "0.30.0"
rusqlite = { version = "0.36.0", features = ["bundled", "time"] }
serde = { workspace = true }
snafu = { workspace = true }
tempfile = "3.20.0"
thiserror = { workspace = true }
time = { workspace = true }

@@ -1,17 +1,15 @@
//! Bundle all functions together.

use std::{
    io,
    path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};

use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use snafu::{ResultExt, Snafu};
use tempfile::{NamedTempFile, PersistError};
use tempfile::NamedTempFile;

use crate::{
    data::{self, author::Author, book::Book, pagination::SortOrder, series::Series},
    data::{
        author::Author, book::Book, error::DataStoreError, pagination::SortOrder, series::Series,
    },
    search::search,
};
@@ -23,123 +21,16 @@ pub struct Calibre {
    search_db_path: PathBuf,
}

#[derive(Debug, Snafu)]
/// Errors that can occur when loading the calibre database.
pub enum LoadError {
    /// A failure to create the database connection pool.
    #[snafu(display("Failed to create database connection pool."))]
    CreateDbPool { source: r2d2::Error },
    /// A failure to create the temporary database view.
    #[snafu(display("Failed to create temporary database view."))]
    TmpDb { source: io::Error },
    /// A failure to keep the temporary database from being deleted.
    #[snafu(display("Failed to keep temporary database from deletion."))]
    PersistTmpDb { source: PersistError },
}

#[derive(Debug, Snafu)]
/// Errors that can occur when accessing the data store.
pub enum DataStoreError {
    /// A failure to get a database connection from the pool.
    #[snafu(display("Failed to get database connection from pool."))]
    GetDbConn { source: r2d2::Error },

    /// A failure during a search operation.
    #[snafu(display("Failed to search."))]
    Search { source: crate::search::SearchError },

    /// A failure to fetch multiple books.
    #[snafu(display("Failed to fetch multiple books."))]
    MultipleBooks {
        source: data::book::MultipleBooksError,
    },
    /// A failure to fetch recent books.
    #[snafu(display("Failed to fetch recent books."))]
    RecentBooks {
        source: data::book::RecentBooksError,
    },
    /// A failure to fetch a single book.
    #[snafu(display("Failed to fetch book."))]
    ScalarBook { source: data::book::ScalarBookError },
    /// A failure to get a series' books.
    #[snafu(display("Failed to get a series' books."))]
    SeriesBooks { source: data::book::SeriesBookError },
    /// A failure to fetch an author's books.
    #[snafu(display("Failed to fetch an author's books."))]
    AuthorBooks {
        source: data::book::AuthorBooksError,
    },
    /// A failure to check for previous books.
    #[snafu(display("Failed to check if there are previous books."))]
    HasPreviousBooks {
        source: data::book::PreviousBooksError,
    },
    /// A failure to check for more books.
    #[snafu(display("Failed to check if there are more books."))]
    HasMoreBooks { source: data::book::MoreBooksError },

    /// A failure to fetch multiple authors.
    #[snafu(display("Failed to fetch multiple authors."))]
    MultipleAuthors {
        source: data::author::MultipleAuthorsError,
    },
    /// A failure to fetch a single author.
    #[snafu(display("Failed to fetch author."))]
    ScalarAuthor {
        source: data::author::ScalarAuthorError,
    },
    /// A failure to fetch a book's author.
    #[snafu(display("Failed to fetch book's author."))]
    BookAuthor {
        source: data::author::BookAuthorError,
    },
    /// A failure to check for previous authors.
    #[snafu(display("Failed to check if there are previous authors."))]
    HasPreviousAuthors {
        source: data::author::PreviousAuthorsError,
    },
    /// A failure to check for more authors.
    #[snafu(display("Failed to check if there are more authors."))]
    HasMoreAuthors {
        source: data::author::MoreAuthorsError,
    },

    /// A failure to fetch multiple series.
    #[snafu(display("Failed to fetch multiple series."))]
    MultipleSeries {
        source: data::series::MultiplSeriesError,
    },
    /// A failure to fetch a single series.
    #[snafu(display("Failed to fetch series."))]
    ScalarSeries {
        source: data::series::ScalarSeriesError,
    },
    /// A failure to get the series a book belongs to.
    #[snafu(display("Failed to get the series a book belongs to."))]
    BookSeries {
        source: data::series::SeriesBooksError,
    },
    /// A failure to check for previous series.
    #[snafu(display("Failed to check if there are previous series."))]
    HasPreviousSeries {
        source: data::series::PreviousSeriesError,
    },
    /// A failure to check for more series.
    #[snafu(display("Failed to check if there are more series."))]
    HasMoreSeries {
        source: data::series::MoreSeriesError,
    },
}
impl Calibre {
    /// Open a connection to the calibre database.
    ///
    /// Fail if the database file can not be opened or not be found.
    pub fn load(path: &Path) -> Result<Self, LoadError> {
    pub fn load(path: &Path) -> Result<Self, DataStoreError> {
        let manager = SqliteConnectionManager::file(path);
        let pool = r2d2::Pool::new(manager).context(CreateDbPoolSnafu)?;
        let pool = r2d2::Pool::new(manager)?;

        let tmpfile = NamedTempFile::new().context(TmpDbSnafu)?;
        let (_, search_db_path) = tmpfile.keep().context(PersistTmpDbSnafu)?;
        let tmpfile = NamedTempFile::new()?;
        let (_, search_db_path) = tmpfile.keep()?;

        Ok(Self {
            pool,
@@ -151,7 +42,7 @@ impl Calibre {
    ///
    /// See https://www.sqlite.org/fts5.html#full_text_query_syntax for syntax.
    pub fn search(&self, query: &str) -> Result<Vec<Book>, DataStoreError> {
        search(query, &self.pool, &self.search_db_path).context(SearchSnafu)
        search(query, &self.pool, &self.search_db_path)
    }

    /// Fetch book data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -162,8 +53,8 @@ impl Calibre {
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Book>, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Book::multiple(&conn, limit, cursor, sort_order).context(MultipleBooksSnafu)
        let conn = self.pool.get()?;
        Book::multiple(&conn, limit, cursor, sort_order)
    }

    /// Fetch author data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -174,8 +65,8 @@ impl Calibre {
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Author>, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Author::multiple(&conn, limit, cursor, sort_order).context(MultipleAuthorsSnafu)
        let conn = self.pool.get()?;
        Author::multiple(&conn, limit, cursor, sort_order)
    }

    /// Fetch books for an author specified by `author_id`, paginate the books by starting at `cursor`,
@@ -187,26 +78,26 @@ impl Calibre {
        cursor: Option<&str>,
        sort_order: SortOrder,
    ) -> Result<Vec<Book>, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Book::author_books(&conn, author_id, limit, cursor, sort_order).context(AuthorBooksSnafu)
        let conn = self.pool.get()?;
        Book::author_books(&conn, author_id, limit, cursor, sort_order)
    }

    /// Get recent books up to a limit of `limit`.
    pub fn recent_books(&self, limit: u64) -> Result<Vec<Book>, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Book::recents(&conn, limit).context(RecentBooksSnafu)
        let conn = self.pool.get()?;
        Book::recents(&conn, limit)
    }

    /// Get a single book, specified `id`.
    pub fn scalar_book(&self, id: u64) -> Result<Book, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Book::scalar_book(&conn, id).context(ScalarBookSnafu)
        let conn = self.pool.get()?;
        Book::scalar_book(&conn, id)
    }

    /// Get the author to a book with id `id`.
    pub fn book_author(&self, id: u64) -> Result<Author, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Author::book_author(&conn, id).context(BookAuthorSnafu)
        let conn = self.pool.get()?;
        Author::book_author(&conn, id)
    }

    /// Fetch series data from calibre, starting at `cursor`, fetching up to an amount of `limit` and
@@ -217,73 +108,72 @@ impl Calibre {
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Series>, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Series::multiple(&conn, limit, cursor, sort_order).context(MultipleSeriesSnafu)
        let conn = self.pool.get()?;
        Series::multiple(&conn, limit, cursor, sort_order)
    }

    /// Get the series a book with id `id` is in, as well as the book's position within the series.
    pub fn book_series(&self, id: u64) -> Result<Option<(Series, f64)>, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Series::book_series(&conn, id).context(BookSeriesSnafu)
        let conn = self.pool.get()?;
        Series::book_series(&conn, id)
    }

    /// Get all books belonging to the series with id `id`.
    pub fn series_books(&self, id: u64) -> Result<Vec<Book>, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Book::series_books(&conn, id).context(SeriesBooksSnafu)
        let conn = self.pool.get()?;
        Book::series_books(&conn, id)
    }

    /// Check if there are more authors before the specified cursor.
    pub fn has_previous_authors(&self, author_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Author::has_previous_authors(&conn, author_sort).context(HasPreviousAuthorsSnafu)
        let conn = self.pool.get()?;
        Author::has_previous_authors(&conn, author_sort)
    }

    /// Check if there are more authors after the specified cursor.
    pub fn has_more_authors(&self, author_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Author::has_more_authors(&conn, author_sort).context(HasMoreAuthorsSnafu)
        let conn = self.pool.get()?;
        Author::has_more_authors(&conn, author_sort)
    }

    /// Check if there are more books before the specified cursor.
    pub fn has_previous_books(&self, book_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Book::has_previous_books(&conn, book_sort).context(HasPreviousBooksSnafu)
        let conn = self.pool.get()?;
        Book::has_previous_books(&conn, book_sort)
    }

    /// Check if there are more books after the specified cursor.
    pub fn has_more_books(&self, book_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Book::has_more_books(&conn, book_sort).context(HasMoreBooksSnafu)
        let conn = self.pool.get()?;
        Book::has_more_books(&conn, book_sort)
    }

    /// Check if there are more series before the specified cursor.
    pub fn has_previous_series(&self, series_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Series::has_previous_series(&conn, series_sort).context(HasPreviousSeriesSnafu)
        let conn = self.pool.get()?;
        Series::has_previous_series(&conn, series_sort)
    }

    /// Check if there are more series after the specified cursor.
    pub fn has_more_series(&self, series_sort: &str) -> Result<bool, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Series::has_more_series(&conn, series_sort).context(HasMoreSeriesSnafu)
        let conn = self.pool.get()?;
        Series::has_more_series(&conn, series_sort)
    }

    /// Fetch a single author with id `id`.
    pub fn scalar_author(&self, id: u64) -> Result<Author, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Author::scalar_author(&conn, id).context(ScalarAuthorSnafu)
        let conn = self.pool.get()?;
        Author::scalar_author(&conn, id)
    }

    /// Fetch a single series with id `id`.
    pub fn scalar_series(&self, id: u64) -> Result<Series, DataStoreError> {
        let conn = self.pool.get().context(GetDbConnSnafu)?;
        Series::scalar_series(&conn, id).context(ScalarSeriesSnafu)
        let conn = self.pool.get()?;
        Series::scalar_series(&conn, id)
    }
}

#[cfg(test)]
/// Tests for the calibre module.
mod tests {
    use super::*;

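Note the shape of the new API: with the per-operation snafu wrappers gone, every fallible method on `Calibre` returns `Result<_, DataStoreError>` and composes with a bare `?`. A minimal usage sketch (illustrative only; the `calibre_db` import paths and the metadata.db location are assumptions, not part of this diff):

use std::path::Path;

use calibre_db::calibre::Calibre; // assumed module path
use calibre_db::data::error::DataStoreError; // assumed module path

fn list_recent() -> Result<(), DataStoreError> {
    // Any Calibre library's metadata.db works here; this path is made up.
    let db = Calibre::load(Path::new("/srv/books/metadata.db"))?;

    // Every call shares the single DataStoreError type, so results chain
    // with `?` instead of per-call .context(...) wrappers.
    for book in db.recent_books(10)? {
        let author = db.book_author(book.id)?;
        println!("{} ({})", book.title, author.name);
    }

    let hits = db.search("dragon")?;
    println!("{} search hits", hits.len());
    Ok(())
}
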
@@ -1,6 +0,0 @@
//! Data types and functions for interacting with the calibre database.
pub mod author;
pub mod book;
pub mod error;
pub mod pagination;
pub mod series;

@@ -1,10 +1,12 @@
//! Author data.

use rusqlite::{Connection, Row, named_params};
use rusqlite::{named_params, Connection, Row};
use serde::Serialize;
use snafu::{ResultExt, Snafu};

use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
use super::{
    error::DataStoreError,
    pagination::{Pagination, SortOrder},
};

/// Author in calibre.
#[derive(Debug, Clone, Serialize)]
@@ -17,48 +19,7 @@ pub struct Author {
    pub sort: String,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to fetch multiple authors."))]
pub struct MultipleAuthorsError {
    source: PaginationError,
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching a book's author.
pub enum BookAuthorError {
    /// A failure to prepare the SQL statement.
    #[snafu(display("Failed to prepare statement."))]
    PrepareBookAuthor { source: rusqlite::Error },
    /// A failure to execute the SQL statement.
    #[snafu(display("Failed to execute statement."))]
    ExecuteBookAuthor { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching a single author.
pub enum ScalarAuthorError {
    /// A failure to prepare the SQL statement.
    #[snafu(display("Failed to prepare statement."))]
    PrepareScalarAuthor { source: rusqlite::Error },
    /// A failure to execute the SQL statement.
    #[snafu(display("Failed to execute statement."))]
    ExecuteScalarAuthor { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for previous authors."))]
pub struct PreviousAuthorsError {
    source: HasPrevOrMoreError,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for more authors."))]
pub struct MoreAuthorsError {
    source: HasPrevOrMoreError,
}

impl Author {
    /// Create an author from a database row.
    fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
        Ok(Self {
            id: row.get(0)?,
@@ -74,54 +35,44 @@ impl Author {
        limit: u64,
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Self>, MultipleAuthorsError> {
    ) -> Result<Vec<Self>, DataStoreError> {
        let pagination = Pagination::new("sort", cursor, limit, *sort_order);
        pagination
            .paginate(
                conn,
                "SELECT id, name, sort FROM authors",
                &[],
                Self::from_row,
            )
            .context(MultipleAuthorsSnafu)
        pagination.paginate(
            conn,
            "SELECT id, name, sort FROM authors",
            &[],
            Self::from_row,
        )
    }

    /// Get the author to a book with id `id`.
    pub fn book_author(conn: &Connection, id: u64) -> Result<Self, BookAuthorError> {
        let mut stmt = conn
            .prepare(
                "SELECT authors.id, authors.name, authors.sort FROM authors \
    pub fn book_author(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
        let mut stmt = conn.prepare(
            "SELECT authors.id, authors.name, authors.sort FROM authors \
            INNER JOIN books_authors_link ON authors.id = books_authors_link.author \
            WHERE books_authors_link.book = (:id)",
        )
        .context(PrepareBookAuthorSnafu)?;
        )?;
        let params = named_params! { ":id": id };
        stmt.query_row(params, Self::from_row)
            .context(ExecuteBookAuthorSnafu)
        Ok(stmt.query_row(params, Self::from_row)?)
    }

    /// Fetch a single author with id `id`.
    pub fn scalar_author(conn: &Connection, id: u64) -> Result<Self, ScalarAuthorError> {
        let mut stmt = conn
            .prepare("SELECT id, name, sort FROM authors WHERE id = (:id)")
            .context(PrepareScalarAuthorSnafu)?;
    pub fn scalar_author(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
        let mut stmt = conn.prepare("SELECT id, name, sort FROM authors WHERE id = (:id)")?;
        let params = named_params! { ":id": id };
        stmt.query_row(params, Self::from_row)
            .context(ExecuteScalarAuthorSnafu)
        Ok(stmt.query_row(params, Self::from_row)?)
    }

    /// Check if there are more authors before the specified cursor.
    pub fn has_previous_authors(
        conn: &Connection,
        sort_name: &str,
    ) -> Result<bool, PreviousAuthorsError> {
    ) -> Result<bool, DataStoreError> {
        Pagination::has_prev_or_more(conn, "authors", sort_name, &SortOrder::DESC)
            .context(PreviousAuthorsSnafu)
    }

    /// Check if there are more authors after the specified cursor.
    pub fn has_more_authors(conn: &Connection, sort_name: &str) -> Result<bool, MoreAuthorsError> {
    pub fn has_more_authors(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
        Pagination::has_prev_or_more(conn, "authors", sort_name, &SortOrder::ASC)
            .context(MoreAuthorsSnafu)
    }
}

@@ -1,11 +1,13 @@
//! Book data.

use rusqlite::{Connection, Row, named_params};
use rusqlite::{named_params, Connection, Row};
use serde::Serialize;
use snafu::{ResultExt, Snafu};
use time::OffsetDateTime;

use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
use super::{
    error::DataStoreError,
    pagination::{Pagination, SortOrder},
};

/// Book in calibre.
#[derive(Debug, Clone, Serialize)]
@@ -26,65 +28,7 @@ pub struct Book {
    pub description: Option<String>,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to fetch multiple books."))]
pub struct MultipleBooksError {
    source: PaginationError,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to fetch author's books."))]
pub struct AuthorBooksError {
    source: PaginationError,
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching a series' books.
pub enum SeriesBookError {
    /// A failure to prepare the SQL statement.
    #[snafu(display("Failed to prepare statement."))]
    PrepareSeriesBook { source: rusqlite::Error },
    /// A failure to execute the SQL statement.
    #[snafu(display("Failed to execute statement."))]
    ExecuteSeriesBook { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching recent books.
pub enum RecentBooksError {
    /// A failure to prepare the SQL statement.
    #[snafu(display("Failed to prepare statement."))]
    PrepareRecentBooks { source: rusqlite::Error },
    /// A failure to execute the SQL statement.
    #[snafu(display("Failed to execute statement."))]
    ExecuteRecentBooks { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching a single book.
pub enum ScalarBookError {
    /// A failure to prepare the SQL statement.
    #[snafu(display("Failed to prepare statement."))]
    PrepareScalarBook { source: rusqlite::Error },
    /// A failure to execute the SQL statement.
    #[snafu(display("Failed to execute statement."))]
    ExecuteScalarBook { source: rusqlite::Error },
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for previous books."))]
pub struct PreviousBooksError {
    source: HasPrevOrMoreError,
}

#[derive(Debug, Snafu)]
#[snafu(display("Failed to check for more books."))]
pub struct MoreBooksError {
    source: HasPrevOrMoreError,
}

impl Book {
    /// Create a book from a database row.
    fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
        Ok(Self {
            id: row.get(0)?,
@@ -104,7 +48,7 @@ impl Book {
        limit: u64,
        cursor: Option<&str>,
        sort_order: &SortOrder,
    ) -> Result<Vec<Self>, MultipleBooksError> {
    ) -> Result<Vec<Self>, DataStoreError> {
        let pagination = Pagination::new("sort", cursor, limit, *sort_order);
        pagination.paginate(
            conn,
@@ -112,7 +56,7 @@ impl Book {
            FROM books LEFT JOIN comments ON books.id = comments.book",
            &[],
            Self::from_row,
        ).context(MultipleBooksSnafu)
        )
    }

    /// Fetch books for an author specified by `author_id`, paginate the books by starting at `cursor`,
@@ -123,7 +67,7 @@ impl Book {
        limit: u64,
        cursor: Option<&str>,
        sort_order: SortOrder,
    ) -> Result<Vec<Self>, AuthorBooksError> {
    ) -> Result<Vec<Self>, DataStoreError> {
        let pagination = Pagination::new("books.sort", cursor, limit, sort_order);
        pagination.paginate(
            conn,
@@ -133,11 +77,11 @@ impl Book {
            WHERE books_authors_link.author = (:author_id) AND",
            &[(":author_id", &author_id)],
            Self::from_row,
        ).context(AuthorBooksSnafu)
        )
    }

    /// Get all books belonging to the series with id `id`.
    pub fn series_books(conn: &Connection, id: u64) -> Result<Vec<Book>, SeriesBookError> {
    pub fn series_books(conn: &Connection, id: u64) -> Result<Vec<Book>, DataStoreError> {
        let mut stmt = conn.prepare(
            "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text FROM series \
            INNER JOIN books_series_link ON series.id = books_series_link.series \
@@ -145,50 +89,40 @@ impl Book {
            LEFT JOIN comments ON books.id = comments.book \
            WHERE books_series_link.series = (:id) \
            ORDER BY books.series_index",
        ).context(PrepareSeriesBookSnafu)?;
        )?;
        let params = named_params! { ":id": id };
        let iter = stmt
            .query_map(params, Self::from_row)
            .context(ExecuteSeriesBookSnafu)?;
        let iter = stmt.query_map(params, Self::from_row)?;
        Ok(iter.filter_map(Result::ok).collect())
    }

    /// Get recent books up to a limit of `limit`.
    pub fn recents(conn: &Connection, limit: u64) -> Result<Vec<Self>, RecentBooksError> {
    pub fn recents(conn: &Connection, limit: u64) -> Result<Vec<Self>, DataStoreError> {
        let mut stmt = conn.prepare(
            "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
            FROM books LEFT JOIN comments ON books.id = comments.book ORDER BY books.timestamp DESC LIMIT (:limit)"
        ).context(PrepareRecentBooksSnafu)?;
        )?;
        let params = named_params! { ":limit": limit };
        let iter = stmt
            .query_map(params, Self::from_row)
            .context(ExecuteRecentBooksSnafu)?;
        let iter = stmt.query_map(params, Self::from_row)?;
        Ok(iter.filter_map(Result::ok).collect())
    }

    /// Get a single book, specified `id`.
    pub fn scalar_book(conn: &Connection, id: u64) -> Result<Self, ScalarBookError> {
    pub fn scalar_book(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
        let mut stmt = conn.prepare(
            "SELECT books.id, books.title, books.sort, books.path, books.uuid, books.last_modified, comments.text \
            FROM books LEFT JOIN comments WHERE books.id = (:id)",
        ).context(PrepareScalarBookSnafu)?;
        )?;
        let params = named_params! { ":id": id };
        stmt.query_row(params, Self::from_row)
            .context(ExecuteScalarBookSnafu)
        Ok(stmt.query_row(params, Self::from_row)?)
    }

    /// Check if there are more books before the specified cursor.
    pub fn has_previous_books(
        conn: &Connection,
        sort_title: &str,
    ) -> Result<bool, PreviousBooksError> {
    pub fn has_previous_books(conn: &Connection, sort_title: &str) -> Result<bool, DataStoreError> {
        Pagination::has_prev_or_more(conn, "books", sort_title, &SortOrder::DESC)
            .context(PreviousBooksSnafu)
    }

    /// Check if there are more books after the specified cursor.
    pub fn has_more_books(conn: &Connection, sort_title: &str) -> Result<bool, MoreBooksError> {
    pub fn has_more_books(conn: &Connection, sort_title: &str) -> Result<bool, DataStoreError> {
        Pagination::has_prev_or_more(conn, "books", sort_title, &SortOrder::ASC)
            .context(MoreBooksSnafu)
    }
}

@@ -1 +1,42 @@
//! Error handling for calibre database access.

use std::io;

use tempfile::PersistError;
use thiserror::Error;
use time::error::Parse;

/// Errors from accessing the calibre database.
#[derive(Error, Debug)]
#[error("data store error")]
pub enum DataStoreError {
    /// Found no entries for the query.
    #[error("no results")]
    NoResults(rusqlite::Error),
    /// Error with SQLite.
    #[error("sqlite error")]
    SqliteError(rusqlite::Error),
    /// Error connecting to the database.
    #[error("connection error")]
    ConnectionError(#[from] r2d2::Error),
    /// Error parsing a datetime from the database.
    #[error("failed to parse datetime")]
    DateTimeError(#[from] Parse),
    /// Error creating the search database.
    #[error("failed to create search database")]
    SearchDbError(#[from] io::Error),
    /// Error marking the search database as persistent.
    #[error("failed to persist search database")]
    PersistSearchDbError(#[from] PersistError),
}

/// Convert an SQLite error into a proper NoResults one if the query
/// returned no rows, return others as is.
impl From<rusqlite::Error> for DataStoreError {
    fn from(error: rusqlite::Error) -> Self {
        match error {
            rusqlite::Error::QueryReturnedNoRows => DataStoreError::NoResults(error),
            _ => DataStoreError::SqliteError(error),
        }
    }
}

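The `From<rusqlite::Error>` impl above is what makes the bare `?` rewrites in author.rs and book.rs work: `QueryReturnedNoRows` is folded into `NoResults`, every other SQLite failure into `SqliteError`, and the `#[from]` variants cover the r2d2, time, io, and tempfile sources. A small sketch of the resulting call-site pattern (hypothetical function; the `calibre_db::data::error` path is an assumption):

use rusqlite::Connection;

use calibre_db::data::error::DataStoreError; // assumed module path

// A query_row that matches nothing surfaces as DataStoreError::NoResults;
// any other SQLite failure becomes SqliteError. Both conversions happen
// inside the `?` via the From impl above.
fn book_title(conn: &Connection, id: u64) -> Result<String, DataStoreError> {
    let mut stmt = conn.prepare("SELECT title FROM books WHERE id = ?1")?;
    Ok(stmt.query_row([id], |row| row.get(0))?)
}
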
@ -1,8 +1,9 @@
|
|||
//! Cursor pagination handling.
|
||||
|
||||
use rusqlite::{Connection, Row, ToSql, named_params};
|
||||
use rusqlite::{named_params, Connection, Row, ToSql};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use snafu::{ResultExt, Snafu};
|
||||
|
||||
use super::error::DataStoreError;
|
||||
|
||||
/// How to sort query results. Signifying whether we are paginating forwards or backwards.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Deserialize, Serialize)]
|
||||
|
@ -25,28 +26,6 @@ pub struct Pagination<'a> {
|
|||
pub sort_order: SortOrder,
|
||||
}
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when checking for previous or more items.
|
||||
pub enum HasPrevOrMoreError {
|
||||
/// A failure to prepare the SQL statement.
|
||||
#[snafu(display("Failed to prepare statement."))]
|
||||
PrepareHasPrevOrMore { source: rusqlite::Error },
|
||||
/// A failure to execute the SQL statement.
|
||||
#[snafu(display("Failed to execute statement."))]
|
||||
ExecuteHasPrevOrMore { source: rusqlite::Error },
|
||||
}
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur during pagination.
|
||||
pub enum PaginationError {
|
||||
/// A failure to prepare the SQL statement.
|
||||
#[snafu(display("Failed to prepare statement."))]
|
||||
PreparePagination { source: rusqlite::Error },
|
||||
/// A failure to execute the SQL statement.
|
||||
#[snafu(display("Failed to execute statement."))]
|
||||
ExecutePagination { source: rusqlite::Error },
|
||||
}
|
||||
|
||||
impl<'a> Pagination<'a> {
|
||||
/// Create a new pagination.
|
||||
pub fn new(
|
||||
|
@ -78,16 +57,14 @@ impl<'a> Pagination<'a> {
|
|||
table: &str,
|
||||
sort: &str,
|
||||
sort_order: &SortOrder,
|
||||
) -> Result<bool, HasPrevOrMoreError> {
|
||||
) -> Result<bool, DataStoreError> {
|
||||
let comparison = Pagination::sort_order_to_sql(sort_order);
|
||||
|
||||
let mut stmt = conn.prepare(&format!(
|
||||
"SELECT Count(1) FROM {table} WHERE sort {comparison} (:sort) ORDER BY sort {sort_order:?}"
|
||||
)).context(PrepareHasPrevOrMoreSnafu)?;
|
||||
))?;
|
||||
let params = named_params! { ":sort": sort};
|
||||
let count: u64 = stmt
|
||||
.query_row(params, |x| x.get(0))
|
||||
.context(ExecuteHasPrevOrMoreSnafu)?;
|
||||
let count: u64 = stmt.query_row(params, |x| x.get(0))?;
|
||||
|
||||
Ok(count > 0)
|
||||
}
|
||||
|
@ -99,7 +76,7 @@ impl<'a> Pagination<'a> {
|
|||
statement: &str,
|
||||
params: &[(&str, &dyn ToSql)],
|
||||
processor: F,
|
||||
) -> Result<Vec<T>, PaginationError>
|
||||
) -> Result<Vec<T>, DataStoreError>
|
||||
where
|
||||
F: FnMut(&Row<'_>) -> Result<T, rusqlite::Error>,
|
||||
{
|
||||
|
@ -125,7 +102,7 @@ impl<'a> Pagination<'a> {
|
|||
// DANGER: vulnerable to SQL injection if statement or sort_col variable is influenced by user input
|
||||
let mut stmt = conn.prepare(&format!(
|
||||
"SELECT * FROM ({statement} {where_sql} {sort_col} {comparison} (:cursor) ORDER BY {sort_col} {sort_order:?} LIMIT (:limit)) AS t ORDER BY {sort_col_wrapped} ASC"
|
||||
)).context(PreparePaginationSnafu)?;
|
||||
))?;
|
||||
let params = [
|
||||
&[
|
||||
(":cursor", &cursor as &dyn ToSql),
|
||||
|
@ -134,9 +111,7 @@ impl<'a> Pagination<'a> {
|
|||
params,
|
||||
]
|
||||
.concat();
|
||||
let iter = stmt
|
||||
.query_map(params.as_slice(), processor)
|
||||
.context(ExecutePaginationSnafu)?;
|
||||
let iter = stmt.query_map(params.as_slice(), processor)?;
|
||||
Ok(iter.filter_map(Result::ok).collect())
|
||||
}
|
||||
}
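`has_prev_or_more` and `paginate` both build keyset (cursor) queries: the page boundary is a comparison on the sort column rather than an `OFFSET`. A self-contained sketch of one forward page, assuming only `rusqlite`; the `books`/`sort` schema here is illustrative:

```rust
use rusqlite::{named_params, Connection};

/// Fetch one page after `cursor`. An index on `sort` makes this roughly
/// O(log n) per page, where OFFSET-based paging degrades with page depth.
fn next_page(conn: &Connection, cursor: &str, limit: i64) -> rusqlite::Result<Vec<String>> {
    let mut stmt = conn.prepare(
        "SELECT sort FROM books WHERE sort > (:cursor) ORDER BY sort ASC LIMIT (:limit)",
    )?;
    let rows = stmt.query_map(
        named_params! { ":cursor": cursor, ":limit": limit },
        |row| row.get(0),
    )?;
    rows.collect()
}
```

The `DANGER` comment survives the refactor for good reason: `statement` and `sort_col` are spliced in with `format!`, so they must never come from user input; only `:cursor` and `:limit` are bound as parameters.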

@@ -1,10 +1,12 @@
 //! Series data.

-use rusqlite::{Connection, Row, named_params};
+use rusqlite::{named_params, Connection, Row};
 use serde::Serialize;
-use snafu::{ResultExt, Snafu};

-use super::pagination::{HasPrevOrMoreError, Pagination, PaginationError, SortOrder};
+use super::{
+    error::DataStoreError,
+    pagination::{Pagination, SortOrder},
+};

 /// Series in calibre.
 #[derive(Debug, Clone, Serialize)]
@@ -17,48 +19,7 @@ pub struct Series {
     pub sort: String,
 }

-#[derive(Debug, Snafu)]
-#[snafu(display("Failed to fetch multiple series."))]
-pub struct MultiplSeriesError {
-    source: PaginationError,
-}
-
-#[derive(Debug, Snafu)]
-/// Errors that can occur when fetching a series' books.
-pub enum SeriesBooksError {
-    /// A failure to prepare the SQL statement.
-    #[snafu(display("Failed to prepare statement."))]
-    PrepareSeriesBooks { source: rusqlite::Error },
-    /// A failure to execute the SQL statement.
-    #[snafu(display("Failed to execute statement."))]
-    ExecuteSeriesBooks { source: rusqlite::Error },
-}
-
-#[derive(Debug, Snafu)]
-/// Errors that can occur when fetching a single series.
-pub enum ScalarSeriesError {
-    /// A failure to prepare the SQL statement.
-    #[snafu(display("Failed to prepare statement."))]
-    PrepareScalarSeries { source: rusqlite::Error },
-    /// A failure to execute the SQL statement.
-    #[snafu(display("Failed to execute statement."))]
-    ExecuteScalarSeries { source: rusqlite::Error },
-}
-
-#[derive(Debug, Snafu)]
-#[snafu(display("Failed to check for previous series."))]
-pub struct PreviousSeriesError {
-    source: HasPrevOrMoreError,
-}
-
-#[derive(Debug, Snafu)]
-#[snafu(display("Failed to check for more series."))]
-pub struct MoreSeriesError {
-    source: HasPrevOrMoreError,
-}
-
 impl Series {
     /// Create a series from a database row.
     fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
         Ok(Self {
             id: row.get(0)?,
@@ -74,41 +35,34 @@ impl Series {
         limit: u64,
         cursor: Option<&str>,
         sort_order: &SortOrder,
-    ) -> Result<Vec<Self>, MultiplSeriesError> {
+    ) -> Result<Vec<Self>, DataStoreError> {
         let pagination = Pagination::new("sort", cursor, limit, *sort_order);
-        pagination
-            .paginate(
-                conn,
-                "SELECT id, name, sort FROM series",
-                &[],
-                Self::from_row,
-            )
-            .context(MultiplSeriesSnafu)
+        pagination.paginate(
+            conn,
+            "SELECT id, name, sort FROM series",
+            &[],
+            Self::from_row,
+        )
     }

     /// Fetch a single series with id `id`.
-    pub fn scalar_series(conn: &Connection, id: u64) -> Result<Self, ScalarSeriesError> {
-        let mut stmt = conn
-            .prepare("SELECT id, name, sort FROM series WHERE id = (:id)")
-            .context(PrepareScalarSeriesSnafu)?;
+    pub fn scalar_series(conn: &Connection, id: u64) -> Result<Self, DataStoreError> {
+        let mut stmt = conn.prepare("SELECT id, name, sort FROM series WHERE id = (:id)")?;
         let params = named_params! { ":id": id };
-        stmt.query_row(params, Self::from_row)
-            .context(ExecuteScalarSeriesSnafu)
+        Ok(stmt.query_row(params, Self::from_row)?)
     }

     /// Get the series a book with id `id` is in, as well as the book's position within the series.
     pub fn book_series(
         conn: &Connection,
         book_id: u64,
-    ) -> Result<Option<(Self, f64)>, SeriesBooksError> {
-        let mut stmt = conn
-            .prepare(
-                "SELECT series.id, series.name, series.sort, books.series_index FROM series \
+    ) -> Result<Option<(Self, f64)>, DataStoreError> {
+        let mut stmt = conn.prepare(
+            "SELECT series.id, series.name, series.sort, books.series_index FROM series \
             INNER JOIN books_series_link ON series.id = books_series_link.series \
             INNER JOIN books ON books.id = books_series_link.book \
             WHERE books_series_link.book = (:id)",
-            )
-            .context(PrepareSeriesBooksSnafu)?;
+        )?;
         let params = named_params! { ":id": book_id };

         let from_row = |row: &Row<'_>| {
@@ -120,22 +74,17 @@ impl Series {
         match stmt.query_row(params, from_row) {
             Ok(series) => Ok(Some(series)),
             Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
-            Err(e) => Err(e).context(ExecuteSeriesBooksSnafu),
+            Err(e) => Err(DataStoreError::SqliteError(e)),
         }
     }

     /// Check if there are more series before the specified cursor.
-    pub fn has_previous_series(
-        conn: &Connection,
-        sort_name: &str,
-    ) -> Result<bool, PreviousSeriesError> {
+    pub fn has_previous_series(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
         Pagination::has_prev_or_more(conn, "series", sort_name, &SortOrder::DESC)
-            .context(PreviousSeriesSnafu)
     }

     /// Check if there are more series after the specified cursor.
-    pub fn has_more_series(conn: &Connection, sort_name: &str) -> Result<bool, MoreSeriesError> {
+    pub fn has_more_series(conn: &Connection, sort_name: &str) -> Result<bool, DataStoreError> {
         Pagination::has_prev_or_more(conn, "series", sort_name, &SortOrder::ASC)
-            .context(MoreSeriesSnafu)
     }
 }

@@ -1,6 +1,13 @@
 //! Read data from a calibre library, leveraging its SQLite metadata database.

 pub mod calibre;
-/// Data structs for the calibre database.
-pub mod data;
 pub mod search;
+
+/// Data structs for the calibre database.
+pub mod data {
+    pub mod author;
+    pub mod book;
+    pub mod error;
+    pub mod pagination;
+    pub mod series;
+}

@@ -10,9 +10,8 @@ use std::path::Path;
 use r2d2::{Pool, PooledConnection};
 use r2d2_sqlite::SqliteConnectionManager;
 use rusqlite::named_params;
-use snafu::{ResultExt, Snafu};

-use crate::data::book::Book;
+use crate::data::{book::Book, error::DataStoreError};

 /// A lot of joins but only run once at startup.
 const SEARCH_INIT_QUERY: &str = "INSERT INTO search.fts(book_id, data)
@@ -34,76 +33,20 @@ const SEARCH_INIT_QUERY: &str = "INSERT INTO search.fts(book_id, data)
     LEFT JOIN main.series AS s ON b2s.series = s.id
     GROUP BY b.id";

-#[derive(Debug, Snafu)]
-/// Errors that can occur when ensuring the search database is available.
-pub enum EnsureSearchDbError {
-    /// A failure to prepare the SQL statement.
-    #[snafu(display("Failed to prepare statement."))]
-    PrepareEnsureSearch { source: rusqlite::Error },
-    /// A failure to execute the SQL statement.
-    #[snafu(display("Failed to execute statement."))]
-    ExecuteEnsureSearch { source: rusqlite::Error },
-    /// A failure to attach the database.
-    #[snafu(display("Failed to attach database."))]
-    Attach { source: AttachError },
-    /// A failure to initialize the database.
-    #[snafu(display("Failed to initialize database."))]
-    Init { source: InitError },
-}
-
-#[derive(Debug, Snafu)]
-/// Errors that can occur when attaching the search database.
-pub enum AttachError {
-    /// A failure to execute the SQL statement.
-    #[snafu(display("Failed to execute statement."))]
-    ExecuteAttach { source: rusqlite::Error },
-}
-
-#[derive(Debug, Snafu)]
-/// Errors that can occur when initializing the search database.
-pub enum InitError {
-    /// A failure to prepare the SQL statement.
-    #[snafu(display("Failed to prepare statement."))]
-    PrepareInit { source: rusqlite::Error },
-    /// A failure to execute the SQL statement.
-    #[snafu(display("Failed to execute statement."))]
-    ExecuteInit { source: rusqlite::Error },
-}
-
-#[derive(Debug, Snafu)]
-/// Errors that can occur when searching.
-pub enum SearchError {
-    /// A failure to ensure the search database is initialized.
-    #[snafu(display("Failed ensure the search db is initialized."))]
-    EnsureDb { source: EnsureSearchDbError },
-    /// A failure to get a connection from the pool.
-    #[snafu(display("Failed to get connection from pool."))]
-    Connection { source: r2d2::Error },
-    /// A failure to prepare the SQL statement.
-    #[snafu(display("Failed to prepare statement."))]
-    PrepareSearch { source: rusqlite::Error },
-    /// A failure to execute the SQL statement.
-    #[snafu(display("Failed to execute statement."))]
-    ExecuteSearch { source: rusqlite::Error },
-}
-
 /// Ensure the search database is attached to the connection and
 /// initializes the data if needed.
 fn ensure_search_db(
     conn: &PooledConnection<SqliteConnectionManager>,
     db_path: &Path,
-) -> Result<(), EnsureSearchDbError> {
-    let mut stmt = conn
-        .prepare("SELECT COUNT() FROM pragma_database_list WHERE name = 'search'")
-        .context(PrepareEnsureSearchSnafu)?;
-    let count: u64 = stmt
-        .query_row([], |x| x.get(0))
-        .context(ExecuteEnsureSearchSnafu)?;
+) -> Result<(), DataStoreError> {
+    let mut stmt =
+        conn.prepare("SELECT COUNT() FROM pragma_database_list WHERE name = 'search'")?;
+    let count: u64 = stmt.query_row([], |x| x.get(0))?;
     let need_attachment = count == 0;

     if need_attachment {
-        attach(conn, db_path).context(AttachSnafu)?;
-        init(conn).context(InitSnafu)?;
+        attach(conn, db_path)?;
+        init(conn)?;
     }

     Ok(())
@@ -113,32 +56,29 @@ fn ensure_search_db(
 fn attach(
     conn: &PooledConnection<SqliteConnectionManager>,
     db_path: &Path,
-) -> Result<(), AttachError> {
+) -> Result<(), DataStoreError> {
     conn.execute(
         &format!("ATTACH DATABASE '{}' AS search", db_path.to_string_lossy()),
         [],
-    )
-    .context(ExecuteAttachSnafu)?;
+    )?;
+    init(conn)?;

     Ok(())
 }

 /// Initialise the fts virtual table.
-fn init(conn: &PooledConnection<SqliteConnectionManager>) -> Result<(), InitError> {
+fn init(conn: &PooledConnection<SqliteConnectionManager>) -> Result<(), DataStoreError> {
     let mut stmt = conn
-        .prepare("SELECT COUNT() FROM search.sqlite_master WHERE type='table' AND name = 'fts'")
-        .context(PrepareInitSnafu)?;
-    let count: u64 = stmt.query_row([], |x| x.get(0)).context(ExecuteInitSnafu)?;
+        .prepare("SELECT COUNT() FROM search.sqlite_master WHERE type='table' AND name = 'fts'")?;
+    let count: u64 = stmt.query_row([], |x| x.get(0))?;
     let need_init = count == 0;

     if need_init {
         conn.execute(
             "CREATE VIRTUAL TABLE search.fts USING fts5(book_id, data)",
             [],
-        )
-        .context(ExecuteInitSnafu)?;
-        conn.execute(SEARCH_INIT_QUERY, [])
-            .context(ExecuteInitSnafu)?;
+        )?;
+        conn.execute(SEARCH_INIT_QUERY, [])?;
     }

     Ok(())
@@ -149,17 +89,15 @@ pub(crate) fn search(
     query: &str,
     pool: &Pool<SqliteConnectionManager>,
     search_db_path: &Path,
-) -> Result<Vec<Book>, SearchError> {
-    let conn = pool.get().context(ConnectionSnafu)?;
-    ensure_search_db(&conn, search_db_path).context(EnsureDbSnafu)?;
+) -> Result<Vec<Book>, DataStoreError> {
+    let conn = pool.get()?;
+    ensure_search_db(&conn, search_db_path)?;

-    let mut stmt = conn
-        .prepare("SELECT book_id FROM search.fts WHERE data MATCH (:query) ORDER BY rank")
-        .context(PrepareSearchSnafu)?;
+    let mut stmt =
+        conn.prepare("SELECT book_id FROM search.fts WHERE data MATCH (:query) ORDER BY rank")?;
     let params = named_params! { ":query": query };
     let books = stmt
-        .query_map(params, |r| -> Result<u64, rusqlite::Error> { r.get(0) })
-        .context(ExecuteSearchSnafu)?
+        .query_map(params, |r| -> Result<u64, rusqlite::Error> { r.get(0) })?
         .filter_map(Result::ok)
         .filter_map(|id| Book::scalar_book(&conn, id).ok())
         .collect();
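The module attaches a second SQLite database and fills an `fts5` table once; every search afterwards is a single `MATCH`. A compact, self-contained sketch of that round trip on an in-memory connection (illustrative schema and rows, not the crate's code; requires SQLite compiled with FTS5, which rusqlite's `bundled` feature typically enables):

```rust
use rusqlite::{named_params, Connection};

fn fts_demo() -> rusqlite::Result<Vec<String>> {
    let conn = Connection::open_in_memory()?;
    // Mirrors init(): create the virtual table, then bulk-load it once.
    conn.execute_batch(
        "CREATE VIRTUAL TABLE fts USING fts5(book_id, data);
         INSERT INTO fts(book_id, data) VALUES
             ('1', 'dune frank herbert'),
             ('2', 'neuromancer william gibson');",
    )?;
    // Mirrors search(): MATCH plus ORDER BY rank (best match first).
    let mut stmt =
        conn.prepare("SELECT book_id FROM fts WHERE data MATCH (:query) ORDER BY rank")?;
    let ids = stmt.query_map(named_params! { ":query": "herbert" }, |r| r.get(0))?;
    ids.collect()
}
```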

@@ -8,28 +8,23 @@ repository = { workspace = true }
 description = "A very simple ebook server for a calibre library, providing a html interface as well as an OPDS feed."

 [dependencies]
-axum = { version = "0.8.4", features = ["http2", "tracing"] }
 calibre-db = { path = "../calibre-db/", version = "0.1.0" }
 clap = { version = "4.5.40", features = ["derive", "env"] }
 image = { version = "0.25.6", default-features = false, features = ["jpeg", "rayon"] }
 mime_guess = "2.0.5"
 once_cell = "1.21.3"
+poem = { version = "3.0.1", features = ["embed", "static-files"] }
 rust-embed = "8.7.2"
 sha2 = "0.10.9"
 serde = { workspace = true }
 serde_json = "1.0.140"
 serde_with = "3.14.0"
-snafu = { workspace = true }
 tera = "1.20.0"
+thiserror = { workspace = true }
 time = { workspace = true }
-tokio = { version = "1.45.1", features = ["signal", "fs", "rt-multi-thread", "macros"] }
+tokio = { version = "1.45.1", features = ["signal", "rt-multi-thread", "macros"] }
-tokio-util = "0.7.15"
-tower-http = { version = "0.6.6", features = ["trace"] }
 tracing = "0.1.41"
-tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
-utoipa = { version = "5.4.0", features = ["axum_extras"] }
-utoipa-axum = "0.2.0"
-utoipa-swagger-ui = { version = "9.0.2", features = ["axum", "vendored"] }
+tracing-subscriber = "0.3.19"
 uuid = { version = "1.17.0", features = ["v4", "fast-rng"] }
 quick-xml = { version = "0.38.0", features = ["serialize"] }

@@ -6,9 +6,8 @@ use std::{
 };

 use ignore::Walk;
-use zip::{CompressionMethod, write::SimpleFileOptions};
+use zip::{write::SimpleFileOptions, CompressionMethod};

 /// Create a zip archive of the source code.
 fn main() -> Result<(), Box<dyn std::error::Error>> {
     let out_dir = env::var("OUT_DIR")?;
     let src_dir = "..";
@@ -1,89 +0,0 @@
use std::{io, net::SocketAddr};

use serde::Deserialize;
use snafu::{ResultExt, Snafu};
use tokio::net::TcpListener;
use utoipa::{OpenApi, ToSchema};
use utoipa_axum::router::OpenApiRouter;
use utoipa_swagger_ui::SwaggerUi;

use crate::app_state::AppState;

pub mod authors;
pub mod books;
pub mod download;
pub mod error;
pub mod html;
pub mod opds;
pub mod paginated;
pub mod routes;
pub mod search;
pub mod series;
pub mod static_files;

/// How to sort query results.
#[derive(Debug, Copy, Clone, PartialEq, Deserialize, ToSchema)]
#[serde(rename_all = "UPPERCASE")]
pub enum SortOrder {
    ASC,
    DESC,
}

impl From<SortOrder> for calibre_db::data::pagination::SortOrder {
    /// Convert the API sort order to the database sort order.
    fn from(val: SortOrder) -> Self {
        match val {
            SortOrder::ASC => calibre_db::data::pagination::SortOrder::ASC,
            SortOrder::DESC => calibre_db::data::pagination::SortOrder::DESC,
        }
    }
}

/// OpenAPI tag for all endpoints.
const TAG: &str = "html";
const OPDS_TAG: &str = "opds";

/// OpenAPI documentation configuration.
#[derive(OpenApi)]
#[openapi(
    components(
        schemas(
            SortOrder
        )
    ),
    tags(
        (name = TAG, description = "Browser access to a calibre library."),
        (name = OPDS_TAG, description = "OPDS access to a calibre library.")
    )
)]
struct ApiDoc;

/// Errors that occur when starting the HTTP server.
#[derive(Debug, Snafu)]
pub enum ServeError {
    #[snafu(display("Failed to bind to {address}."))]
    Bind {
        source: io::Error,
        address: SocketAddr,
    },
    #[snafu(display("Failed to run http server."))]
    Serve { source: io::Error },
}

/// Start the HTTP API server with the given configuration.
pub async fn serve(address: SocketAddr, state: AppState) -> Result<(), ServeError> {
    let (router, api) = OpenApiRouter::with_openapi(ApiDoc::openapi())
        .merge(routes::router(state))
        .split_for_parts();

    let router =
        router.merge(SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", api.clone()));

    let listener = TcpListener::bind(&address)
        .await
        .context(BindSnafu { address })?;

    axum::serve(listener, router.into_make_service())
        .await
        .context(ServeSnafu)
}
@@ -1,39 +0,0 @@
use std::path::Path;

use calibre_db::{
    calibre::{Calibre, DataStoreError},
    data::author::Author,
};
use snafu::{ResultExt, Snafu};

use super::SortOrder;
use crate::data::book::Book;

#[derive(Debug, Snafu)]
/// Errors that can occur when retrieving a single author.
pub enum SingleAuthorError {
    /// A failure to fetch author data.
    #[snafu(display("Failed to fetch author data."))]
    AuthorData { source: DataStoreError },
    /// A failure to fetch books from the author.
    #[snafu(display("Failed to fetch books from author."))]
    BookData { source: DataStoreError },
}

/// Retrieve a single author and all their books.
pub async fn single(
    id: u64,
    calibre: &Calibre,
    library_path: &Path,
) -> Result<(Author, Vec<Book>), SingleAuthorError> {
    let author = calibre.scalar_author(id).context(AuthorDataSnafu)?;
    let books = calibre
        .author_books(id, u32::MAX.into(), None, SortOrder::ASC.into())
        .context(BookDataSnafu)?;
    let books = books
        .iter()
        .filter_map(|x| Book::full_book(x, calibre, library_path))
        .collect::<Vec<Book>>();

    Ok((author, books))
}
@@ -1,25 +0,0 @@
use std::path::Path;

use calibre_db::calibre::{Calibre, DataStoreError};
use snafu::{ResultExt, Snafu};

use crate::data::book::Book;

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching recent books.
pub enum RecentBooksError {
    /// A failure to fetch recent books.
    #[snafu(display("Failed to fetch recent books."))]
    RecentBooks { source: DataStoreError },
}

/// Fetch recent books and enrich them with additional information.
pub async fn recent(calibre: &Calibre, library_path: &Path) -> Result<Vec<Book>, RecentBooksError> {
    let recent_books = calibre.recent_books(25).context(RecentBooksSnafu)?;
    let recent_books = recent_books
        .iter()
        .filter_map(|x| Book::full_book(x, calibre, library_path))
        .collect::<Vec<Book>>();

    Ok(recent_books)
}
@@ -1,40 +0,0 @@
use axum::{
    body::Body,
    http::{self, StatusCode, header},
    response::Response,
};
use snafu::{ResultExt, Snafu};
use tokio::io::AsyncRead;
use tokio_util::io::ReaderStream;

#[derive(Debug, Snafu)]
/// Errors that can occur when downloading a file.
pub enum DownloadError {
    /// A failure to construct the response body.
    #[snafu(display("Failed to fetch cover."))]
    Body { source: http::Error },
}

/// Handle a request for file.
///
/// Must not be used directly from a route as that makes it vulnerable to path traversal attacks.
pub async fn handler<A: AsyncRead + Send + Unpin + 'static>(
    file_name: &str,
    reader: A,
    content_type: &str,
) -> Result<Response, DownloadError> {
    let stream = ReaderStream::new(reader);
    let body = Body::from_stream(stream);

    let response = Response::builder()
        .status(StatusCode::OK)
        .header(
            header::CONTENT_DISPOSITION,
            format!("filename=\"{file_name}\""),
        )
        .header(header::CONTENT_TYPE, content_type)
        .body(body)
        .context(BodySnafu)?;

    Ok(response)
}
@@ -1,41 +0,0 @@
//! HTTP error handling and response formatting.

use axum::http::StatusCode;
use serde::Serialize;
use utoipa::ToSchema;

/// Standard error response format for API endpoints.
#[derive(Serialize, ToSchema)]
pub struct ErrorResponse {
    /// Unique identifier for tracking this error instance.
    pub id: String,
    /// Human-readable error message.
    pub error: String,
}

/// Map error types to HTTP status codes.
pub trait HttpStatus {
    /// Return the appropriate HTTP status code for this error.
    fn status_code(&self) -> StatusCode;
}

/// Generate IntoResponse implementation for error types with JSON formatting.
#[macro_export]
macro_rules! http_error {
    ($error_type:ty) => {
        impl axum::response::IntoResponse for $error_type {
            fn into_response(self) -> axum::response::Response {
                let status = self.status_code();
                let id = uuid::Uuid::new_v4().to_string();
                tracing::error!("{}: {}", &id, snafu::Report::from_error(&self));

                let error_response = $crate::api::error::ErrorResponse {
                    id,
                    error: self.to_string(),
                };

                (status, axum::Json(error_response)).into_response()
            }
        }
    };
}
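`http_error!` exists because axum's `IntoResponse` cannot be blanket-implemented for every error type in one `impl`. A hand-inlined, simplified view of what one invocation expands to (`MyError` is a stand-in, and the real macro consults the `HttpStatus` trait instead of hard-coding a status):

```rust
use axum::{http::StatusCode, response::{IntoResponse, Response}, Json};

#[derive(Debug)]
struct MyError;

impl std::fmt::Display for MyError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("something broke")
    }
}

impl IntoResponse for MyError {
    fn into_response(self) -> Response {
        // Log the full error server-side under a correlation id, but hand
        // the client only the id and the top-level message.
        let id = uuid::Uuid::new_v4().to_string();
        tracing::error!("{id}: {self}");
        let body = serde_json::json!({ "id": id, "error": self.to_string() });
        (StatusCode::INTERNAL_SERVER_ERROR, Json(body)).into_response()
    }
}
```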

@@ -1,8 +0,0 @@
//! Handlers for HTML responses.
pub mod archive;
pub mod authors;
pub mod books;
pub mod cover;
pub mod recent;
pub mod search;
pub mod series;
@@ -1,47 +0,0 @@
use axum::{http::StatusCode, response::Response};
use snafu::{ResultExt, Snafu};

use crate::{
    APP_NAME, APP_VERSION,
    api::{
        TAG,
        download::{self, DownloadError},
        error::{ErrorResponse, HttpStatus},
    },
    http_error,
};

const SOURCE_ARCHIVE: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/archive.zip"));

#[derive(Debug, Snafu)]
/// Errors that can occur when downloading the source code archive.
pub enum ArchiveError {
    /// A failure to stream the source code archive.
    #[snafu(display("Failed to stream source code archive."))]
    Download { source: DownloadError },
}
impl HttpStatus for ArchiveError {
    fn status_code(&self) -> StatusCode {
        match self {
            ArchiveError::Download { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(ArchiveError);

/// Handle a request for the source code of the server.
#[utoipa::path(
    get,
    path = "/archive",
    tag = TAG,
    responses(
        (status = OK, content_type = "application/zip"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn handler() -> Result<Response, ArchiveError> {
    let file_name = format!("{APP_NAME}-{APP_VERSION}.zip");
    download::handler(&file_name, SOURCE_ARCHIVE, "application/zip")
        .await
        .context(DownloadSnafu)
}
@@ -1,146 +0,0 @@
use std::sync::Arc;

use axum::{
    extract::{Path, State},
    http::StatusCode,
    response::{Html, IntoResponse, Response},
};
use snafu::{ResultExt, Snafu};
use tera::Context;

use crate::{
    api::{
        SortOrder, TAG,
        authors::{self, SingleAuthorError},
        error::{ErrorResponse, HttpStatus},
        paginated::{self, PaginationError},
    },
    app_state::AppState,
    http_error,
    templates::TEMPLATES,
};

#[derive(Debug, Snafu)]
/// Errors that can occur when retrieving authors.
pub enum RetrieveError {
    /// A failure to fetch pagination data.
    #[snafu(display("Failed to fetch pagination data."))]
    Authors { source: AuthorError },
}
impl HttpStatus for RetrieveError {
    fn status_code(&self) -> StatusCode {
        match self {
            RetrieveError::Authors { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(RetrieveError);

/// Get the first page of all authors.
#[utoipa::path(
    get,
    path = "/authors",
    tag = TAG,
    responses(
        (status = OK, content_type = "text/html"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn handler_init(State(state): State<Arc<AppState>>) -> Result<Response, RetrieveError> {
    authors(&state, None, SortOrder::ASC)
        .await
        .context(AuthorsSnafu)
}

/// Get a paginated list of all authors.
#[utoipa::path(
    get,
    path = "/authors/{cursor}/{sort_order}",
    tag = TAG,
    responses(
        (status = OK, content_type = "text/html"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn handler(
    Path((cursor, sort_order)): Path<(String, SortOrder)>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
    authors(&state, Some(&cursor), sort_order)
        .await
        .context(AuthorsSnafu)
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching an author.
pub enum AuthorError {
    /// A failure to fetch pagination data.
    #[snafu(display("Failed to fetch pagination data."))]
    Pagination { source: PaginationError },
}

/// Render a paginated list of authors.
async fn authors(
    state: &Arc<AppState>,
    cursor: Option<&str>,
    sort_order: SortOrder,
) -> Result<Response, AuthorError> {
    paginated::render(
        "authors",
        || state.calibre.authors(25, cursor, &sort_order.into()),
        |author| author.sort.clone(),
        |cursor| state.calibre.has_previous_authors(cursor),
        |cursor| state.calibre.has_more_authors(cursor),
    )
    .context(PaginationSnafu)
}

#[derive(Debug, Snafu)]
/// Errors that can occur when retrieving a single author.
pub enum SingleError {
    /// A failure to fetch author data.
    #[snafu(display("Failed to fetch author data."))]
    Data { source: SingleAuthorError },
    /// A failure to render the template.
    #[snafu(display("Failed to render template."))]
    Render { source: tera::Error },
}
impl HttpStatus for SingleError {
    fn status_code(&self) -> StatusCode {
        match self {
            SingleError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
            SingleError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(SingleError);

/// Get a single author and all their books.
#[utoipa::path(
    get,
    path = "/authors/{id}",
    tag = TAG,
    responses(
        (status = OK, content_type = "text/html"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn single(
    Path(id): Path<u64>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, SingleError> {
    let (author, books) = authors::single(id, &state.calibre, &state.config.library_path)
        .await
        .context(DataSnafu)?;

    let mut context = Context::new();
    context.insert("title", &author.name);
    context.insert("nav", "authors");
    context.insert("books", &books);

    Ok(TEMPLATES
        .render("book_list", &context)
        .context(RenderSnafu)
        .map(Html)?
        .into_response())
}
@@ -1,170 +0,0 @@
//! Handle requests for books.
use std::{io, sync::Arc};

use axum::{
    extract::{Path, State},
    http::StatusCode,
    response::Response,
};
use calibre_db::calibre::DataStoreError;
use snafu::{ResultExt, Snafu};
use tokio::fs::File;

use crate::{
    api::{
        SortOrder, TAG, download,
        error::{ErrorResponse, HttpStatus},
        paginated::{self, PaginationError},
    },
    app_state::AppState,
    data::book::{Book, Format},
    http_error,
    opds::media_type::MediaType,
};

#[derive(Debug, Snafu)]
/// Errors that can occur when retrieving books.
pub enum RetrieveError {
    /// A failure to fetch pagination data.
    #[snafu(display("Failed to fetch pagination data."))]
    Books { source: BookError },
}
impl HttpStatus for RetrieveError {
    fn status_code(&self) -> StatusCode {
        match self {
            RetrieveError::Books { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(RetrieveError);

/// Get the first page of all books.
#[utoipa::path(
    get,
    path = "/books",
    tag = TAG,
    responses(
        (status = OK, content_type = "text/html"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn handler_init(State(state): State<Arc<AppState>>) -> Result<Response, RetrieveError> {
    books(&state, None, SortOrder::ASC)
        .await
        .context(BooksSnafu)
}

/// Get a paginated list of all books.
#[utoipa::path(
    get,
    path = "/books/{cursor}/{sort_order}",
    tag = TAG,
    responses(
        (status = OK, content_type = "text/html"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn handler(
    Path((cursor, sort_order)): Path<(String, SortOrder)>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
    books(&state, Some(&cursor), sort_order)
        .await
        .context(BooksSnafu)
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching a book.
pub enum BookError {
    /// A failure to fetch pagination data.
    #[snafu(display("Failed to fetch pagination data."))]
    Pagination { source: PaginationError },
}

/// Render a paginated list of books.
async fn books(
    state: &Arc<AppState>,
    cursor: Option<&str>,
    sort_order: SortOrder,
) -> Result<Response, BookError> {
    paginated::render(
        "books",
        || {
            state
                .calibre
                .books(25, cursor, &sort_order.into())
                .map(|x| {
                    x.iter()
                        .filter_map(|y| {
                            Book::full_book(y, &state.calibre, &state.config.library_path)
                        })
                        .collect()
                })
        },
        |book| book.data.sort.clone(),
        |cursor| state.calibre.has_previous_books(cursor),
        |cursor| state.calibre.has_more_books(cursor),
    )
    .context(PaginationSnafu)
}

#[derive(Debug, Snafu)]
/// Errors that can occur when downloading a book.
pub enum DownloadError {
    /// A failure to fetch book data.
    #[snafu(display("Failed to fetch book data."))]
    BookData { source: DataStoreError },
    /// The requested book was not found.
    #[snafu(display("No such book."))]
    NotFound,
    /// The requested book file was not found.
    #[snafu(display("No such book."))]
    FileNotFound { source: io::Error },
    /// A failure to stream the book file.
    #[snafu(display("Failed to stream book file."))]
    Stream { source: download::DownloadError },
}
impl HttpStatus for DownloadError {
    fn status_code(&self) -> StatusCode {
        match self {
            DownloadError::BookData { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
            DownloadError::NotFound => StatusCode::NOT_FOUND,
            DownloadError::FileNotFound { source: _ } => StatusCode::NOT_FOUND,
            DownloadError::Stream { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(DownloadError);

/// Download a book file in the specified format.
#[utoipa::path(
    get,
    path = "/book/{id}/{format}",
    tag = TAG,
    responses(
        (status = OK, content_type = "application/*"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn download(
    Path((id, format)): Path<(u64, String)>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, DownloadError> {
    let book = state.calibre.scalar_book(id).context(BookDataSnafu)?;
    let book = Book::full_book(&book, &state.calibre, &state.config.library_path)
        .ok_or(NotFoundSnafu.build())?;
    let format = Format(format);
    let file_name = book.formats.get(&format).ok_or(NotFoundSnafu.build())?;
    let file_path = state
        .config
        .library_path
        .join(book.data.path)
        .join(file_name);
    let file = File::open(file_path).await.context(FileNotFoundSnafu)?;
    let content_type: MediaType = format.into();
    let content_type = format!("{content_type}");

    download::handler(file_name, file, &content_type)
        .await
        .context(StreamSnafu)
}
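All three list endpoints (authors, books, series) funnel into `paginated::render` with four closures. The contract those call sites imply, sketched as a standalone signature (paraphrased, not the repo's actual definition):

```rust
/// `fetch` loads one page, `cursor_of` extracts the keyset cursor from an
/// item, and the two predicates drive the prev/next navigation links.
fn render_sketch<T, E>(
    template: &str,
    fetch: impl Fn() -> Result<Vec<T>, E>,
    cursor_of: impl Fn(&T) -> String,
    has_prev: impl Fn(&str) -> Result<bool, E>,
    has_more: impl Fn(&str) -> Result<bool, E>,
) -> Result<(), E> {
    let items = fetch()?;
    if let (Some(first), Some(last)) = (items.first(), items.last()) {
        let show_prev = has_prev(&cursor_of(first))?;
        let show_next = has_more(&cursor_of(last))?;
        // The real function renders `template` with the items and both
        // flags; the template rendering itself is elided here.
        let _ = (template, show_prev, show_next);
    }
    Ok(())
}
```

Putting the queries behind closures keeps `render` ignorant of calibre types: the same code paginates `Author`, `Book`, and `Series` rows as long as each can yield a string cursor.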

@@ -1,140 +0,0 @@
//! Handle requests for book covers.
use std::{fs::File, io, path::Path as FilePath, sync::Arc};

use axum::{
    extract::{Path, State},
    http::StatusCode,
    response::Response,
};
use calibre_db::calibre::{Calibre, DataStoreError};
use snafu::{ResultExt, Snafu};
use tokio::fs::File as AsyncFile;

use crate::{
    api::{
        TAG,
        download::{self, DownloadError},
        error::{ErrorResponse, HttpStatus},
    },
    app_state::AppState,
    cache::{self, RetrieveThumbnailError},
    http_error,
};

#[derive(Debug, Snafu)]
/// Errors that can occur when retrieving a cover.
pub enum RetrieveError {
    /// A failure to fetch the cover.
    #[snafu(display("Failed to fetch cover."))]
    Cover { source: CoverError },
    /// A failure to open the cover file.
    #[snafu(display("Failed to open cover."))]
    CoverOpen { source: io::Error },
}
impl HttpStatus for RetrieveError {
    fn status_code(&self) -> StatusCode {
        match self {
            RetrieveError::Cover { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
            RetrieveError::CoverOpen { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(RetrieveError);

/// Get the thumbnail for a book cover.
#[utoipa::path(
    get,
    path = "/cover/{id}/thumbnail",
    tag = TAG,
    responses(
        (status = OK, content_type = "image/jpeg"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn thumbnail(
    Path(id): Path<u64>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
    cover(
        &state.calibre,
        &state.config.library_path,
        &state.config.cache_path,
        id,
        |cover_path, cache_path| {
            cache::get_thumbnail(cover_path, cache_path).context(ThumbnailSnafu)
        },
    )
    .await
    .context(CoverSnafu)
}

/// Get the full-size cover for a book.
#[utoipa::path(
    get,
    path = "/cover/{id}",
    tag = TAG,
    responses(
        (status = OK, content_type = "image/jpeg"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn full(
    Path(id): Path<u64>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
    cover(
        &state.calibre,
        &state.config.library_path,
        &state.config.cache_path,
        id,
        |cover_path, _| File::open(cover_path).context(FileOpenSnafu),
    )
    .await
    .context(CoverSnafu)
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching a cover.
pub enum CoverError {
    /// A failure to fetch book data.
    #[snafu(display("Failed to fetch book data."))]
    BookData { source: DataStoreError },
    /// The requested cover was not found.
    #[snafu(display("No such cover"))]
    NotFound { source: CoverFetchError },
    /// A failure to stream the cover.
    #[snafu(display("Failed to stream cover."))]
    StreamCover { source: DownloadError },
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching a cover file.
pub enum CoverFetchError {
    /// A failure to fetch the cover thumbnail.
    #[snafu(display("Failed to fetch cover thumbnail."))]
    Thumbnail { source: RetrieveThumbnailError },
    /// A failure to open the cover file.
    #[snafu(display("Failed to open cover file."))]
    FileOpen { source: io::Error },
}

/// Generic cover handler.
async fn cover<F>(
    calibre: &Calibre,
    library_path: &FilePath,
    cache_path: &FilePath,
    id: u64,
    f: F,
) -> Result<Response, CoverError>
where
    F: Fn(&FilePath, &FilePath) -> Result<File, CoverFetchError>,
{
    let book = calibre.scalar_book(id).context(BookDataSnafu)?;
    let cover_path = library_path.join(book.path).join("cover.jpg");

    let cover = f(&cover_path, cache_path).context(NotFoundSnafu)?;
    let cover = AsyncFile::from_std(cover);
    download::handler("cover.jpg", cover, "image/jpeg")
        .await
        .context(StreamCoverSnafu)
}
@@ -1,70 +0,0 @@
//! Handle requests for recent books.

use std::sync::Arc;

use axum::{
    extract::State,
    http::StatusCode,
    response::{Html, IntoResponse, Response},
};
use snafu::{ResultExt, Snafu};
use tera::Context;

use crate::{
    api::{
        TAG,
        books::{self, RecentBooksError},
        error::{ErrorResponse, HttpStatus},
    },
    app_state::AppState,
    http_error,
    templates::TEMPLATES,
};

/// Errors that occur during query processing.
#[derive(Debug, Snafu)]
pub enum RecentError {
    #[snafu(display("Failed to fetch recent books."))]
    RecentBooks { source: RecentBooksError },
    #[snafu(display("Failed to render template."))]
    Template { source: tera::Error },
}
impl HttpStatus for RecentError {
    fn status_code(&self) -> StatusCode {
        match self {
            RecentError::RecentBooks { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
            RecentError::Template { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(RecentError);

/// Get recently added books
///
/// Provides a list of the 25 most recently added books.
/// The format can be either HTML or an OPDS feed, depending on the `Accept` header.
#[utoipa::path(
    get,
    path = "/recent",
    tag = TAG,
    responses(
        (status = 200, description = "List of recent books", content_type = "text/html"),
        (status = 500, description = "Error retrieving books from database", body = ErrorResponse)
    )
)]
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, RecentError> {
    let recent_books = books::recent(&state.calibre, &state.config.library_path)
        .await
        .context(RecentBooksSnafu)?;

    let mut context = Context::new();
    context.insert("title", "");
    context.insert("nav", "recent");
    context.insert("books", &recent_books);

    Ok(TEMPLATES
        .render("book_list", &context)
        .map(Html)
        .context(TemplateSnafu)?
        .into_response())
}
@@ -1,75 +0,0 @@
use std::sync::Arc;

use axum::{
    extract::{Query, State},
    http::StatusCode,
    response::{Html, IntoResponse, Response},
};
use serde::Deserialize;
use snafu::{ResultExt, Snafu};
use tera::Context;

use crate::{
    api::{
        TAG,
        error::{ErrorResponse, HttpStatus},
        search::{self, SearchQueryError},
    },
    app_state::AppState,
    http_error,
    templates::TEMPLATES,
};

/// Errors that occur during query processing.
#[derive(Debug, Snafu)]
pub enum SearchError {
    #[snafu(display("Failed to search for books."))]
    Query { source: SearchQueryError },
    #[snafu(display("Failed to render template."))]
    Template { source: tera::Error },
}
impl HttpStatus for SearchError {
    fn status_code(&self) -> StatusCode {
        match self {
            SearchError::Query { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
            SearchError::Template { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(SearchError);

#[derive(Deserialize)]
/// Parameters for a search request.
pub struct Params {
    /// Query for a search request.
    query: String,
}
/// Search for books based on a query.
#[utoipa::path(
    get,
    path = "/search",
    tag = TAG,
    responses(
        (status = 200, content_type = "text/html"),
        (status = 500, description = "Error retrieving books from database", body = ErrorResponse)
    )
)]
pub async fn handler(
    Query(params): Query<Params>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, SearchError> {
    let books = search::query(&params.query, &state.calibre, &state.config.library_path)
        .await
        .context(QuerySnafu)?;

    let mut context = Context::new();
    context.insert("title", "Search Results");
    context.insert("nav", "search");
    context.insert("books", &books);

    Ok(TEMPLATES
        .render("book_list", &context)
        .context(TemplateSnafu)
        .map(Html)?
        .into_response())
}
@@ -1,146 +0,0 @@
use std::sync::Arc;

use axum::{
    extract::{Path, State},
    http::StatusCode,
    response::{Html, IntoResponse, Response},
};
use snafu::{ResultExt, Snafu};
use tera::Context;

use crate::{
    api::{
        SortOrder, TAG,
        error::{ErrorResponse, HttpStatus},
        paginated::{self, PaginationError},
        series::{self, SingleSeriesError},
    },
    app_state::AppState,
    http_error,
    templates::TEMPLATES,
};

#[derive(Debug, Snafu)]
/// Errors that can occur when retrieving series.
pub enum RetrieveError {
    /// A failure to fetch series data.
    #[snafu(display("Failed to fetch series data."))]
    Series { source: SeriesError },
}
impl HttpStatus for RetrieveError {
    fn status_code(&self) -> StatusCode {
        match self {
            RetrieveError::Series { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(RetrieveError);

/// Get the first page of all series.
#[utoipa::path(
    get,
    path = "/series",
    tag = TAG,
    responses(
        (status = OK, content_type = "text/html"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn handler_init(State(state): State<Arc<AppState>>) -> Result<Response, RetrieveError> {
    series(&state, None, SortOrder::ASC)
        .await
        .context(SeriesSnafu)
}

/// Get a paginated list of all series.
#[utoipa::path(
    get,
    path = "/series/{cursor}/{sort_order}",
    tag = TAG,
    responses(
        (status = OK, content_type = "text/html"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn handler(
    Path((cursor, sort_order)): Path<(String, SortOrder)>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, RetrieveError> {
    series(&state, Some(&cursor), sort_order)
        .await
        .context(SeriesSnafu)
}

#[derive(Debug, Snafu)]
/// Errors that can occur when fetching a series.
pub enum SeriesError {
    /// A failure to fetch pagination data.
    #[snafu(display("Failed to fetch pagination data."))]
    Pagination { source: PaginationError },
}

/// Render a paginated list of series.
async fn series(
    state: &Arc<AppState>,
    cursor: Option<&str>,
    sort_order: SortOrder,
) -> Result<Response, SeriesError> {
    paginated::render(
        "series",
        || state.calibre.series(25, cursor, &sort_order.into()),
        |series| series.sort.clone(),
        |cursor| state.calibre.has_previous_series(cursor),
        |cursor| state.calibre.has_more_series(cursor),
    )
    .context(PaginationSnafu)
}

#[derive(Debug, Snafu)]
/// Errors that can occur when retrieving a single series.
pub enum SingleError {
    /// A failure to fetch series data.
    #[snafu(display("Failed to fetch series data."))]
    Data { source: SingleSeriesError },
    /// A failure to render the template.
    #[snafu(display("Failed to render template."))]
    Render { source: tera::Error },
}
impl HttpStatus for SingleError {
    fn status_code(&self) -> StatusCode {
        match self {
            SingleError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
            SingleError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(SingleError);

/// Get a single series and all its books.
#[utoipa::path(
    get,
    path = "/series/{id}",
    tag = TAG,
    responses(
        (status = OK, content_type = "text/html"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn single(
    Path(id): Path<u64>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, SingleError> {
    let (series, books) = series::single(id, &state.calibre, &state.config.library_path)
        .await
        .context(DataSnafu)?;

    let mut context = Context::new();
    context.insert("title", &series.name);
    context.insert("nav", "series");
    context.insert("books", &books);

    Ok(TEMPLATES
        .render("book_list", &context)
        .context(RenderSnafu)
        .map(Html)?
        .into_response())
}
@@ -1,6 +0,0 @@
//! Handlers for OPDS feeds.
pub mod authors;
pub mod books;
pub mod recent;
pub mod search;
pub mod series;
@@ -1,146 +0,0 @@
use std::sync::Arc;

use axum::{
    extract::{Path, State},
    http::{StatusCode, header},
    response::{IntoResponse, Response},
};
use calibre_db::{calibre::DataStoreError, data::author::Author as DbAuthor};
use snafu::{ResultExt, Snafu};
use time::OffsetDateTime;

use crate::{
    APP_NAME,
    api::{
        SortOrder,
        authors::{self, SingleAuthorError},
        error::{ErrorResponse, HttpStatus},
        OPDS_TAG,
    },
    app_state::AppState,
    http_error,
    opds::{
        entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
        relation::Relation,
    },
};

#[derive(Debug, Snafu)]
/// Errors that can occur when retrieving all authors.
pub enum AuthorsError {
    /// A failure to fetch author data.
    #[snafu(display("Failed to fetch author data."))]
    Data { source: DataStoreError },
    /// A failure to render author data.
    #[snafu(display("Failed to render author data."))]
    Render { source: AsXmlError },
}
impl HttpStatus for AuthorsError {
    fn status_code(&self) -> StatusCode {
        match self {
            AuthorsError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
            AuthorsError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(AuthorsError);

/// Render all authors as OPDS entries embedded in a feed.
#[utoipa::path(
    get,
    path = "/authors",
    tag = OPDS_TAG,
    responses(
        (status = OK, content_type = "application/atom+xml"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, AuthorsError> {
    let authors: Vec<DbAuthor> = state
        .calibre
        .authors(u32::MAX.into(), None, &SortOrder::ASC.into())
        .context(DataSnafu)?;

    let entries: Vec<Entry> = authors.into_iter().map(Entry::from).collect();
    let now = OffsetDateTime::now_utc();

    let self_link = Link {
        href: "/opds/authors".to_string(),
        media_type: MediaType::Navigation,
        rel: Relation::Myself,
        title: None,
        count: None,
    };
    let feed = Feed::create(
        now,
        &format!("{APP_NAME}:authors"),
        "All Authors",
        self_link,
        vec![],
        entries,
    );
    let xml = feed.as_xml().context(RenderSnafu)?;

    Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
}

#[derive(Debug, Snafu)]
/// Errors that can occur when retrieving a single author.
pub enum SingleError {
    /// A failure to fetch author data.
    #[snafu(display("Failed to fetch author data."))]
    AuthorData { source: SingleAuthorError },
    /// A failure to render the feed.
    #[snafu(display("Failed to render feed."))]
    FeedRender { source: AsXmlError },
}
impl HttpStatus for SingleError {
    fn status_code(&self) -> StatusCode {
        match self {
            SingleError::AuthorData { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
            SingleError::FeedRender { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
http_error!(SingleError);

/// Render a single author as an OPDS entry embedded in a feed.
#[utoipa::path(
    get,
    path = "/authors/{id}",
    tag = OPDS_TAG,
    responses(
        (status = OK, content_type = "application/atom+xml"),
        (status = 500, description = "Server failure.", body = ErrorResponse)
    )
)]
pub async fn single(
    Path(id): Path<u64>,
    State(state): State<Arc<AppState>>,
) -> Result<Response, SingleError> {
    let (author, books) = authors::single(id, &state.calibre, &state.config.library_path)
        .await
        .context(AuthorDataSnafu)?;

    let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
    let now = OffsetDateTime::now_utc();

    let self_link = Link {
        href: format!("/opds/authors/{}", author.id),
        media_type: MediaType::Navigation,
        rel: Relation::Myself,
        title: None,
        count: None,
    };
    let feed = Feed::create(
        now,
        &format!("{APP_NAME}author:{}", author.id),
        &author.name,
        self_link,
        vec![],
        entries,
    );
    let xml = feed.as_xml().context(FeedRenderSnafu)?;

    Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
}
@ -1,102 +0,0 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
extract::State,
|
||||
http::{StatusCode, header},
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use calibre_db::calibre::DataStoreError;
|
||||
use snafu::{ResultExt, Snafu};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::{
|
||||
APP_NAME,
|
||||
api::{
|
||||
SortOrder,
|
||||
error::{ErrorResponse, HttpStatus},
|
||||
OPDS_TAG,
|
||||
},
|
||||
app_state::AppState,
|
||||
data::book::Book,
|
||||
http_error,
|
||||
opds::{
|
||||
entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
|
||||
relation::Relation,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when retrieving books for OPDS.
|
||||
pub enum OdpsBooksError {
|
||||
/// A failure to fetch book data.
|
||||
#[snafu(display("Failed to fetch book data."))]
|
||||
Data { source: DataStoreError },
|
||||
/// A failure to render book data.
|
||||
#[snafu(display("Failed to render book data."))]
|
||||
Render { source: RenderError },
|
||||
}
|
||||
impl HttpStatus for OdpsBooksError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match self {
|
||||
OdpsBooksError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
OpdsBooksError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
}
|
||||
}
|
||||
}
|
||||
http_error!(OpdsBooksError);
|
||||
|
||||
/// Render all books as OPDS entries embedded in a feed.
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/books",
|
||||
tag = OPDS_TAG,
|
||||
responses(
|
||||
(status = OK, content_type = "application/atom+xml"),
|
||||
(status = 500, description = "Server failure.", body = ErrorResponse)
|
||||
)
|
||||
)]
|
||||
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, OpdsBooksError> {
|
||||
let books: Vec<Book> = state
|
||||
.calibre
|
||||
.books(u32::MAX.into(), None, &SortOrder::ASC.into())
|
||||
.map(|x| {
|
||||
x.iter()
|
||||
.filter_map(|y| Book::full_book(y, &state.calibre, &state.config.library_path))
|
||||
.collect()
|
||||
})
|
||||
.context(DataSnafu)?;
|
||||
render_books(books).await.context(RenderSnafu)
|
||||
}
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when rendering a feed.
|
||||
pub enum RenderError {
|
||||
/// A failure to create the OPDS feed.
|
||||
#[snafu(display("Failed to create opds feed."))]
|
||||
Feed { source: AsXmlError },
|
||||
}
|
||||
|
||||
/// Render a list of books as OPDS entries in a feed.
|
||||
pub(crate) async fn render_books(books: Vec<Book>) -> Result<Response, RenderError> {
|
||||
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
|
||||
let now = OffsetDateTime::now_utc();
|
||||
|
||||
let self_link = Link {
|
||||
href: "/opds/books".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Myself,
|
||||
title: None,
|
||||
count: None,
|
||||
};
|
||||
let feed = Feed::create(
|
||||
now,
|
||||
&format!("{APP_NAME}:books"),
|
||||
"All Books",
|
||||
self_link,
|
||||
vec![],
|
||||
entries,
|
||||
);
|
||||
let xml = feed.as_xml().context(FeedSnafu)?;
|
||||
|
||||
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
|
||||
}
|
|
@@ -1,81 +0,0 @@
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
extract::State,
|
||||
http::{StatusCode, header},
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use snafu::{ResultExt, Snafu};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::{
|
||||
APP_NAME,
|
||||
api::{
|
||||
books::{self, RecentBooksError},
|
||||
error::{ErrorResponse, HttpStatus},
|
||||
OPDS_TAG,
|
||||
},
|
||||
app_state::AppState,
|
||||
http_error,
|
||||
opds::{
|
||||
entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
|
||||
relation::Relation,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when retrieving recent books.
|
||||
pub enum RecentError {
|
||||
/// A failure to fetch recent books.
|
||||
#[snafu(display("Failed to fetch recent books."))]
|
||||
Data { source: RecentBooksError },
|
||||
/// A failure to render the feed.
|
||||
#[snafu(display("Failed to render feed."))]
|
||||
Render { source: AsXmlError },
|
||||
}
|
||||
impl HttpStatus for RecentError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match self {
|
||||
RecentError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
RecentError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
}
|
||||
}
|
||||
}
|
||||
http_error!(RecentError);
|
||||
|
||||
/// Get recently added books as an OPDS feed.
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/recent",
|
||||
tag = OPDS_TAG,
|
||||
responses(
|
||||
(status = OK, content_type = "application/atom+xml"),
|
||||
(status = 500, description = "Server failure.", body = ErrorResponse)
|
||||
)
|
||||
)]
|
||||
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, RecentError> {
|
||||
let recent_books = books::recent(&state.calibre, &state.config.library_path)
|
||||
.await
|
||||
.context(DataSnafu)?;
|
||||
let entries: Vec<Entry> = recent_books.into_iter().map(Entry::from).collect();
|
||||
let now = OffsetDateTime::now_utc();
|
||||
|
||||
let self_link = Link {
|
||||
href: "/opds/recent".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Myself,
|
||||
title: None,
|
||||
count: None,
|
||||
};
|
||||
let feed = Feed::create(
|
||||
now,
|
||||
&format!("{APP_NAME}:recentbooks"),
|
||||
"Recent Books",
|
||||
self_link,
|
||||
vec![],
|
||||
entries,
|
||||
);
|
||||
let xml = feed.as_xml().context(RenderSnafu)?;
|
||||
|
||||
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
|
||||
}
|
|
@@ -1,115 +0,0 @@
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
extract::{Query, State},
|
||||
http::{StatusCode, header},
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use snafu::{ResultExt, Snafu};
|
||||
|
||||
use super::books::{RenderError, render_books};
|
||||
use crate::{
|
||||
APP_NAME,
|
||||
api::{
|
||||
error::{ErrorResponse, HttpStatus},
|
||||
search::{self, SearchQueryError},
|
||||
OPDS_TAG,
|
||||
},
|
||||
app_state::AppState,
|
||||
http_error,
|
||||
opds::{
|
||||
error::AsXmlError,
|
||||
search::{OpenSearchDescription, Url},
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when searching for books.
|
||||
pub enum SearchError {
|
||||
/// A failure to query for books.
|
||||
#[snafu(display("Failed to query books."))]
|
||||
Query { source: SearchQueryError },
|
||||
/// A failure to render the feed.
|
||||
#[snafu(display("Failed to render feed."))]
|
||||
Render { source: RenderError },
|
||||
}
|
||||
impl HttpStatus for SearchError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match self {
|
||||
SearchError::Query { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
SearchError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
}
|
||||
}
|
||||
}
|
||||
http_error!(SearchError);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
/// Parameters for a search request.
|
||||
pub struct Params {
|
||||
/// Query for a search request.
|
||||
query: String,
|
||||
}
|
||||
/// Search for books and return results as an OPDS feed.
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/search",
|
||||
tag = OPDS_TAG,
|
||||
responses(
|
||||
(status = 200, content_type = "application/atom+xml"),
|
||||
(status = 500, description = "Error retrieving books from database", body = ErrorResponse)
|
||||
)
|
||||
)]
|
||||
pub async fn handler(
|
||||
Query(params): Query<Params>,
|
||||
State(state): State<Arc<AppState>>,
|
||||
) -> Result<Response, SearchError> {
|
||||
let books = search::query(&params.query, &state.calibre, &state.config.library_path)
|
||||
.await
|
||||
.context(QuerySnafu)?;
|
||||
|
||||
render_books(books).await.context(RenderSnafu)
|
||||
}
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when retrieving search information.
|
||||
pub enum InfoError {
|
||||
/// A failure to render the feed.
|
||||
#[snafu(display("Failed to render feed."))]
|
||||
FeedRender { source: AsXmlError },
|
||||
}
|
||||
impl HttpStatus for InfoError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match self {
|
||||
InfoError::FeedRender { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
http_error!(InfoError);
|
||||
|
||||
/// Render search information as an OPDS feed.
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/search/info",
|
||||
tag = OPDS_TAG,
|
||||
responses(
|
||||
(status = 200, content_type = "application/atom+xml"),
|
||||
(status = 500, description = "Internal error", body = ErrorResponse)
|
||||
)
|
||||
)]
|
||||
pub async fn info() -> Result<Response, InfoError> {
|
||||
let search = OpenSearchDescription {
|
||||
short_name: APP_NAME.to_string(),
|
||||
description: "Search for ebooks".to_string(),
|
||||
input_encoding: "UTF-8".to_string(),
|
||||
output_encoding: "UTF-8".to_string(),
|
||||
url: Url {
|
||||
type_name: "application/atom+xml".to_string(),
|
||||
template: "/opds/search?query={searchTerms}".to_string(),
|
||||
},
|
||||
};
|
||||
let xml = search.as_xml().context(FeedRenderSnafu)?;
|
||||
|
||||
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
|
||||
}
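The OpenSearch `template` above is expanded client-side; a quick illustration of the substitution (not part of the diff):

```rust
// How an OPDS client fills the {searchTerms} placeholder (illustrative):
fn expand(template: &str, terms: &str) -> String {
    template.replace("{searchTerms}", terms)
}

// expand("/opds/search?query={searchTerms}", "dune") == "/opds/search?query=dune"
```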
|
|
@@ -1,146 +0,0 @@
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::{StatusCode, header},
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use calibre_db::calibre::DataStoreError;
|
||||
use snafu::{ResultExt, Snafu};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::{
|
||||
APP_NAME,
|
||||
api::{
|
||||
SortOrder,
|
||||
error::{ErrorResponse, HttpStatus},
|
||||
series::{self, SingleSeriesError},
|
||||
OPDS_TAG,
|
||||
},
|
||||
app_state::AppState,
|
||||
http_error,
|
||||
opds::{
|
||||
entry::Entry, error::AsXmlError, feed::Feed, link::Link, media_type::MediaType,
|
||||
relation::Relation,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when retrieving all series.
|
||||
pub enum SeriesError {
|
||||
/// A failure to fetch series data.
|
||||
#[snafu(display("Failed to fetch series data."))]
|
||||
Data { source: DataStoreError },
|
||||
/// A failure to render series data.
|
||||
#[snafu(display("Failed to render series data."))]
|
||||
Render { source: AsXmlError },
|
||||
}
|
||||
impl HttpStatus for SeriesError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match self {
|
||||
SeriesError::Data { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
SeriesError::Render { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
}
|
||||
}
|
||||
}
|
||||
http_error!(SeriesError);
|
||||
|
||||
/// Render all series as OPDS entries embedded in a feed.
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/series",
|
||||
tag = OPDS_TAG,
|
||||
responses(
|
||||
(status = OK, content_type = "application/atom+xml"),
|
||||
(status = 500, description = "Server failure.", body = ErrorResponse)
|
||||
)
|
||||
)]
|
||||
pub async fn handler(State(state): State<Arc<AppState>>) -> Result<Response, SeriesError> {
|
||||
let series = state
|
||||
.calibre
|
||||
.series(u32::MAX.into(), None, &SortOrder::ASC.into())
|
||||
.context(DataSnafu)?;
|
||||
|
||||
let entries: Vec<Entry> = series.into_iter().map(Entry::from).collect();
|
||||
let now = OffsetDateTime::now_utc();
|
||||
|
||||
let self_link = Link {
|
||||
href: "/opds/series".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Myself,
|
||||
title: None,
|
||||
count: None,
|
||||
};
|
||||
let feed = Feed::create(
|
||||
now,
|
||||
&format!("{APP_NAME}:series"),
|
||||
"All Series",
|
||||
self_link,
|
||||
vec![],
|
||||
entries,
|
||||
);
|
||||
let xml = feed.as_xml().context(RenderSnafu)?;
|
||||
|
||||
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
|
||||
}
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when retrieving a single series.
|
||||
pub enum SingleError {
|
||||
/// A failure to fetch series data.
|
||||
#[snafu(display("Failed to fetch series data."))]
|
||||
SeriesData { source: SingleSeriesError },
|
||||
/// A failure to render the feed.
|
||||
#[snafu(display("Failed to render feed."))]
|
||||
FeedRender { source: AsXmlError },
|
||||
}
|
||||
impl HttpStatus for SingleError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match self {
|
||||
SingleError::SeriesData { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
SingleError::FeedRender { source: _ } => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
}
|
||||
}
|
||||
}
|
||||
http_error!(SingleError);
|
||||
|
||||
/// Render a single series as an OPDS entry embedded in a feed.
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/series/{id}",
|
||||
tag = OPDS_TAG,
|
||||
responses(
|
||||
(status = OK, content_type = "application/atom+xml"),
|
||||
(status = 500, description = "Server failure.", body = ErrorResponse)
|
||||
)
|
||||
)]
|
||||
pub async fn single(
|
||||
Path(id): Path<u64>,
|
||||
State(state): State<Arc<AppState>>,
|
||||
) -> Result<Response, SingleError> {
|
||||
let (series, books) = series::single(id, &state.calibre, &state.config.library_path)
|
||||
.await
|
||||
.context(SeriesDataSnafu)?;
|
||||
|
||||
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
|
||||
let now = OffsetDateTime::now_utc();
|
||||
|
||||
let self_link = Link {
|
||||
href: format!("/opds/series/{}", series.id),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Myself,
|
||||
title: None,
|
||||
count: None,
|
||||
};
|
||||
let feed = Feed::create(
|
||||
now,
|
||||
&format!("{APP_NAME}:series:{}", series.id),
|
||||
&series.name,
|
||||
self_link,
|
||||
vec![],
|
||||
entries,
|
||||
);
|
||||
let xml = feed.as_xml().context(FeedRenderSnafu)?;
|
||||
|
||||
Ok(([(header::CONTENT_TYPE, "application/atom+xml")], xml).into_response())
|
||||
}
|
|
@@ -1,53 +0,0 @@
//! HTTP route definitions and router configuration.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::{response::Redirect, routing::get};
|
||||
use tower_http::trace::TraceLayer;
|
||||
use utoipa_axum::{router::OpenApiRouter, routes};
|
||||
|
||||
use crate::{
|
||||
api::{html, opds, static_files},
|
||||
app_state::AppState,
|
||||
};
|
||||
|
||||
/// Create the main API router with all endpoints and middleware.
|
||||
pub fn router(state: AppState) -> OpenApiRouter {
|
||||
let store = Arc::new(state);
|
||||
|
||||
let opds_routes = OpenApiRouter::new()
|
||||
.routes(routes!(opds::books::handler))
|
||||
.routes(routes!(opds::recent::handler))
|
||||
.routes(routes!(opds::series::handler))
|
||||
.routes(routes!(opds::series::single))
|
||||
.routes(routes!(opds::authors::handler))
|
||||
.routes(routes!(opds::authors::single))
|
||||
.routes(routes!(opds::search::handler))
|
||||
.routes(routes!(opds::search::info))
|
||||
.layer(TraceLayer::new_for_http())
|
||||
.with_state(store.clone());
|
||||
|
||||
let html_routes = OpenApiRouter::new()
|
||||
.route("/", get(|| async { Redirect::permanent("/recent") }))
|
||||
.routes(routes!(html::recent::handler))
|
||||
.routes(routes!(html::books::handler_init))
|
||||
.routes(routes!(html::books::handler))
|
||||
.routes(routes!(html::books::download))
|
||||
.routes(routes!(html::series::handler_init))
|
||||
.routes(routes!(html::series::handler))
|
||||
.routes(routes!(html::series::single))
|
||||
.routes(routes!(html::authors::handler_init))
|
||||
.routes(routes!(html::authors::handler))
|
||||
.routes(routes!(html::authors::single))
|
||||
.routes(routes!(html::cover::thumbnail))
|
||||
.routes(routes!(html::cover::full))
|
||||
.routes(routes!(html::search::handler))
|
||||
.routes(routes!(html::archive::handler))
|
||||
.routes(routes!(static_files::handler))
|
||||
.layer(TraceLayer::new_for_http())
|
||||
.with_state(store.clone());
|
||||
|
||||
OpenApiRouter::new()
|
||||
.merge(html_routes)
|
||||
.nest("/opds", opds_routes)
|
||||
}
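Not part of the diff: a hedged sketch of how this `OpenApiRouter` could be served, assuming utoipa-axum's `split_for_parts` to separate the plain axum `Router` from the generated OpenAPI document; the bind address is illustrative.

```rust
// Sketch only, under the assumptions above.
async fn serve(state: AppState) -> std::io::Result<()> {
    let (router, _openapi) = router(state).split_for_parts();
    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await?;
    axum::serve(listener, router).await
}
```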
|
|
@@ -1,30 +0,0 @@
use std::path::Path;
|
||||
|
||||
use calibre_db::calibre::{Calibre, DataStoreError};
|
||||
use snafu::{ResultExt, Snafu};
|
||||
|
||||
use crate::data::book::Book;
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when querying for books.
|
||||
pub enum SearchQueryError {
|
||||
/// A failure to query the database.
|
||||
#[snafu(display("Failed to search for books."))]
|
||||
Db { source: DataStoreError },
|
||||
}
|
||||
|
||||
/// Query for books and enrich them with additional information.
|
||||
pub async fn query(
|
||||
query: &str,
|
||||
calibre: &Calibre,
|
||||
library_path: &Path,
|
||||
) -> Result<Vec<Book>, SearchQueryError> {
|
||||
let books = calibre
|
||||
.search(query)
|
||||
.context(DbSnafu)?
|
||||
.iter()
|
||||
.filter_map(|book| Book::full_book(book, calibre, library_path))
|
||||
.collect();
|
||||
|
||||
Ok(books)
|
||||
}
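A hypothetical call site for `query`; the search term and library path are illustrative, not from the repository.

```rust
use std::path::Path;

// Search the calibre database and enrich the hits into full Book values.
async fn find_books(calibre: &Calibre) -> Result<Vec<Book>, SearchQueryError> {
    query("tolkien", calibre, Path::new("/srv/calibre")).await
}
```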
|
|
@@ -1,36 +0,0 @@
use std::path::Path;
|
||||
|
||||
use calibre_db::{
|
||||
calibre::{Calibre, DataStoreError},
|
||||
data::series::Series,
|
||||
};
|
||||
use snafu::{ResultExt, Snafu};
|
||||
|
||||
use crate::data::book::Book;
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
/// Errors that can occur when retrieving a single series.
|
||||
pub enum SingleSeriesError {
|
||||
/// A failure to fetch series data.
|
||||
#[snafu(display("Failed to fetch series data."))]
|
||||
SeriesData { source: DataStoreError },
|
||||
/// A failure to fetch books in the series.
|
||||
#[snafu(display("Failed to fetch books in series."))]
|
||||
BookData { source: DataStoreError },
|
||||
}
|
||||
|
||||
/// Retrieve a single series and all its books.
|
||||
pub async fn single(
|
||||
id: u64,
|
||||
calibre: &Calibre,
|
||||
library_path: &Path,
|
||||
) -> Result<(Series, Vec<Book>), SingleSeriesError> {
|
||||
let series = calibre.scalar_series(id).context(SeriesDataSnafu)?;
|
||||
let books = calibre.series_books(id).context(BookDataSnafu)?;
|
||||
let books = books
|
||||
.iter()
|
||||
.filter_map(|x| Book::full_book(x, calibre, library_path))
|
||||
.collect::<Vec<Book>>();
|
||||
|
||||
Ok((series, books))
|
||||
}
|
|
@@ -1,53 +0,0 @@
use axum::{
|
||||
http::{StatusCode, Uri, header},
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use rust_embed::RustEmbed;
|
||||
|
||||
use super::TAG;
|
||||
|
||||
/// Embed static files.
|
||||
#[derive(RustEmbed)]
|
||||
#[folder = "static"]
|
||||
pub struct Files;
|
||||
|
||||
/// Serve a static file from the 'static' folder.
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/static/{*file}",
|
||||
tag = TAG,
|
||||
responses(
|
||||
(status = 200, description = "Static file"),
|
||||
(status = 404, description = "No such file within 'static'", body = String)
|
||||
)
|
||||
)]
|
||||
pub async fn handler(uri: Uri) -> impl IntoResponse {
|
||||
let mut path = uri.path().trim_start_matches('/').to_string();
|
||||
|
||||
if path.starts_with("static/") {
|
||||
path = path.replace("static/", "");
|
||||
}
|
||||
|
||||
StaticFile(path)
|
||||
}
|
||||
|
||||
/// A wrapper type for static files.
|
||||
pub struct StaticFile<T>(pub T);
|
||||
|
||||
impl<T> IntoResponse for StaticFile<T>
|
||||
where
|
||||
T: Into<String>,
|
||||
{
|
||||
fn into_response(self) -> Response {
|
||||
let path = self.0.into();
|
||||
|
||||
match Files::get(path.as_str()) {
|
||||
Some(content) => {
|
||||
let mime = mime_guess::from_path(path).first_or_octet_stream();
|
||||
([(header::CONTENT_TYPE, mime.as_ref())], content.data).into_response()
|
||||
}
|
||||
None => (StatusCode::NOT_FOUND, "404 Not Found").into_response(),
|
||||
}
|
||||
}
|
||||
}
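The mime fallback used above, in isolation (illustrative sketch): unknown extensions degrade to `application/octet-stream`.

```rust
// Guess a content type from a file extension, as in the handler above.
fn guess(path: &str) -> String {
    mime_guess::from_path(path).first_or_octet_stream().to_string()
}

// guess("app.css") == "text/css"
// guess("data.unknown") == "application/octet-stream"
```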
|
|
@@ -1,38 +1,43 @@
//! Handle caching of files, specifically book covers.
|
||||
|
||||
use std::{
|
||||
fmt,
|
||||
fmt::Write,
|
||||
fs::{self, File},
|
||||
io,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use sha2::{
|
||||
Digest, Sha256,
|
||||
digest::{generic_array::GenericArray, typenum::U32},
|
||||
Digest, Sha256,
|
||||
};
|
||||
use snafu::{ResultExt, Snafu};
|
||||
use std::fmt::Write;
|
||||
use thiserror::Error;
|
||||
use tracing::debug;
|
||||
|
||||
/// Errors from converting a hash to its string representation.
|
||||
#[derive(Debug, Snafu)]
|
||||
pub enum HashToPathError {
|
||||
#[snafu(display("Failed to generate string representation of hash."))]
|
||||
ToString { source: fmt::Error },
|
||||
/// Errors from dealing with file caching.
|
||||
#[derive(Error, Debug)]
|
||||
pub enum CacheError {
|
||||
/// Error converting a hash to its string representation.
|
||||
#[error("failed to access thumbnail")]
|
||||
HashError(#[from] std::fmt::Error),
|
||||
/// Error creating a thumbnail for an image.
|
||||
#[error("failed to create thumbnail")]
|
||||
ImageError(#[from] image::ImageError),
|
||||
/// Error accessing a thumbnail.
|
||||
#[error("failed to access thumbnail")]
|
||||
ThumbnailAccessError(#[from] std::io::Error),
|
||||
/// Error accessing thumbnail directories.
|
||||
#[error("failed to access thumbnail directory")]
|
||||
ThumbnailPathError(PathBuf),
|
||||
}
|
||||
|
||||
/// Convert a hash into its path representation inside the cache directory.
|
||||
///
|
||||
/// First hash character is the top folder, second character the second level folder and the rest
|
||||
/// is the filename.
|
||||
fn hash_to_path(
|
||||
hash: GenericArray<u8, U32>,
|
||||
cache_path: &Path,
|
||||
) -> Result<PathBuf, HashToPathError> {
|
||||
fn hash_to_path(hash: GenericArray<u8, U32>, cache_path: &Path) -> Result<PathBuf, CacheError> {
|
||||
let mut hash_string = String::new();
|
||||
for byte in hash {
|
||||
write!(&mut hash_string, "{:02x}", byte).context(ToStringSnafu)?;
|
||||
write!(&mut hash_string, "{:02x}", byte)?;
|
||||
}
|
||||
let hash = hash_string;
|
||||
|
||||
|
@@ -46,78 +51,37 @@ fn hash_to_path(
.join(remaining_segment))
|
||||
}
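A minimal sketch of the layout rule the doc comment above describes, assuming a lowercase hex digest of at least three characters (not part of the diff):

```rust
use std::path::{Path, PathBuf};

fn split_hash(hex: &str, cache: &Path) -> PathBuf {
    cache
        .join(&hex[0..1]) // first character: top-level folder
        .join(&hex[1..2]) // second character: second-level folder
        .join(&hex[2..])  // remainder: file name
}

// split_hash("ab3f9c", Path::new("/cache")) == PathBuf::from("/cache/a/b/3f9c")
```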
|
||||
|
||||
/// Errors from creating a thumbnail.
|
||||
#[derive(Debug, Snafu)]
|
||||
pub enum CreateThumbnailError {
|
||||
#[snafu(display("No parent for {path}."))]
|
||||
ParentDir { path: String },
|
||||
#[snafu(display("Failed to create thumbnail path {path}."))]
|
||||
ThumbnailDir { source: io::Error, path: String },
|
||||
#[snafu(display("Failed to open image {path}."))]
|
||||
ImageOpen {
|
||||
source: image::ImageError,
|
||||
path: String,
|
||||
},
|
||||
#[snafu(display("Failed to save image to path {path}."))]
|
||||
ImageSave {
|
||||
source: image::ImageError,
|
||||
path: String,
|
||||
},
|
||||
}
|
||||
|
||||
/// Create a thumbnail for `cover_path` at `thumbnail_path`.
|
||||
fn create_thumbnail(cover_path: &Path, thumbnail_path: &Path) -> Result<(), CreateThumbnailError> {
|
||||
fn create_thumbnail(cover_path: &Path, thumbnail_path: &Path) -> Result<(), CacheError> {
|
||||
debug!("creating thumbnail for {}", cover_path.to_string_lossy());
|
||||
let folders = thumbnail_path.parent().ok_or_else(|| {
|
||||
ParentDirSnafu {
|
||||
path: thumbnail_path.to_string_lossy(),
|
||||
}
|
||||
.build()
|
||||
})?;
|
||||
fs::create_dir_all(folders).context(ThumbnailDirSnafu {
|
||||
path: folders.to_string_lossy(),
|
||||
})?;
|
||||
let folders = thumbnail_path
|
||||
.parent()
|
||||
.ok_or_else(|| CacheError::ThumbnailPathError(thumbnail_path.to_path_buf()))?;
|
||||
fs::create_dir_all(folders)?;
|
||||
|
||||
const THUMBNAIL_SIZE: u32 = 512;
|
||||
let img = image::open(cover_path).context(ImageOpenSnafu {
|
||||
path: cover_path.to_string_lossy(),
|
||||
})?;
|
||||
let img = image::open(cover_path)?;
|
||||
let thumbnail = img.thumbnail(THUMBNAIL_SIZE, THUMBNAIL_SIZE);
|
||||
thumbnail
|
||||
.save_with_format(thumbnail_path, image::ImageFormat::Jpeg)
|
||||
.context(ImageSaveSnafu {
|
||||
path: thumbnail_path.to_string_lossy(),
|
||||
})?;
|
||||
thumbnail.save_with_format(thumbnail_path, image::ImageFormat::Jpeg)?;
|
||||
debug!("saved thumbnail to {}", thumbnail_path.to_string_lossy());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Errors from retrieving a thumbnail.
|
||||
#[derive(Debug, Snafu)]
|
||||
pub enum RetrieveThumbnailError {
|
||||
#[snafu(display("Failed to convert hash to string."))]
|
||||
HashToPath { source: HashToPathError },
|
||||
#[snafu(display("Failed to create not yet existing thumbnail."))]
|
||||
CreateThumbnail { source: CreateThumbnailError },
|
||||
#[snafu(display("Failed to open thumbnail."))]
|
||||
OpenThumbnail { source: io::Error },
|
||||
}
|
||||
|
||||
/// Get the thumbnail for a book cover.
|
||||
///
|
||||
/// If a thumbnail does not yet exist, create it.
|
||||
pub fn get_thumbnail(cover_path: &Path, cache_path: &Path) -> Result<File, RetrieveThumbnailError> {
|
||||
pub fn get_thumbnail(cover_path: &Path, cache_path: &Path) -> Result<File, CacheError> {
|
||||
let path_str = cover_path.to_string_lossy();
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(path_str.as_bytes());
|
||||
let hash = hasher.finalize();
|
||||
|
||||
let thumbnail_path = hash_to_path(hash, cache_path).context(HashToPathSnafu)?;
|
||||
let thumbnail_path = hash_to_path(hash, cache_path)?;
|
||||
|
||||
if !thumbnail_path.exists() {
|
||||
create_thumbnail(cover_path, &thumbnail_path).context(CreateThumbnailSnafu)?;
|
||||
create_thumbnail(cover_path, &thumbnail_path)?;
|
||||
}
|
||||
|
||||
File::open(thumbnail_path).context(OpenThumbnailSnafu)
|
||||
Ok(File::open(thumbnail_path)?)
|
||||
}
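A hypothetical caller of `get_thumbnail`, using the incoming `CacheError` signature; both paths are illustrative.

```rust
use std::{fs::File, path::Path};

// Open (and lazily create) the cached thumbnail for one cover.
fn cover_thumb() -> Result<File, CacheError> {
    let cover = Path::new("/srv/calibre/Author/Book (42)/cover.jpg");
    let cache = Path::new("/var/cache/little-hesinde");
    get_thumbnail(cover, cache)
}
```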
|
||||
|
|
|
@@ -2,36 +2,31 @@
|
||||
use std::{
|
||||
env, fs, io,
|
||||
net::{SocketAddr, ToSocketAddrs},
|
||||
net::SocketAddr,
|
||||
net::ToSocketAddrs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use snafu::{ResultExt, Snafu};
|
||||
use thiserror::Error;
|
||||
use tracing::info;
|
||||
|
||||
use crate::cli::Cli;
|
||||
|
||||
/// Errors from loading application configuration.
|
||||
#[derive(Debug, Snafu)]
|
||||
pub enum LoadError {
|
||||
/// The provided path is not a calibre library.
|
||||
#[snafu(display("{path} is not a calibre library."))]
|
||||
LibraryPath { path: String },
|
||||
/// Could not find the calibre metadata database.
|
||||
#[snafu(display("Could not find calibre metadata at {path}."))]
|
||||
MetadataPath { path: String },
|
||||
/// The listening address could not be parsed.
|
||||
#[snafu(display("Invalid listening address {listen_address}."))]
|
||||
ListeningAddressParse {
|
||||
source: io::Error,
|
||||
listen_address: String,
|
||||
},
|
||||
/// The listening address is invalid.
|
||||
#[snafu(display("Invalid listening address {listen_address}."))]
|
||||
ListeningAddress { listen_address: String },
|
||||
/// The cache directory could not be created.
|
||||
#[snafu(display("Failed to create cach directory at {path}."))]
|
||||
CacheDir { source: io::Error, path: String },
|
||||
/// Errors when dealing with application configuration.
|
||||
#[derive(Error, Debug)]
|
||||
pub enum ConfigError {
|
||||
/// Calibre library path does not exist.
|
||||
#[error("no folder at {0}")]
|
||||
LibraryPathNotFound(String),
|
||||
/// Calibre database does not exist.
|
||||
#[error("no metadata.db in {0}")]
|
||||
MetadataNotFound(String),
|
||||
/// Error converting a string to a listening address.
|
||||
#[error("failed to convert into listening address")]
|
||||
ListeningAddressError(String),
|
||||
/// Error accessing the configured cache path.
|
||||
#[error("failed to access cache path")]
|
||||
CachePathError(#[from] io::Error),
|
||||
}
|
||||
|
||||
/// Application configuration.
|
||||
|
@@ -49,7 +44,7 @@ pub struct Config {
|
||||
impl Config {
|
||||
/// Check if the calibre library from `args` exists and if the calibre database can be found.
|
||||
pub fn load(args: &Cli) -> Result<Self, LoadError> {
|
||||
pub fn load(args: &Cli) -> Result<Self, ConfigError> {
|
||||
let library_path = Path::new(&args.library_path).to_path_buf();
|
||||
|
||||
if !library_path.exists() {
|
||||
|
@@ -58,7 +53,7 @@ impl Config {
.to_str()
|
||||
.unwrap_or("<failed to parse path>")
|
||||
.to_string();
|
||||
return LibraryPathSnafu { path: library_path }.fail();
|
||||
return Err(ConfigError::LibraryPathNotFound(library_path));
|
||||
}
|
||||
|
||||
let metadata_path = library_path.join("metadata.db");
|
||||
|
@@ -68,24 +63,18 @@ impl Config {
.to_str()
|
||||
.unwrap_or("<failed to parse path>")
|
||||
.to_string();
|
||||
return MetadataPathSnafu {
|
||||
path: metadata_path,
|
||||
}
|
||||
.fail();
|
||||
return Err(ConfigError::MetadataNotFound(metadata_path));
|
||||
}
|
||||
let listen_address = args
|
||||
.listen_address
|
||||
.to_socket_addrs()
|
||||
.context(ListeningAddressParseSnafu {
|
||||
listen_address: args.listen_address.clone(),
|
||||
.map_err(|e| {
|
||||
ConfigError::ListeningAddressError(format!("{}: {e:?}", args.listen_address))
|
||||
})?
|
||||
.next()
|
||||
.ok_or(
|
||||
ListeningAddressSnafu {
|
||||
listen_address: args.listen_address.clone(),
|
||||
}
|
||||
.build(),
|
||||
)?;
|
||||
.ok_or(ConfigError::ListeningAddressError(
|
||||
args.listen_address.clone(),
|
||||
))?;
|
||||
|
||||
let cache_path = if args.cache_path.starts_with("$TMP") {
|
||||
let cache_base = env::var("XDG_CACHE_HOME")
|
||||
|
@@ -94,9 +83,7 @@ impl Config {
} else {
|
||||
PathBuf::from(&args.cache_path)
|
||||
};
|
||||
fs::create_dir_all(&cache_path).context(CacheDirSnafu {
|
||||
path: cache_path.to_string_lossy(),
|
||||
})?;
|
||||
fs::create_dir_all(&cache_path)?;
|
||||
info!("Using {} for cache", cache_path.to_string_lossy());
|
||||
|
||||
Ok(Self {
|
||||
|
|
|
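The address-resolution step in `Config::load`, in isolation (illustrative sketch): a `host:port` string may resolve to several socket addresses, and the first one wins.

```rust
use std::net::{SocketAddr, ToSocketAddrs};

fn first_addr(listen: &str) -> Option<SocketAddr> {
    listen.to_socket_addrs().ok()?.next()
}

// first_addr("localhost:8080") typically yields 127.0.0.1:8080 or [::1]:8080.
```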
@@ -1,2 +0,0 @@
//! Data types and functions for enriching calibre data.
|
||||
pub mod book;
|
|
@@ -2,12 +2,13 @@
|
||||
use std::{collections::HashMap, fmt::Display, path::Path};
|
||||
|
||||
use calibre_db::{
|
||||
calibre::Calibre,
|
||||
data::{author::Author as DbAuthor, book::Book as DbBook, series::Series as DbSeries},
|
||||
use calibre_db::data::{
|
||||
author::Author as DbAuthor, book::Book as DbBook, series::Series as DbSeries,
|
||||
};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::app_state::AppState;
|
||||
|
||||
/// Wrapper type for a file format string (must be a struct in order to implement traits).
|
||||
#[derive(Debug, Clone, Serialize, Eq, PartialEq, Hash)]
|
||||
pub struct Format(pub String);
|
||||
|
@@ -91,10 +92,10 @@ impl Book {
|
||||
/// Wrap a [`DbBook`](struct@calibre_db::data::book::Book) in a [`Book`](struct@Book) by
|
||||
/// fetching additional information about author, formats and series.
|
||||
pub fn full_book(book: &DbBook, calibre: &Calibre, library_path: &Path) -> Option<Book> {
|
||||
let formats = Book::formats(book, library_path);
|
||||
let author = calibre.book_author(book.id).ok()?;
|
||||
let series = calibre.book_series(book.id).ok()?;
|
||||
pub fn full_book(book: &DbBook, state: &AppState) -> Option<Book> {
|
||||
let formats = Book::formats(book, &state.config.library_path);
|
||||
let author = state.calibre.book_author(book.id).ok()?;
|
||||
let series = state.calibre.book_series(book.id).ok()?;
|
||||
Some(Book::from_db_book(book, series, author, formats))
|
||||
}
|
||||
}
|
||||
|
|
38
little-hesinde/src/handlers/author.rs
Normal file
@@ -0,0 +1,38 @@
//! Handle requests for a single author.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use calibre_db::data::pagination::SortOrder;
|
||||
use poem::{
|
||||
handler,
|
||||
web::{Data, Path},
|
||||
Response,
|
||||
};
|
||||
|
||||
use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept};
|
||||
|
||||
/// Handle a request for an author with `id` and decide whether to render to html or OPDS.
|
||||
#[handler]
|
||||
pub async fn handler(
|
||||
id: Path<u64>,
|
||||
accept: Data<&Accept>,
|
||||
state: Data<&Arc<AppState>>,
|
||||
) -> Result<Response, poem::Error> {
|
||||
let author = state
|
||||
.calibre
|
||||
.scalar_author(*id)
|
||||
.map_err(HandlerError::DataError)?;
|
||||
let books = state
|
||||
.calibre
|
||||
.author_books(*id, u32::MAX.into(), None, SortOrder::ASC)
|
||||
.map_err(HandlerError::DataError)?;
|
||||
let books = books
|
||||
.iter()
|
||||
.filter_map(|x| Book::full_book(x, &state))
|
||||
.collect::<Vec<Book>>();
|
||||
|
||||
match accept.0 {
|
||||
Accept::Html => crate::handlers::html::author::handler(author, books).await,
|
||||
Accept::Opds => crate::handlers::opds::author::handler(author, books).await,
|
||||
}
|
||||
}
|
44
little-hesinde/src/handlers/authors.rs
Normal file
@@ -0,0 +1,44 @@
//! Handle requests for multiple authors.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use calibre_db::{calibre::Calibre, data::pagination::SortOrder};
|
||||
use poem::{
|
||||
handler,
|
||||
web::{Data, Path},
|
||||
Response,
|
||||
};
|
||||
|
||||
use crate::{app_state::AppState, Accept};
|
||||
|
||||
/// Handle a request for multiple authors, starting at the first.
|
||||
#[handler]
|
||||
pub async fn handler_init(
|
||||
accept: Data<&Accept>,
|
||||
state: Data<&Arc<AppState>>,
|
||||
) -> Result<Response, poem::Error> {
|
||||
authors(&accept, &state.calibre, None, &SortOrder::ASC).await
|
||||
}
|
||||
|
||||
/// Handle a request for multiple authors, starting at the `cursor` and going in the direction of
|
||||
/// `sort_order`.
|
||||
#[handler]
|
||||
pub async fn handler(
|
||||
Path((cursor, sort_order)): Path<(String, SortOrder)>,
|
||||
accept: Data<&Accept>,
|
||||
state: Data<&Arc<AppState>>,
|
||||
) -> Result<Response, poem::Error> {
|
||||
authors(&accept, &state.calibre, Some(&cursor), &sort_order).await
|
||||
}
|
||||
|
||||
async fn authors(
|
||||
accept: &Accept,
|
||||
calibre: &Calibre,
|
||||
cursor: Option<&str>,
|
||||
sort_order: &SortOrder,
|
||||
) -> Result<Response, poem::Error> {
|
||||
match accept {
|
||||
Accept::Html => crate::handlers::html::authors::handler(calibre, cursor, sort_order).await,
|
||||
Accept::Opds => crate::handlers::opds::authors::handler(calibre, cursor, sort_order).await,
|
||||
}
|
||||
}
|
77
little-hesinde/src/handlers/books.rs
Normal file
@@ -0,0 +1,77 @@
//! Handle requests for multiple books.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use calibre_db::data::pagination::SortOrder;
|
||||
use poem::{
|
||||
error::NotFoundError,
|
||||
handler,
|
||||
web::{Data, Path},
|
||||
Response,
|
||||
};
|
||||
use tokio::fs::File;
|
||||
|
||||
use crate::{
|
||||
app_state::AppState,
|
||||
data::book::{Book, Format},
|
||||
handlers::error::HandlerError,
|
||||
opds::media_type::MediaType,
|
||||
Accept,
|
||||
};
|
||||
|
||||
/// Handle a request for multiple books, starting at the first.
|
||||
#[handler]
|
||||
pub async fn handler_init(
|
||||
accept: Data<&Accept>,
|
||||
state: Data<&Arc<AppState>>,
|
||||
) -> Result<Response, poem::Error> {
|
||||
books(&accept, &state, None, &SortOrder::ASC).await
|
||||
}
|
||||
|
||||
/// Handle a request for multiple books, starting at the `cursor` and going in the direction of
|
||||
/// `sort_order`.
|
||||
#[handler]
|
||||
pub async fn handler(
|
||||
Path((cursor, sort_order)): Path<(String, SortOrder)>,
|
||||
accept: Data<&Accept>,
|
||||
state: Data<&Arc<AppState>>,
|
||||
) -> Result<Response, poem::Error> {
|
||||
books(&accept, &state, Some(&cursor), &sort_order).await
|
||||
}
|
||||
|
||||
/// Handle a request for a book with id `id` in format `format`.
|
||||
#[handler]
|
||||
pub async fn handler_download(
|
||||
Path((id, format)): Path<(u64, String)>,
|
||||
state: Data<&Arc<AppState>>,
|
||||
) -> Result<Response, poem::Error> {
|
||||
let book = state
|
||||
.calibre
|
||||
.scalar_book(id)
|
||||
.map_err(HandlerError::DataError)?;
|
||||
let book = Book::full_book(&book, &state).ok_or(NotFoundError)?;
|
||||
let format = Format(format);
|
||||
let file_name = book.formats.get(&format).ok_or(NotFoundError)?;
|
||||
let file_path = state
|
||||
.config
|
||||
.library_path
|
||||
.join(book.data.path)
|
||||
.join(file_name);
|
||||
let mut file = File::open(file_path).await.map_err(|_| NotFoundError)?;
|
||||
let content_type: MediaType = format.into();
|
||||
let content_type = format!("{content_type}");
|
||||
|
||||
crate::handlers::download::handler(file_name, file, &content_type).await
|
||||
}
|
||||
|
||||
async fn books(
|
||||
accept: &Accept,
|
||||
state: &Arc<AppState>,
|
||||
cursor: Option<&str>,
|
||||
sort_order: &SortOrder,
|
||||
) -> Result<Response, poem::Error> {
|
||||
match accept {
|
||||
Accept::Html => crate::handlers::html::books::handler(state, cursor, sort_order).await,
|
||||
Accept::Opds => crate::handlers::opds::books::handler(state, cursor, sort_order).await,
|
||||
}
|
||||
}
|
74
little-hesinde/src/handlers/cover.rs
Normal file
@@ -0,0 +1,74 @@
//! Handle requests for cover images.
|
||||
|
||||
use std::{fs::File, path::Path as FilePath, sync::Arc};
|
||||
|
||||
use crate::{
|
||||
app_state::AppState,
|
||||
cache::{self, CacheError},
|
||||
config::Config,
|
||||
handlers::error::HandlerError,
|
||||
};
|
||||
use calibre_db::calibre::Calibre;
|
||||
use poem::{
|
||||
error::NotFoundError,
|
||||
handler,
|
||||
web::{headers::ContentType, Data, Path},
|
||||
Response,
|
||||
};
|
||||
use thiserror::Error;
|
||||
use tokio::fs::File as AsyncFile;
|
||||
|
||||
/// Errors from fetching cover images.
|
||||
#[derive(Error, Debug)]
|
||||
pub enum CoverError {
|
||||
/// Error fetching a cover thumbnail.
|
||||
#[error("failed to access thumbnail")]
|
||||
ThumbnailError(#[from] CacheError),
|
||||
/// Error fetching a full cover.
|
||||
#[error("failed access cover")]
|
||||
FullCoverError(#[from] std::io::Error),
|
||||
}
|
||||
|
||||
/// Handle a request for the cover thumbnail of book with id `id`.
|
||||
#[handler]
|
||||
pub async fn handler_thumbnail(
|
||||
id: Path<u64>,
|
||||
state: Data<&Arc<AppState>>,
|
||||
) -> Result<Response, poem::Error> {
|
||||
cover(
|
||||
&state.calibre,
|
||||
&state.config,
|
||||
*id,
|
||||
|cover_path, cache_path| Ok(cache::get_thumbnail(cover_path, cache_path)?),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Handle a request for the cover image of book with id `id`.
|
||||
#[handler]
|
||||
pub async fn handler_full(
|
||||
id: Path<u64>,
|
||||
state: Data<&Arc<AppState>>,
|
||||
) -> Result<Response, poem::Error> {
|
||||
cover(&state.calibre, &state.config, *id, |cover_path, _| {
|
||||
Ok(File::open(cover_path)?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn cover<F>(
|
||||
calibre: &Calibre,
|
||||
config: &Config,
|
||||
id: u64,
|
||||
f: F,
|
||||
) -> Result<Response, poem::Error>
|
||||
where
|
||||
F: Fn(&FilePath, &FilePath) -> Result<File, CoverError>,
|
||||
{
|
||||
let book = calibre.scalar_book(id).map_err(HandlerError::DataError)?;
|
||||
let cover_path = config.library_path.join(book.path).join("cover.jpg");
|
||||
|
||||
let cover = f(&cover_path, &config.cache_path).map_err(|_| NotFoundError)?;
|
||||
let cover = AsyncFile::from_std(cover);
|
||||
crate::handlers::download::handler("cover.jpg", cover, &ContentType::jpeg().to_string()).await
|
||||
}
|
23
little-hesinde/src/handlers/download.rs
Normal file
@@ -0,0 +1,23 @@
//! Handle requests for specific formats of a book.
|
||||
|
||||
use tokio::io::AsyncRead;
|
||||
|
||||
use poem::{Body, IntoResponse, Response};
|
||||
use tokio_util::io::ReaderStream;
|
||||
|
||||
/// Handle a request for a file.
|
||||
///
|
||||
/// Must not be used directly from a route as that makes it vulnerable to path traversal attacks.
|
||||
pub async fn handler<A: AsyncRead + Send + 'static>(
|
||||
file_name: &str,
|
||||
reader: A,
|
||||
content_type: &str,
|
||||
) -> Result<Response, poem::Error> {
|
||||
let stream = ReaderStream::new(reader);
|
||||
let body = Body::from_bytes_stream(stream);
|
||||
|
||||
Ok(body
|
||||
.with_content_type(content_type)
|
||||
.with_header("Content-Disposition", format!("filename=\"{file_name}\""))
|
||||
.into_response())
|
||||
}
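A hypothetical caller that streams a fixed file through the handler; per the warning above, the name and path are never taken from the request.

```rust
// Stream an EPUB with a download filename (paths illustrative).
async fn send_epub() -> Result<poem::Response, poem::Error> {
    let file = tokio::fs::File::open("/srv/books/book.epub")
        .await
        .map_err(poem::error::InternalServerError)?;
    handler("book.epub", file, "application/epub+zip").await
}
```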
|
57
little-hesinde/src/handlers/error.rs
Normal file
@@ -0,0 +1,57 @@
//! Error handling for requests handlers.
|
||||
|
||||
use calibre_db::data::error::DataStoreError;
|
||||
use poem::{error::ResponseError, http::StatusCode, Body, Response};
|
||||
use thiserror::Error;
|
||||
use tracing::error;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::opds::error::OpdsError;
|
||||
|
||||
/// Errors happening during handling of requests.
|
||||
#[derive(Error, Debug)]
|
||||
#[error("opds error")]
|
||||
pub enum HandlerError {
|
||||
/// Error rendering OPDS.
|
||||
#[error("opds error")]
|
||||
OpdsError(#[from] OpdsError),
|
||||
/// Error fetching data from calibre.
|
||||
#[error("data error")]
|
||||
DataError(#[from] DataStoreError),
|
||||
}
|
||||
|
||||
/// Convert a [`HandlerError`](enum@HandlerError) into a suitable response error.
|
||||
///
|
||||
/// Log the real error (internal) with a uuid and send a suitable error message to the user with
|
||||
/// the same uuid (for correlation purposes).
|
||||
impl ResponseError for HandlerError {
|
||||
fn status(&self) -> StatusCode {
|
||||
match &self {
|
||||
HandlerError::OpdsError(_) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
HandlerError::DataError(e) => match e {
|
||||
DataStoreError::NoResults(_) => StatusCode::NOT_FOUND,
|
||||
_ => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn as_response(&self) -> Response {
|
||||
let id = Uuid::new_v4();
|
||||
let internal_msg = format!("{:?}", self);
|
||||
let external_msg = match &self {
|
||||
HandlerError::OpdsError(_) => "internal server error",
|
||||
HandlerError::DataError(e) => match e {
|
||||
DataStoreError::NoResults(_) => "item not found",
|
||||
_ => "internal server error",
|
||||
},
|
||||
};
|
||||
error!("{id}: {internal_msg}");
|
||||
|
||||
let body = Body::from_json(serde_json::json!({
|
||||
"id": id.to_string(),
|
||||
"message": external_msg,
|
||||
}))
|
||||
.unwrap();
|
||||
Response::builder().status(self.status()).body(body)
|
||||
}
|
||||
}
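The correlation pattern above, in isolation (illustrative sketch): one uuid ties the detailed log line to the sanitized client message.

```rust
// Produce a matched pair: internal log line and external JSON body.
fn correlate(internal: &str, external: &str) -> (String, serde_json::Value) {
    let id = uuid::Uuid::new_v4();
    let log_line = format!("{id}: {internal}");
    let body = serde_json::json!({ "id": id.to_string(), "message": external });
    (log_line, body)
}
```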
|
21
little-hesinde/src/handlers/html/author.rs
Normal file
@@ -0,0 +1,21 @@
//! Handle a single author for html.
|
||||
|
||||
use calibre_db::data::author::Author;
|
||||
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
|
||||
use tera::Context;
|
||||
|
||||
use crate::{data::book::Book, templates::TEMPLATES};
|
||||
|
||||
/// Render a single author in html.
|
||||
pub async fn handler(author: Author, books: Vec<Book>) -> Result<Response, poem::Error> {
|
||||
let mut context = Context::new();
|
||||
context.insert("title", &author.name);
|
||||
context.insert("nav", "authors");
|
||||
context.insert("books", &books);
|
||||
|
||||
Ok(TEMPLATES
|
||||
.render("book_list", &context)
|
||||
.map_err(InternalServerError)
|
||||
.map(Html)?
|
||||
.into_response())
|
||||
}
|
21
little-hesinde/src/handlers/html/authors.rs
Normal file
@@ -0,0 +1,21 @@
//! Handle multiple authors in html.
|
||||
|
||||
use calibre_db::{calibre::Calibre, data::pagination::SortOrder};
|
||||
use poem::Response;
|
||||
|
||||
use crate::handlers::paginated;
|
||||
|
||||
/// Render all authors paginated by cursor in html.
|
||||
pub async fn handler(
|
||||
calibre: &Calibre,
|
||||
cursor: Option<&str>,
|
||||
sort_order: &SortOrder,
|
||||
) -> Result<Response, poem::Error> {
|
||||
paginated::render(
|
||||
"authors",
|
||||
|| calibre.authors(25, cursor, sort_order),
|
||||
|author| author.sort.clone(),
|
||||
|cursor| calibre.has_previous_authors(cursor),
|
||||
|cursor| calibre.has_more_authors(cursor),
|
||||
)
|
||||
}
|
26
little-hesinde/src/handlers/html/books.rs
Normal file
@@ -0,0 +1,26 @@
//! Handle multiple books in html.
|
||||
|
||||
use calibre_db::data::pagination::SortOrder;
|
||||
use poem::Response;
|
||||
|
||||
use crate::{app_state::AppState, data::book::Book, handlers::paginated};
|
||||
|
||||
/// Render all books paginated by cursor in html.
|
||||
pub async fn handler(
|
||||
state: &AppState,
|
||||
cursor: Option<&str>,
|
||||
sort_order: &SortOrder,
|
||||
) -> Result<Response, poem::Error> {
|
||||
paginated::render(
|
||||
"books",
|
||||
|| {
|
||||
state
|
||||
.calibre
|
||||
.books(25, cursor, sort_order)
|
||||
.map(|x| x.iter().filter_map(|y| Book::full_book(y, state)).collect())
|
||||
},
|
||||
|book| book.data.sort.clone(),
|
||||
|cursor| state.calibre.has_previous_books(cursor),
|
||||
|cursor| state.calibre.has_more_books(cursor),
|
||||
)
|
||||
}
|
20
little-hesinde/src/handlers/html/recent.rs
Normal file
@@ -0,0 +1,20 @@
//! Handle recent books in html.
|
||||
|
||||
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
|
||||
use tera::Context;
|
||||
|
||||
use crate::{data::book::Book, templates::TEMPLATES};
|
||||
|
||||
/// Render recent books as html.
|
||||
pub async fn handler(recent_books: Vec<Book>) -> Result<Response, poem::Error> {
|
||||
let mut context = Context::new();
|
||||
context.insert("title", "");
|
||||
context.insert("nav", "recent");
|
||||
context.insert("books", &recent_books);
|
||||
|
||||
Ok(TEMPLATES
|
||||
.render("book_list", &context)
|
||||
.map_err(InternalServerError)
|
||||
.map(Html)?
|
||||
.into_response())
|
||||
}
|
20
little-hesinde/src/handlers/html/search.rs
Normal file
@@ -0,0 +1,20 @@
//! Handle search results in html.
|
||||
|
||||
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
|
||||
use tera::Context;
|
||||
|
||||
use crate::{data::book::Book, templates::TEMPLATES};
|
||||
|
||||
/// Render all search results as html.
|
||||
pub async fn handler(books: Vec<Book>) -> Result<Response, poem::Error> {
|
||||
let mut context = Context::new();
|
||||
context.insert("title", "Search Results");
|
||||
context.insert("nav", "search");
|
||||
context.insert("books", &books);
|
||||
|
||||
Ok(TEMPLATES
|
||||
.render("book_list", &context)
|
||||
.map_err(InternalServerError)
|
||||
.map(Html)?
|
||||
.into_response())
|
||||
}
|
21
little-hesinde/src/handlers/html/series.rs
Normal file
@@ -0,0 +1,21 @@
//! Handle multiple series in html.
|
||||
|
||||
use calibre_db::{calibre::Calibre, data::pagination::SortOrder};
|
||||
use poem::Response;
|
||||
|
||||
use crate::handlers::paginated;
|
||||
|
||||
/// Render all series paginated by cursor as html.
|
||||
pub async fn handler(
|
||||
calibre: &Calibre,
|
||||
cursor: Option<&str>,
|
||||
sort_order: &SortOrder,
|
||||
) -> Result<Response, poem::Error> {
|
||||
paginated::render(
|
||||
"series",
|
||||
|| calibre.series(25, cursor, sort_order),
|
||||
|series| series.sort.clone(),
|
||||
|cursor| calibre.has_previous_series(cursor),
|
||||
|cursor| calibre.has_more_series(cursor),
|
||||
)
|
||||
}
|
21
little-hesinde/src/handlers/html/series_single.rs
Normal file
@@ -0,0 +1,21 @@
//! Handle a single series in html.
|
||||
|
||||
use calibre_db::data::series::Series;
|
||||
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
|
||||
use tera::Context;
|
||||
|
||||
use crate::{data::book::Book, templates::TEMPLATES};
|
||||
|
||||
/// Render a single series as html.
|
||||
pub async fn handler(series: Series, books: Vec<Book>) -> Result<Response, poem::Error> {
|
||||
let mut context = Context::new();
|
||||
context.insert("title", &series.name);
|
||||
context.insert("nav", "series");
|
||||
context.insert("books", &books);
|
||||
|
||||
Ok(TEMPLATES
|
||||
.render("book_list", &context)
|
||||
.map_err(InternalServerError)
|
||||
.map(Html)?
|
||||
.into_response())
|
||||
}
|
39
little-hesinde/src/handlers/opds/author.rs
Normal file
@@ -0,0 +1,39 @@
//! Handle a single author for opds.
|
||||
|
||||
use calibre_db::data::author::Author;
|
||||
use poem::{IntoResponse, Response};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::{
|
||||
data::book::Book,
|
||||
handlers::error::HandlerError,
|
||||
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
|
||||
APP_NAME,
|
||||
};
|
||||
|
||||
/// Render a single author as an OPDS entry embedded in a feed.
|
||||
pub async fn handler(author: Author, books: Vec<Book>) -> Result<Response, poem::Error> {
|
||||
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
|
||||
let now = OffsetDateTime::now_utc();
|
||||
|
||||
let self_link = Link {
|
||||
href: format!("/opds/authors/{}", author.id),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Myself,
|
||||
title: None,
|
||||
count: None,
|
||||
};
|
||||
let feed = Feed::create(
|
||||
now,
|
||||
&format!("{APP_NAME}author:{}", author.id),
|
||||
&author.name,
|
||||
self_link,
|
||||
vec![],
|
||||
entries,
|
||||
);
|
||||
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
|
||||
|
||||
Ok(xml
|
||||
.with_content_type("application/atom+xml")
|
||||
.into_response())
|
||||
}
|
49
little-hesinde/src/handlers/opds/authors.rs
Normal file
@@ -0,0 +1,49 @@
//! Handle multiple authors for opds.
|
||||
|
||||
use calibre_db::{
|
||||
calibre::Calibre,
|
||||
data::{author::Author as DbAuthor, pagination::SortOrder},
|
||||
};
|
||||
use poem::{IntoResponse, Response};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::{
|
||||
handlers::error::HandlerError,
|
||||
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
|
||||
APP_NAME,
|
||||
};
|
||||
|
||||
/// Render all authors as OPDS entries embedded in a feed.
|
||||
pub async fn handler(
|
||||
calibre: &Calibre,
|
||||
_cursor: Option<&str>,
|
||||
_sort_order: &SortOrder,
|
||||
) -> Result<Response, poem::Error> {
|
||||
let authors: Vec<DbAuthor> = calibre
|
||||
.authors(u32::MAX.into(), None, &SortOrder::ASC)
|
||||
.map_err(HandlerError::DataError)?;
|
||||
|
||||
let entries: Vec<Entry> = authors.into_iter().map(Entry::from).collect();
|
||||
let now = OffsetDateTime::now_utc();
|
||||
|
||||
let self_link = Link {
|
||||
href: "/opds/authors".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Myself,
|
||||
title: None,
|
||||
count: None,
|
||||
};
|
||||
let feed = Feed::create(
|
||||
now,
|
||||
&format!("{APP_NAME}:authors"),
|
||||
"All Authors",
|
||||
self_link,
|
||||
vec![],
|
||||
entries,
|
||||
);
|
||||
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
|
||||
|
||||
Ok(xml
|
||||
.with_content_type("application/atom+xml")
|
||||
.into_response())
|
||||
}
|
54
little-hesinde/src/handlers/opds/books.rs
Normal file
@@ -0,0 +1,54 @@
//! Handle multiple books for opds.
|
||||
|
||||
use calibre_db::data::pagination::SortOrder;
|
||||
use poem::{IntoResponse, Response};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::{
|
||||
app_state::AppState,
|
||||
data::book::Book,
|
||||
handlers::error::HandlerError,
|
||||
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
|
||||
APP_NAME,
|
||||
};
|
||||
|
||||
/// Render all books as OPDS entries embedded in a feed.
|
||||
pub async fn handler(
|
||||
state: &AppState,
|
||||
_cursor: Option<&str>,
|
||||
_sort_order: &SortOrder,
|
||||
) -> Result<Response, poem::Error> {
|
||||
let books: Vec<Book> = state
|
||||
.calibre
|
||||
.books(u32::MAX.into(), None, &SortOrder::ASC)
|
||||
.map(|x| x.iter().filter_map(|y| Book::full_book(y, state)).collect())
|
||||
.map_err(HandlerError::DataError)?;
|
||||
render_books(books).await
|
||||
}
|
||||
|
||||
/// Render a list of books as OPDS entries in a feed.
|
||||
pub(crate) async fn render_books(books: Vec<Book>) -> Result<Response, poem::Error> {
|
||||
let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
|
||||
let now = OffsetDateTime::now_utc();
|
||||
|
||||
let self_link = Link {
|
||||
href: "/opds/books".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Myself,
|
||||
title: None,
|
||||
count: None,
|
||||
};
|
||||
let feed = Feed::create(
|
||||
now,
|
||||
&format!("{APP_NAME}:books"),
|
||||
"All Books",
|
||||
self_link,
|
||||
vec![],
|
||||
entries,
|
||||
);
|
||||
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
|
||||
|
||||
Ok(xml
|
||||
.with_content_type("application/atom+xml")
|
||||
.into_response())
|
||||
}
|
110
little-hesinde/src/handlers/opds/feed.rs
Normal file
@@ -0,0 +1,110 @@
//! Handle the OPDS root feed.
|
||||
|
||||
use poem::{handler, web::WithContentType, IntoResponse};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::{
|
||||
handlers::error::HandlerError,
|
||||
opds::{
|
||||
content::Content, entry::Entry, feed::Feed, link::Link, media_type::MediaType,
|
||||
relation::Relation,
|
||||
},
|
||||
APP_NAME,
|
||||
};
|
||||
|
||||
/// Render a root OPDS feed with links to the subsections (authors, books, series and recent).
|
||||
#[handler]
|
||||
pub async fn handler() -> Result<WithContentType<String>, poem::Error> {
|
||||
let now = OffsetDateTime::now_utc();
|
||||
|
||||
let self_link = Link {
|
||||
href: "/opds".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Myself,
|
||||
title: None,
|
||||
count: None,
|
||||
};
|
||||
let books_entry = Entry {
|
||||
title: "Books".to_string(),
|
||||
id: format!("{APP_NAME}:books"),
|
||||
updated: now,
|
||||
content: Some(Content {
|
||||
media_type: MediaType::Text,
|
||||
content: "Index of all books".to_string(),
|
||||
}),
|
||||
author: None,
|
||||
links: vec![Link {
|
||||
href: "/opds/books".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Subsection,
|
||||
title: None,
|
||||
count: None,
|
||||
}],
|
||||
};
|
||||
|
||||
let authors_entry = Entry {
|
||||
title: "Authors".to_string(),
|
||||
id: format!("{APP_NAME}:authors"),
|
||||
updated: now,
|
||||
content: Some(Content {
|
||||
media_type: MediaType::Text,
|
||||
content: "Index of all authors".to_string(),
|
||||
}),
|
||||
author: None,
|
||||
links: vec![Link {
|
||||
href: "/opds/authors".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Subsection,
|
||||
title: None,
|
||||
count: None,
|
||||
}],
|
||||
};
|
||||
|
||||
let series_entry = Entry {
|
||||
title: "Series".to_string(),
|
||||
id: format!("{APP_NAME}:series"),
|
||||
updated: now,
|
||||
content: Some(Content {
|
||||
media_type: MediaType::Text,
|
||||
content: "Index of all series".to_string(),
|
||||
}),
|
||||
author: None,
|
||||
links: vec![Link {
|
||||
href: "/opds/series".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Subsection,
|
||||
title: None,
|
||||
count: None,
|
||||
}],
|
||||
};
|
||||
|
||||
let recents_entry = Entry {
|
||||
title: "Recent Additions".to_string(),
|
||||
id: format!("{APP_NAME}:recentbooks"),
|
||||
updated: now,
|
||||
content: Some(Content {
|
||||
media_type: MediaType::Text,
|
||||
content: "Recently added books".to_string(),
|
||||
}),
|
||||
author: None,
|
||||
links: vec![Link {
|
||||
href: "/opds/recent".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Subsection,
|
||||
title: None,
|
||||
count: None,
|
||||
}],
|
||||
};
|
||||
|
||||
let feed = Feed::create(
|
||||
now,
|
||||
&format!("{APP_NAME}:catalog"),
|
||||
"Little Hesinde",
|
||||
self_link,
|
||||
vec![],
|
||||
vec![authors_entry, series_entry, books_entry, recents_entry],
|
||||
);
|
||||
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
|
||||
|
||||
Ok(xml.with_content_type("application/atom+xml"))
|
||||
}
|
38
little-hesinde/src/handlers/opds/recent.rs
Normal file
@@ -0,0 +1,38 @@
//! Handle recent books for OPDS.
|
||||
|
||||
use poem::{IntoResponse, Response};
|
||||
use time::OffsetDateTime;
|
||||
|
||||
use crate::{
|
||||
data::book::Book,
|
||||
handlers::error::HandlerError,
|
||||
opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
|
||||
APP_NAME,
|
||||
};
|
||||
|
||||
/// Render recent books as OPDS entries embedded in a feed.
|
||||
pub async fn handler(recent_books: Vec<Book>) -> Result<Response, poem::Error> {
|
||||
let entries: Vec<Entry> = recent_books.into_iter().map(Entry::from).collect();
|
||||
let now = OffsetDateTime::now_utc();
|
||||
|
||||
let self_link = Link {
|
||||
href: "/opds/recent".to_string(),
|
||||
media_type: MediaType::Navigation,
|
||||
rel: Relation::Myself,
|
||||
title: None,
|
||||
count: None,
|
||||
};
|
||||
let feed = Feed::create(
|
||||
now,
|
||||
&format!("{APP_NAME}:recentbooks"),
|
||||
"Recent Books",
|
||||
self_link,
|
||||
vec![],
|
||||
entries,
|
||||
);
|
||||
let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;
|
||||
|
||||
Ok(xml
|
||||
.with_content_type("application/atom+xml")
|
||||
.into_response())
|
||||
}
|
12
little-hesinde/src/handlers/opds/search.rs
Normal file
12
little-hesinde/src/handlers/opds/search.rs
Normal file
|
@ -0,0 +1,12 @@
//! Handle search results in OPDS.

use poem::Response;

use crate::data::book::Book;

use super::books::render_books;

/// Render search results as OPDS entries in a feed.
pub async fn handler(books: Vec<Book>) -> Result<Response, poem::Error> {
    render_books(books).await
}
27
little-hesinde/src/handlers/opds/search_info.rs
Normal file
@@ -0,0 +1,27 @@
//! Handle open search description.

use crate::{
    handlers::error::HandlerError,
    opds::search::{OpenSearchDescription, Url},
    APP_NAME,
};
use poem::{handler, IntoResponse, Response};

/// Render search information as open search description.
#[handler]
pub async fn handler() -> Result<Response, poem::Error> {
    let search = OpenSearchDescription {
        short_name: APP_NAME.to_string(),
        description: "Search for ebooks".to_string(),
        input_encoding: "UTF-8".to_string(),
        output_encoding: "UTF-8".to_string(),
        url: Url {
            type_name: "application/atom+xml".to_string(),
            template: "/opds/search?query={searchTerms}".to_string(),
        },
    };
    let xml = search.as_xml().map_err(HandlerError::OpdsError)?;
    Ok(xml
        .with_content_type("application/atom+xml")
        .into_response())
}
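The template above is the whole contract with OPDS clients: they substitute `{searchTerms}` with the url-encoded query and request the result. A hypothetical illustration of the client side; the `urlencoding` crate is an assumption, any percent-encoder works:

// Client-side sketch (not from this diff): expand the OpenSearch template.
fn expand_template(template: &str, query: &str) -> String {
    template.replace("{searchTerms}", &urlencoding::encode(query))
}

fn main() {
    assert_eq!(
        expand_template("/opds/search?query={searchTerms}", "dune messiah"),
        "/opds/search?query=dune%20messiah"
    );
}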
46
little-hesinde/src/handlers/opds/series.rs
Normal file
@@ -0,0 +1,46 @@
//! Handle multiple series for OPDS.

use calibre_db::{calibre::Calibre, data::pagination::SortOrder};
use poem::{IntoResponse, Response};
use time::OffsetDateTime;

use crate::{
    handlers::error::HandlerError,
    opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
    APP_NAME,
};

/// Render all series as OPDS entries embedded in a feed.
pub async fn handler(
    calibre: &Calibre,
    _cursor: Option<&str>,
    _sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
    let series = calibre
        .series(u32::MAX.into(), None, &SortOrder::ASC)
        .map_err(HandlerError::DataError)?;

    let entries: Vec<Entry> = series.into_iter().map(Entry::from).collect();
    let now = OffsetDateTime::now_utc();

    let self_link = Link {
        href: "/opds/series".to_string(),
        media_type: MediaType::Navigation,
        rel: Relation::Myself,
        title: None,
        count: None,
    };
    let feed = Feed::create(
        now,
        &format!("{APP_NAME}:series"),
        "All Series",
        self_link,
        vec![],
        entries,
    );
    let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;

    Ok(xml
        .with_content_type("application/atom+xml")
        .into_response())
}
39
little-hesinde/src/handlers/opds/series_single.rs
Normal file
@@ -0,0 +1,39 @@
//! Handle a single series for OPDS.

use calibre_db::data::series::Series;
use poem::{IntoResponse, Response};
use time::OffsetDateTime;

use crate::{
    data::book::Book,
    handlers::error::HandlerError,
    opds::{entry::Entry, feed::Feed, link::Link, media_type::MediaType, relation::Relation},
    APP_NAME,
};

/// Render a single series as an OPDS entry embedded in a feed.
pub async fn handler(series: Series, books: Vec<Book>) -> Result<Response, poem::Error> {
    let entries: Vec<Entry> = books.into_iter().map(Entry::from).collect();
    let now = OffsetDateTime::now_utc();

    let self_link = Link {
        href: format!("/opds/series/{}", series.id),
        media_type: MediaType::Navigation,
        rel: Relation::Myself,
        title: None,
        count: None,
    };
    let feed = Feed::create(
        now,
        &format!("{APP_NAME}:series:{}", series.id),
        &series.name,
        self_link,
        vec![],
        entries,
    );
    let xml = feed.as_xml().map_err(HandlerError::OpdsError)?;

    Ok(xml
        .with_content_type("application/atom+xml")
        .into_response())
}
@@ -1,31 +1,12 @@
//! Deal with cursor pagination.

use std::fmt::Debug;

use axum::response::{Html, IntoResponse, Response};
use calibre_db::calibre::DataStoreError;
use serde::Serialize;
use snafu::{ResultExt, Snafu};
use tera::Context;

use super::error::HandlerError;
use crate::templates::TEMPLATES;

#[derive(Debug, Snafu)]
/// Errors that can occur during pagination.
pub enum PaginationError {
    /// A failure to fetch pagination data.
    #[snafu(display("Failed to fetch pagination data."))]
    Fetch { source: DataStoreError },
    /// A failure to render the template.
    #[snafu(display("Failed to render template."))]
    Template { source: tera::Error },
    /// A failure to fetch previous items.
    #[snafu(display("Failed to fetch previous items."))]
    Previous { source: DataStoreError },
    /// A failure to fetch more items.
    #[snafu(display("Failed to fetch more items."))]
    More { source: DataStoreError },
}
use calibre_db::data::error::DataStoreError;
use poem::{error::InternalServerError, web::Html, IntoResponse, Response};
use serde::Serialize;
use std::fmt::Debug;
use tera::Context;

/// Render a tera template with paginated items and generate back and forth links.
pub fn render<T: Serialize + Debug, F, S, P, M>(

@@ -34,7 +15,7 @@ pub fn render<T: Serialize + Debug, F, S, P, M>(
    sort_field: S,
    has_previous: P,
    has_more: M,
) -> Result<Response, PaginationError>
) -> Result<Response, poem::Error>
where
    F: Fn() -> Result<Vec<T>, DataStoreError>,
    S: Fn(&T) -> String,

@@ -44,11 +25,11 @@ where
    let mut context = Context::new();
    context.insert("nav", template);

    let items = fetcher().context(FetchSnafu)?;
    let items = fetcher().map_err(HandlerError::DataError)?;
    if items.is_empty() {
        return Ok(TEMPLATES
            .render("empty", &context)
            .context(TemplateSnafu)
            .map_err(InternalServerError)
            .map(Html)?
            .into_response());
    }

@@ -58,8 +39,8 @@ where
    let (backward_cursor, forward_cursor) = (sort_field(first_item), sort_field(last_item));

    let has_previous = has_previous(&backward_cursor).context(PreviousSnafu)?;
    let has_more = has_more(&forward_cursor).context(MoreSnafu)?;
    let has_previous = has_previous(&backward_cursor).map_err(HandlerError::DataError)?;
    let has_more = has_more(&forward_cursor).map_err(HandlerError::DataError)?;

    context.insert("has_previous", &has_previous);
    context.insert("has_more", &has_more);

@@ -69,7 +50,7 @@ where
    Ok(TEMPLATES
        .render(template, &context)
        .context(TemplateSnafu)
        .map_err(InternalServerError)
        .map(Html)?
        .into_response())
}
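For orientation, a hedged sketch of how a caller would drive `render` on the poem side; the calibre method names (`authors`, `has_previous_authors`, `has_more_authors`) and the `sort` field are assumptions modeled on the handlers elsewhere in this diff, only the closure shapes F, S, P and M come from the signature above:

// Usage sketch for `render` (names marked "assumed" are not from this diff):
// fetch one page of authors and emit the "authors" template.
fn authors_page(calibre: &Calibre, cursor: Option<&str>) -> Result<Response, poem::Error> {
    render(
        "authors",
        || calibre.authors(25, cursor, &SortOrder::ASC), // F: fetch a page (assumed method)
        |author| author.sort.clone(),                    // S: cursor field of an item (assumed field)
        |cursor| calibre.has_previous_authors(cursor),   // P: items before this page? (assumed method)
        |cursor| calibre.has_more_authors(cursor),       // M: items after it? (assumed method)
    )
}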
@@ -0,0 +1,28 @@
//! Handle requests for recent books.

use std::sync::Arc;

use poem::{handler, web::Data, Response};

use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept};

/// Handle a request for recent books and decide whether to render to html or OPDS.
#[handler]
pub async fn handler(
    accept: Data<&Accept>,
    state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
    let recent_books = state
        .calibre
        .recent_books(25)
        .map_err(HandlerError::DataError)?;
    let recent_books = recent_books
        .iter()
        .filter_map(|x| Book::full_book(x, &state))
        .collect::<Vec<Book>>();

    match accept.0 {
        Accept::Html => crate::handlers::html::recent::handler(recent_books).await,
        Accept::Opds => crate::handlers::opds::recent::handler(recent_books).await,
    }
}
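The `accept.0` dispatch above works because each route tree carries an `Accept` marker as poem data (see the lib.rs hunk further down). A self-contained sketch of just that mechanism, with the same handler mounted in two trees:

use poem::{get, handler, web::Data, EndpointExt, Route};

#[derive(Debug, Clone, Copy)]
enum Accept {
    Html,
    Opds,
}

#[handler]
async fn which(accept: Data<&Accept>) -> String {
    // The same handler answers differently depending on which tree it
    // was mounted in.
    match accept.0 {
        Accept::Html => "html".to_string(),
        Accept::Opds => "opds".to_string(),
    }
}

fn routes() -> Route {
    Route::new()
        .nest("/", Route::new().at("/which", get(which)).data(Accept::Html))
        .nest("/opds", Route::new().at("/which", get(which)).data(Accept::Opds))
}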
38
little-hesinde/src/handlers/search.rs
Normal file
@@ -0,0 +1,38 @@
//! Handle search requests.

use std::sync::Arc;

use poem::{
    handler,
    web::{Data, Query},
    Response,
};
use serde::Deserialize;

use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept};

#[derive(Deserialize)]
struct Params {
    /// Query for a search request.
    query: String,
}

/// Handle a search request with query parameter `query`.
#[handler]
pub async fn handler(
    accept: Data<&Accept>,
    state: Data<&Arc<AppState>>,
    Query(params): Query<Params>,
) -> Result<Response, poem::Error> {
    let books = state
        .calibre
        .search(&params.query)
        .map_err(HandlerError::DataError)?
        .iter()
        .filter_map(|book| Book::full_book(book, *state))
        .collect();

    match *accept {
        Accept::Html => crate::handlers::html::search::handler(books).await,
        Accept::Opds => crate::handlers::opds::search::handler(books).await,
    }
}
48
little-hesinde/src/handlers/series.rs
Normal file
@@ -0,0 +1,48 @@
//! Handle requests for multiple series.

use std::sync::Arc;

use calibre_db::data::pagination::SortOrder;
use poem::{
    handler,
    web::{Data, Path},
    Response,
};

use crate::{app_state::AppState, Accept};

/// Handle a request for multiple series, starting at the first.
#[handler]
pub async fn handler_init(
    accept: Data<&Accept>,
    state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
    series(&accept, &state, None, &SortOrder::ASC).await
}

/// Handle a request for multiple series, starting at the `cursor` and going in the direction of
/// `sort_order`.
#[handler]
pub async fn handler(
    Path((cursor, sort_order)): Path<(String, SortOrder)>,
    accept: Data<&Accept>,
    state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
    series(&accept, &state, Some(&cursor), &sort_order).await
}

async fn series(
    accept: &Accept,
    state: &Arc<AppState>,
    cursor: Option<&str>,
    sort_order: &SortOrder,
) -> Result<Response, poem::Error> {
    match accept {
        Accept::Html => {
            crate::handlers::html::series::handler(&state.calibre, cursor, sort_order).await
        }
        Accept::Opds => {
            crate::handlers::opds::series::handler(&state.calibre, cursor, sort_order).await
        }
    }
}
37
little-hesinde/src/handlers/series_single.rs
Normal file
@@ -0,0 +1,37 @@
//! Handle requests for a single series.

use std::sync::Arc;

use poem::{
    handler,
    web::{Data, Path},
    Response,
};

use crate::{app_state::AppState, data::book::Book, handlers::error::HandlerError, Accept};

/// Handle a request for a series with `id` and decide whether to render to html or OPDS.
#[handler]
pub async fn handler(
    id: Path<u64>,
    accept: Data<&Accept>,
    state: Data<&Arc<AppState>>,
) -> Result<Response, poem::Error> {
    let series = state
        .calibre
        .scalar_series(*id)
        .map_err(HandlerError::DataError)?;
    let books = state
        .calibre
        .series_books(*id)
        .map_err(HandlerError::DataError)?;
    let books = books
        .iter()
        .filter_map(|x| Book::full_book(x, &state))
        .collect::<Vec<Book>>();

    match accept.0 {
        Accept::Html => crate::handlers::html::series_single::handler(series, books).await,
        Accept::Opds => crate::handlers::opds::series_single::handler(series, books).await,
    }
}
11
little-hesinde/src/handlers/source_archive.rs
Normal file
@@ -0,0 +1,11 @@
use crate::{APP_NAME, VERSION};
use poem::{handler, Response};

const SOURCE_ARCHIVE: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/archive.zip"));

/// Handle a request for the source code of the server.
#[handler]
pub async fn handler() -> Result<Response, poem::Error> {
    let file_name = format!("{APP_NAME}-{VERSION}.zip");
    crate::handlers::download::handler(&file_name, SOURCE_ARCHIVE, "application/zip").await
}
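The `include_bytes!` above only compiles if something places archive.zip into OUT_DIR first, which presumably happens in the crate's build script. A minimal hypothetical build.rs showing only the mechanics; the real project would zip its own source tree rather than write a placeholder:

// build.rs (hypothetical sketch): produce the artifact that
// include_bytes!(concat!(env!("OUT_DIR"), "/archive.zip")) embeds.
use std::{env, fs, path::Path};

fn main() {
    let out_dir = env::var("OUT_DIR").expect("cargo sets OUT_DIR for build scripts");
    // Placeholder content keeps the sketch self-contained; the real script
    // presumably writes a zip of the repository here.
    fs::write(Path::new(&out_dir).join("archive.zip"), b"placeholder").expect("write archive");
    println!("cargo:rerun-if-changed=src");
}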
@@ -2,29 +2,82 @@
//!
//! Shamelessly written to scratch my own itches.

use api::ServeError;
use std::sync::Arc;

use app_state::AppState;
use calibre_db::calibre::{Calibre, LoadError};
use calibre_db::calibre::Calibre;
use config::Config;
use snafu::{ResultExt, Snafu};
use poem::{
    endpoint::EmbeddedFilesEndpoint, get, listener::TcpListener, middleware::Tracing, EndpointExt,
    Route, Server,
};
use rust_embed::RustEmbed;
use tokio::signal;
use tracing::info;

pub mod api;
pub mod app_state;
pub mod cache;
pub mod cli;
pub mod config;
/// Data structs and their functions.
pub mod data;
pub mod data {
    pub mod book;
}
/// Request handlers. Because it cannot be guaranteed that a proper accept header is sent, the
/// routes are doubled and the decision on whether to render html or OPDS is made with internal
/// data on the respective routes.
pub mod handlers {
    /// Handle requests for html.
    pub mod html {
        pub mod author;
        pub mod authors;
        pub mod books;
        pub mod recent;
        pub mod search;
        pub mod series;
        pub mod series_single;
    }
    /// Handle requests for OPDS.
    pub mod opds {
        pub mod author;
        pub mod authors;
        pub mod books;
        pub mod feed;
        pub mod recent;
        pub mod search;
        pub mod search_info;
        pub mod series;
        pub mod series_single;
    }
    pub mod author;
    pub mod authors;
    pub mod books;
    pub mod cover;
    pub mod download;
    pub mod error;
    pub mod paginated;
    pub mod recent;
    pub mod search;
    pub mod series;
    pub mod series_single;
    pub mod source_archive;
}
/// OPDS data structs.
pub mod opds;
pub mod opds {
    pub mod author;
    pub mod content;
    pub mod entry;
    pub mod error;
    pub mod feed;
    pub mod link;
    pub mod media_type;
    pub mod relation;
    pub mod search;
}
pub mod templates;

/// The application name.
const APP_NAME: &str = env!("CARGO_PKG_NAME");
/// The application version.
const APP_VERSION: &str = env!("CARGO_PKG_VERSION");
pub const APP_NAME: &str = "little-hesinde";
pub const VERSION: &str = "0.3.1";

/// Internal marker data in lieu of a proper `Accept` header.
#[derive(Debug, Clone, Copy)]
@@ -35,32 +88,73 @@ pub enum Accept {
    Opds,
}

/// Errors from running little-hesinde.
#[derive(Debug, Snafu)]
pub enum RunError {
    #[snafu(display("Failed to load calibre database."))]
    LoadCalibre { source: LoadError },
    #[snafu(display("Failed to run http server."))]
    Serve { source: ServeError },
}
/// Embed static files.
#[derive(RustEmbed)]
#[folder = "static"]
pub struct Files;

/// Main entry point to run the ebook server with a calibre library specified in `config`.
pub async fn run(config: Config) -> Result<(), RunError> {
    let calibre = Calibre::load(&config.metadata_path).context(LoadCalibreSnafu)?;
    let app_state = AppState {
pub async fn run(config: Config) -> Result<(), std::io::Error> {
    let calibre = Calibre::load(&config.metadata_path).expect("failed to load calibre database");
    let app_state = Arc::new(AppState {
        calibre,
        config: config.clone(),
    };
    });

    let server = api::serve(config.listen_address, app_state);
    let html_routes = Route::new()
        .at("/", get(handlers::recent::handler))
        .at("/books", get(handlers::books::handler_init))
        .at("/books/:cursor/:sort_order", get(handlers::books::handler))
        .at("/series", get(handlers::series::handler_init))
        .at(
            "/series/:cursor/:sort_order",
            get(handlers::series::handler),
        )
        .at("/series/:id", get(handlers::series_single::handler))
        .at("/authors", get(handlers::authors::handler_init))
        .at("/authors/:id", get(handlers::author::handler))
        .at(
            "/authors/:cursor/:sort_order",
            get(handlers::authors::handler),
        )
        .at("/cover/:id", get(handlers::cover::handler_full))
        .at(
            "/cover/:id/thumbnail",
            get(handlers::cover::handler_thumbnail),
        )
        .at("/book/:id/:format", get(handlers::books::handler_download))
        .at("/archive", get(handlers::source_archive::handler))
        .at("/search", get(handlers::search::handler))
        .nest("/static", EmbeddedFilesEndpoint::<Files>::new())
        .data(Accept::Html);

    let opds_routes = Route::new()
        .at("/", get(handlers::opds::feed::handler))
        .at("/recent", get(handlers::recent::handler))
        .at("/books", get(handlers::books::handler_init))
        .at("/authors", get(handlers::authors::handler_init))
        .at("/authors/:id", get(handlers::author::handler))
        .at("/series", get(handlers::series::handler_init))
        .at("/series/:id", get(handlers::series_single::handler))
        .at("/search/info", get(handlers::opds::search_info::handler))
        .at("/search", get(handlers::search::handler))
        .data(Accept::Opds);

    let app = Route::new()
        .nest("/", html_routes)
        .nest("/opds", opds_routes)
        .data(app_state)
        .with(Tracing);

    let server = Server::new(TcpListener::bind(config.listen_address))
        .name("little-hesinde")
        .run(app);

    tokio::select! {
        res = server => {
            res.context(ServeSnafu)
        },
        _ = server => {},
        _ = signal::ctrl_c() => {
            info!("Received Ctrl+C, shutting down...");
            Ok(())
        },
    }
    Ok(())
}
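On the new side the select! simply drops the server future on Ctrl+C, so in-flight requests are cut off. poem also ships a built-in graceful variant; a hedged sketch of using it instead (Server::run_with_graceful_shutdown is poem API, but verify the exact signature against the poem version pinned in Cargo.lock):

// Sketch: stop accepting connections on Ctrl+C, give in-flight requests
// up to 10 seconds to drain instead of dropping them.
Server::new(TcpListener::bind(config.listen_address))
    .name("little-hesinde")
    .run_with_graceful_shutdown(
        app,
        async {
            let _ = signal::ctrl_c().await;
            info!("Received Ctrl+C, shutting down...");
        },
        Some(std::time::Duration::from_secs(10)),
    )
    .await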
@@ -1,39 +1,12 @@
use clap::Parser;
use little_hesinde::{
    RunError,
    cli::Cli,
    config::{Config, LoadError},
};
use snafu::{ResultExt, Snafu};
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};

/// Top-level application errors.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("Failed to load config."))]
    Config { source: LoadError },
    #[snafu(display("Failed to run little-hesinde."))]
    Run { source: RunError },
}
use little_hesinde::{cli::Cli, config::Config};

#[tokio::main]
#[snafu::report]
async fn main() -> Result<(), Error> {
    tracing_subscriber::registry()
        .with(
            tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| {
                format!(
                    "{}=debug,tower_http=debug,axum::rejection=trace",
                    env!("CARGO_CRATE_NAME")
                )
                .into()
            }),
        )
        .with(tracing_subscriber::fmt::layer())
        .init();
async fn main() -> Result<(), std::io::Error> {
    tracing_subscriber::fmt::init();

    let args = Cli::parse();
    let config = Config::load(&args).context(ConfigSnafu)?;
    let config = Config::load(&args).expect("failed to load configuration");

    little_hesinde::run(config).await.context(RunSnafu)
    little_hesinde::run(config).await
}
@@ -1,9 +0,0 @@
pub mod author;
pub mod content;
pub mod entry;
pub mod error;
pub mod feed;
pub mod link;
pub mod media_type;
pub mod relation;
pub mod search;
@@ -33,9 +33,6 @@ mod tests {
    fn serialize() {
        let author = init();
        let xml = to_string(&author).unwrap();
        assert_eq!(
            xml,
            "<author><name>Rohal der Weise</name><uri>https://de.wiki-aventurica.de/wiki/Rohal_der_Weise</uri><email>rohal@aventurien.de</email></author>"
        );
        assert_eq!(xml, "<author><name>Rohal der Weise</name><uri>https://de.wiki-aventurica.de/wiki/Rohal_der_Weise</uri><email>rohal@aventurien.de</email></author>");
    }
}
@@ -4,10 +4,11 @@ use calibre_db::data::{author::Author as DbAuthor, series::Series};
use serde::Serialize;
use time::OffsetDateTime;

use crate::{data::book::Book, APP_NAME};

use super::{
    author::Author, content::Content, link::Link, media_type::MediaType, relation::Relation,
};
use crate::{APP_NAME, data::book::Book};

/// Fundamental piece of OPDS, holding information about entries (for example a book).
#[derive(Debug, Serialize)]
@@ -125,9 +126,10 @@ mod tests {
    use quick_xml::se::to_string;
    use time::macros::datetime;

    use super::*;
    use crate::opds::{content::Content, media_type::MediaType, relation::Relation};

    use super::*;

    fn init() -> Entry {
        Entry {
            title: "Authors".to_string(),
@@ -2,18 +2,26 @@

use std::{io, string::FromUtf8Error};

use quick_xml::SeError;
use snafu::Snafu;
use quick_xml::DeError;
use thiserror::Error;

/// Errors happening during handling OPDS data.
#[derive(Debug, Snafu)]
pub enum AsXmlError {
    #[snafu(display("Failed to serialize object."), visibility(pub))]
    ToString { source: SeError },
    #[snafu(display("Failed to write xml event."), visibility(pub))]
    WriteXmlEvent { source: io::Error },
    #[snafu(display("Failed to read xml event."), visibility(pub))]
    ReadXmlEvent { source: quick_xml::Error },
    #[snafu(display("Failed to read bytes as utf8 string."), visibility(pub))]
    BytesToUtf8 { source: FromUtf8Error },
#[derive(Error, Debug)]
#[error("opds error")]
pub enum OpdsError {
    /// Error serializing OPDS data.
    #[error("failed to serialize struct")]
    SerializingError(#[from] DeError),
    /// Error parsing OPDS xml structure.
    #[error("xml failure")]
    XmlError(#[from] quick_xml::Error),
    /// Error decoding xml as UTF-8.
    #[error("failed to decode as utf-8")]
    Utf8Error(#[from] FromUtf8Error),
    /// Error serializing OPDS xml structure.
    #[error("xml serialization failure")]
    XmlSerializationError(#[from] quick_xml::SeError),
    /// Error doing io while handling OPDS xml.
    #[error("xml io failure")]
    XmlIoError(#[from] io::Error),
}
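Each `#[from]` above generates a `From` impl, which is what lets the rewritten `as_xml` bodies below drop the `.context(...)` chains in favor of bare `?`. A minimal illustration of the same conversions (sketch; `Feed` stands in for any Serialize type):

// Every fallible step converts into OpdsError via `?` and #[from].
fn roundtrip(feed: &Feed) -> Result<String, OpdsError> {
    let xml = quick_xml::se::to_string(feed)?; // SeError -> XmlSerializationError
    let bytes = xml.into_bytes();
    let text = String::from_utf8(bytes)?; // FromUtf8Error -> Utf8Error
    Ok(text)
}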
@@ -8,15 +8,10 @@ use quick_xml::{
    se::to_string,
};
use serde::Serialize;
use snafu::ResultExt;
use time::OffsetDateTime;

use super::{
    author::Author,
    entry::Entry,
    error::{AsXmlError, BytesToUtf8Snafu, ReadXmlEventSnafu, ToStringSnafu, WriteXmlEventSnafu},
    link::Link,
    media_type::MediaType,
    author::Author, entry::Entry, error::OpdsError, link::Link, media_type::MediaType,
    relation::Relation,
};
@@ -89,16 +84,14 @@ impl Feed {
    }

    /// Serialize a feed to OPDS xml.
    pub fn as_xml(&self) -> Result<String, AsXmlError> {
        let xml = to_string(&self).context(ToStringSnafu)?;
    pub fn as_xml(&self) -> Result<String, OpdsError> {
        let xml = to_string(&self)?;
        let mut reader = Reader::from_str(&xml);
        reader.config_mut().trim_text(true);

        let declaration = BytesDecl::new("1.0", Some("UTF-8"), None);
        let mut writer = Writer::new(Cursor::new(Vec::new()));
        writer
            .write_event(Event::Decl(declaration))
            .context(WriteXmlEventSnafu)?;
        writer.write_event(Event::Decl(declaration))?;

        let mut feed_start = BytesStart::new("feed");
        feed_start.push_attribute(("xmlns", "http://www.w3.org/2005/Atom"));

@@ -110,15 +103,15 @@ impl Feed {

        loop {
            match reader.read_event() {
                Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => writer
                    .write_event(Event::Start(feed_start.clone()))
                    .context(WriteXmlEventSnafu)?,
                Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => {
                    writer.write_event(Event::Start(feed_start.clone()))?
                }
                Ok(Event::Eof) => break,
                Ok(e) => writer.write_event(e).context(WriteXmlEventSnafu)?,
                Err(e) => return Err(e).context(ReadXmlEventSnafu)?,
                Ok(e) => writer.write_event(e)?,
                Err(e) => return Err(e)?,
            }
        }
        let result = writer.into_inner().into_inner();
        String::from_utf8(result).context(BytesToUtf8Snafu)
        Ok(String::from_utf8(result)?)
    }
}
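The unchanged core of `as_xml` on both sides is a rewrite pass: quick_xml's serde serializer cannot emit the Atom namespace attribute, so the serialized document is re-read event by event and the bare `<feed>` start tag is swapped for one carrying xmlns. The pattern in isolation, as a sketch built from the same quick_xml calls used above:

use std::io::Cursor;

use quick_xml::{
    events::{BytesStart, Event},
    Reader, Writer,
};

// Re-emit `xml`, replacing the root <feed> start tag with one that
// carries the Atom namespace attribute.
fn add_feed_xmlns(xml: &str) -> Result<String, Box<dyn std::error::Error>> {
    let mut reader = Reader::from_str(xml);
    let mut writer = Writer::new(Cursor::new(Vec::new()));

    let mut feed_start = BytesStart::new("feed");
    feed_start.push_attribute(("xmlns", "http://www.w3.org/2005/Atom"));

    loop {
        match reader.read_event()? {
            Event::Start(e) if e.name().as_ref() == b"feed" => {
                writer.write_event(Event::Start(feed_start.clone()))?
            }
            Event::Eof => break,
            e => writer.write_event(e)?,
        }
    }
    Ok(String::from_utf8(writer.into_inner().into_inner())?)
}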
@@ -2,9 +2,10 @@

use serde::Serialize;

use super::{media_type::MediaType, relation::Relation};
use crate::data::book::{Book, Format};

use super::{media_type::MediaType, relation::Relation};

/// Link element in OPDS.
#[derive(Debug, Serialize)]
#[serde(rename = "link")]
@@ -31,13 +32,13 @@ pub struct Link {
/// Convert a format from a book into a link where it is downloadable.
impl From<(&Book, (&Format, &str))> for Link {
    fn from(value: (&Book, (&Format, &str))) -> Self {
        let format = value.1.0.clone();
        let format = value.1 .0.clone();
        let media_type: MediaType = format.into();
        Self {
            href: format!("/book/{}/{}", value.0.data.id, value.1.0),
            href: format!("/book/{}/{}", value.0.data.id, value.1 .0),
            media_type,
            rel: media_type.into(),
            title: Some(value.1.0.0.clone()),
            title: Some(value.1 .0 .0.clone()),
            count: None,
        }
    }
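The only change in this hunk is whitespace: one rustfmt generation prints nested tuple-field access as `value.1 .0` because `value.1.0` used to lex as the float literal `1.0`, while current rustc splits the token and current rustfmt drops the space. Both spellings are the same expression:

// Sketch: `.1 .0` and `.1.0` resolve to the same nested tuple field
// (the space-free form needs a reasonably recent toolchain).
fn main() {
    let value = ("book", ("epub", 42));
    assert_eq!(value.1 .0, "epub");
    assert_eq!(value.1.0, "epub");
}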
@@ -3,24 +3,19 @@
use std::io::Cursor;

use quick_xml::{
    Reader, Writer,
    events::{BytesDecl, BytesStart, Event},
    se::to_string,
    Reader, Writer,
};
use serde::Serialize;
use snafu::ResultExt;

use super::error::{
    AsXmlError, BytesToUtf8Snafu, ReadXmlEventSnafu, ToStringSnafu, WriteXmlEventSnafu,
};
use super::error::OpdsError;

/// Url pointing to a location.
#[derive(Debug, Serialize)]
pub struct Url {
    /// The media type of the resource.
    #[serde(rename = "@type")]
    pub type_name: String,
    /// The URL template.
    #[serde(rename = "@template")]
    pub template: String,
}
@@ -42,31 +37,29 @@

impl OpenSearchDescription {
    /// Serialize search information to an open search description xml.
    pub fn as_xml(&self) -> Result<String, AsXmlError> {
        let xml = to_string(&self).context(ToStringSnafu)?;
    pub fn as_xml(&self) -> Result<String, OpdsError> {
        let xml = to_string(&self)?;
        let mut reader = Reader::from_str(&xml);
        reader.config_mut().trim_text(true);

        let declaration = BytesDecl::new("1.0", Some("UTF-8"), None);
        let mut writer = Writer::new(Cursor::new(Vec::new()));
        writer
            .write_event(Event::Decl(declaration))
            .context(WriteXmlEventSnafu)?;
        writer.write_event(Event::Decl(declaration))?;

        let mut search_start = BytesStart::new("OpenSearchDescription");
        search_start.push_attribute(("xmlns", "http://a9.com/-/spec/opensearch/1.1/"));

        loop {
            match reader.read_event() {
                Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => writer
                    .write_event(Event::Start(search_start.clone()))
                    .context(WriteXmlEventSnafu)?,
                Ok(Event::Start(e)) if e.name().as_ref() == b"feed" => {
                    writer.write_event(Event::Start(search_start.clone()))?
                }
                Ok(Event::Eof) => break,
                Ok(e) => writer.write_event(e).context(WriteXmlEventSnafu)?,
                Err(e) => return Err(e).context(ReadXmlEventSnafu)?,
                Ok(e) => writer.write_event(e)?,
                Err(e) => return Err(e)?,
            }
        }
        let result = writer.into_inner().into_inner();
        String::from_utf8(result).context(BytesToUtf8Snafu)
        Ok(String::from_utf8(result)?)
    }
}