diff --git a/src/api.rs b/src/api.rs index 2456107..b85f2ec 100644 --- a/src/api.rs +++ b/src/api.rs @@ -1,3 +1,5 @@ +//! HTTP API server for semantic search queries. + use std::{ io, net::{AddrParseError, SocketAddr}, @@ -20,6 +22,7 @@ pub mod state; const TAG: &str = "little-librarian"; +/// OpenAPI documentation configuration. #[derive(OpenApi)] #[openapi( tags( @@ -28,6 +31,7 @@ const TAG: &str = "little-librarian"; )] struct ApiDoc; +/// Errors that occur when starting the HTTP server. #[derive(Debug, Snafu)] pub enum ServeError { #[snafu(display("Failed to parse address into <host>:<port>."))] @@ -41,6 +45,7 @@ pub enum ServeError { Serve { source: io::Error }, } +/// Start the HTTP API server with the given configuration. pub async fn serve( address: &str, db: Postgres, diff --git a/src/api/error.rs b/src/api/error.rs index 15fbe91..a60aba5 100644 --- a/src/api/error.rs +++ b/src/api/error.rs @@ -1,16 +1,24 @@ +//! HTTP error handling and response formatting. + use axum::http::StatusCode; use serde::Serialize; +/// Standard error response format for API endpoints. #[derive(Serialize)] pub struct ErrorResponse { + /// Unique identifier for tracking this error instance. pub id: String, + /// Human-readable error message. pub error: String, } +/// Map error types to HTTP status codes. pub trait HttpStatus { + /// Return the appropriate HTTP status code for this error. fn status_code(&self) -> StatusCode; } +/// Generate IntoResponse implementation for error types with JSON formatting. #[macro_export] macro_rules! http_error { ($error_type:ty) => { diff --git a/src/api/query.rs b/src/api/query.rs index 65185ed..4cda3c8 100644 --- a/src/api/query.rs +++ b/src/api/query.rs @@ -1,3 +1,5 @@ +//! Query endpoint handlers and response types. + use std::sync::Arc; use axum::{ @@ -14,6 +16,7 @@ use crate::{http_error, query, storage::DocumentMatch}; const MAX_LIMIT: usize = 10; +/// Errors that occur during query processing. 
#[derive(Debug, Snafu)] pub enum QueryError { #[snafu(display("'limit' query parameter must be a positive integer <= {MAX_LIMIT}."))] @@ -33,15 +36,21 @@ impl HttpStatus for QueryError { http_error!(QueryError); +/// Query parameters for search requests. #[derive(Deserialize)] pub struct QueryParams { + /// Maximum number of results to return. pub limit: Option<usize>, } +/// Response format for successful query requests. #[derive(Debug, Serialize, ToSchema)] pub struct QueryResponse { + /// List of matching document chunks. pub results: Vec<DocumentResult>, + /// Total number of results returned. pub count: usize, + /// Original query text that was searched. pub query: String, } @@ -58,10 +67,14 @@ impl From<(Vec<DocumentMatch>, String)> for QueryResponse { } } +/// A single document search result. #[derive(Debug, Serialize, ToSchema)] pub struct DocumentResult { + /// Calibre book ID containing this text. pub book_id: i64, + /// Text content of the matching chunk. pub text_chunk: String, + /// Similarity score between 0.0 and 1.0. pub similarity: f64, } @@ -75,6 +88,7 @@ impl From<DocumentMatch> for DocumentResult { } } +/// Execute a semantic search query against the document database. #[utoipa::path( post, path = "/query", diff --git a/src/api/routes.rs b/src/api/routes.rs index 50f1036..d1c06a1 100644 --- a/src/api/routes.rs +++ b/src/api/routes.rs @@ -1,3 +1,5 @@ +//! HTTP route definitions and router configuration. + use std::sync::Arc; use tower_http::trace::TraceLayer; @@ -6,6 +8,7 @@ use utoipa_axum::{router::OpenApiRouter, routes}; use super::state::AppState; use crate::api::query; +/// Create the main API router with all endpoints and middleware. pub fn router(state: AppState) -> OpenApiRouter { let store = Arc::new(state); OpenApiRouter::new() diff --git a/src/api/state.rs b/src/api/state.rs index 6871e05..fc9239f 100644 --- a/src/api/state.rs +++ b/src/api/state.rs @@ -1,10 +1,18 @@ +//! Shared application state for HTTP handlers. 
+ use crate::{storage::Postgres, text_encoder::TextEncoder, tokenize::Tokenizer}; +/// Application state shared across all HTTP request handlers. #[derive(Debug, Clone)] pub struct AppState { + /// Database connection pool. pub db: Postgres, + /// Text tokenizer for processing queries and documents. pub tokenizer: Tokenizer, + /// Primary embedding model for vector generation. pub embedder: TextEncoder, + /// Reranking model for improving search relevance. pub reranker: TextEncoder, + /// Text chunk size in words for processing. pub chunk_size: usize, }