document api functions

Sebastian Hugentobler 2025-07-01 14:07:08 +02:00
parent 552fce432b
commit 480e16d070
Signed by: shu
SSH key fingerprint: SHA256:ppcx6MlixdNZd5EUM1nkHOKoyQYoJwzuQKXM6J/t66M
5 changed files with 38 additions and 0 deletions


@@ -1,3 +1,5 @@
//! HTTP API server for semantic search queries.
use std::{
io,
net::{AddrParseError, SocketAddr},
@@ -20,6 +22,7 @@ pub mod state;
const TAG: &str = "little-librarian";
/// OpenAPI documentation configuration.
#[derive(OpenApi)]
#[openapi(
tags(
@@ -28,6 +31,7 @@ const TAG: &str = "little-librarian";
)]
struct ApiDoc;
/// Errors that occur when starting the HTTP server.
#[derive(Debug, Snafu)]
pub enum ServeError {
#[snafu(display("Failed to parse address into <ip>:<port>."))]
@@ -41,6 +45,7 @@ pub enum ServeError {
Serve { source: io::Error },
}
/// Start the HTTP API server with the given configuration.
pub async fn serve(
address: &str,
db: Postgres,
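
An illustrative sketch, not part of this commit: the first ServeError variant above wraps an address-parse failure, which suggests the configured address string is parsed as <ip>:<port> into a std::net::SocketAddr before the listener is started. Roughly (the helper name is hypothetical):

use std::net::{AddrParseError, SocketAddr};

// Sketch only: parse the configured listen address. A failure here is what
// the AddrParseError-backed variant of ServeError reports.
fn parse_listen_addr(address: &str) -> Result<SocketAddr, AddrParseError> {
    address.parse()
}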


@@ -1,16 +1,24 @@
//! HTTP error handling and response formatting.
use axum::http::StatusCode;
use serde::Serialize;
/// Standard error response format for API endpoints.
#[derive(Serialize)]
pub struct ErrorResponse {
/// Unique identifier for tracking this error instance.
pub id: String,
/// Human-readable error message.
pub error: String,
}
/// Map error types to HTTP status codes.
pub trait HttpStatus {
/// Return the appropriate HTTP status code for this error.
fn status_code(&self) -> StatusCode;
}
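
An illustrative sketch, not part of this commit: implementing the trait for a hypothetical error type (UploadError is made up here purely to show the shape; the crate's real implementation for QueryError lives in the query module):

use axum::http::StatusCode;

// Hypothetical error type, for illustration only.
enum UploadError {
    TooLarge,
    Io,
}

impl HttpStatus for UploadError {
    fn status_code(&self) -> StatusCode {
        match self {
            UploadError::TooLarge => StatusCode::PAYLOAD_TOO_LARGE,
            UploadError::Io => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}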
/// Generate IntoResponse implementation for error types with JSON formatting.
#[macro_export]
macro_rules! http_error {
($error_type:ty) => {
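
The macro body is cut off at this point in the diff. As a rough, hand-written approximation of what an invocation such as http_error!(QueryError) generates (the real expansion may differ, for example in how the id is produced), the response construction plausibly boils down to:

use axum::{response::{IntoResponse, Response}, Json};

// Sketch only: build the JSON error body from any error implementing
// HttpStatus and Display; the generated IntoResponse impl would do roughly
// this inside into_response().
fn error_response<E: HttpStatus + std::fmt::Display>(err: E, id: String) -> Response {
    let body = ErrorResponse {
        id,
        error: err.to_string(),
    };
    (err.status_code(), Json(body)).into_response()
}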


@@ -1,3 +1,5 @@
//! Query endpoint handlers and response types.
use std::sync::Arc;
use axum::{
@@ -14,6 +16,7 @@ use crate::{http_error, query, storage::DocumentMatch};
const MAX_LIMIT: usize = 10;
/// Errors that occur during query processing.
#[derive(Debug, Snafu)]
pub enum QueryError {
#[snafu(display("'limit' query parameter must be a positive integer <= {MAX_LIMIT}."))]
@@ -33,15 +36,21 @@ impl HttpStatus for QueryError {
http_error!(QueryError);
/// Query parameters for search requests.
#[derive(Deserialize)]
pub struct QueryParams {
/// Maximum number of results to return.
pub limit: Option<usize>,
}
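
A minimal sketch of how the optional limit might be validated against MAX_LIMIT (the default for a missing parameter and the exact rejection path are assumptions, they are not visible in this diff):

// Sketch only: None means the parameter was absent; anything outside
// 1..=MAX_LIMIT is rejected, and the handler would answer with the
// QueryError documented above.
fn effective_limit(requested: Option<usize>) -> Option<usize> {
    match requested {
        None => Some(MAX_LIMIT),
        Some(n) if (1..=MAX_LIMIT).contains(&n) => Some(n),
        Some(_) => None,
    }
}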
/// Response format for successful query requests.
#[derive(Debug, Serialize, ToSchema)]
pub struct QueryResponse {
/// List of matching document chunks.
pub results: Vec<DocumentResult>,
/// Total number of results returned.
pub count: usize,
/// Original query text that was searched.
pub query: String,
}
@@ -58,10 +67,14 @@ impl From<(Vec<DocumentMatch>, String)> for QueryResponse {
}
}
/// A single document search result.
#[derive(Debug, Serialize, ToSchema)]
pub struct DocumentResult {
/// Calibre book ID containing this text.
pub book_id: i64,
/// Text content of the matching chunk.
pub text_chunk: String,
/// Similarity score between 0.0 and 1.0.
pub similarity: f64,
}
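
With the derived Serialize these field names appear verbatim on the wire. A small construction example with made-up values; serialized, it becomes {"results":[{"book_id":42,"text_chunk":"an excerpt from the matching chunk","similarity":0.87}],"count":1,"query":"arctic exploration"}:

// Sketch only, illustrative values.
fn example_response() -> QueryResponse {
    QueryResponse {
        results: vec![DocumentResult {
            book_id: 42,
            text_chunk: "an excerpt from the matching chunk".to_string(),
            similarity: 0.87,
        }],
        count: 1,
        query: "arctic exploration".to_string(),
    }
}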
@@ -75,6 +88,7 @@ impl From<DocumentMatch> for DocumentResult {
}
}
/// Execute a semantic search query against the document database.
#[utoipa::path(
post,
path = "/query",


@@ -1,3 +1,5 @@
//! HTTP route definitions and router configuration.
use std::sync::Arc;
use tower_http::trace::TraceLayer;
@@ -6,6 +8,7 @@ use utoipa_axum::{router::OpenApiRouter, routes};
use super::state::AppState;
use crate::api::query;
/// Create the main API router with all endpoints and middleware.
pub fn router(state: AppState) -> OpenApiRouter {
let store = Arc::new(state);
OpenApiRouter::new()
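
The builder chain is cut off here. With the imports above (OpenApiRouter and the routes macro from utoipa_axum, TraceLayer from tower_http), the rest of the chain plausibly registers the query endpoint, adds request tracing, and attaches the shared state, along these lines (query::handler is a hypothetical name, the real registration may differ):

// Sketch only: probable continuation of the builder chain.
OpenApiRouter::new()
    .routes(routes!(query::handler))
    .layer(TraceLayer::new_for_http())
    .with_state(store)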


@@ -1,10 +1,18 @@
//! Shared application state for HTTP handlers.
use crate::{storage::Postgres, text_encoder::TextEncoder, tokenize::Tokenizer};
/// Application state shared across all HTTP request handlers.
#[derive(Debug, Clone)]
pub struct AppState {
/// Database connection pool.
pub db: Postgres,
/// Text tokenizer for processing queries and documents.
pub tokenizer: Tokenizer,
/// Primary embedding model for vector generation.
pub embedder: TextEncoder,
/// Reranking model for improving search relevance.
pub reranker: TextEncoder,
/// Text chunk size in words for processing.
pub chunk_size: usize,
}
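
The routes module wraps this state in an Arc (see the Arc::new(state) call above), so a handler can receive it through axum's State extractor. A minimal sketch with a hypothetical handler:

use std::sync::Arc;
use axum::extract::State;

// Sketch only: read a field of the shared state inside a handler. Handlers
// elsewhere in the crate presumably take the same State<Arc<AppState>>.
async fn debug_chunk_size(State(state): State<Arc<AppState>>) -> String {
    format!("chunk size in words: {}", state.chunk_size)
}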