initial commit

Author: Sebastian Hugentobler
Date: 2024-07-10 14:30:26 +02:00
Commit: 8d297920fb
Signed by: shu (GPG Key ID: BB32CF3CA052C2F0)
27 changed files with 6743 additions and 0 deletions

1
.gitignore vendored Normal file

@@ -0,0 +1 @@
/target

3748
Cargo.lock generated Normal file

File diff suppressed because it is too large.

15
Cargo.toml Normal file

@@ -0,0 +1,15 @@
[workspace]
resolver = "2"
members = [
"app", "entity", "migration"
]
[workspace.dependencies]
sea-orm = "0.12.15"
serde = "1.0.203"
time = "0.3.36"
[workspace.package]
license = "AGPL-3.0"
authors = ["Sebastian Hugentobler <shu@vanwa.ch>"]
repository = "https://code.vanwa.ch/shu/hesinde-sync"

1917
app/Cargo.lock generated Normal file

File diff suppressed because it is too large.

23
app/Cargo.toml Normal file

@@ -0,0 +1,23 @@
[package]
name = "hesinde-sync"
version = "0.1.0"
edition = "2021"
license = { workspace = true }
authors = { workspace = true }
repository = { workspace = true }
[dependencies]
entity = { path = "../entity" }
migration = { path = "../migration" }
anyhow = "1.0.86"
clap = { version = "4.5.8", features = ["env", "derive"] }
poem = "3.0.1"
poem-openapi = { version = "5.0.2", features = ["swagger-ui"] }
sea-orm = { workspace = true, features = ["with-time", "sqlx-sqlite", "sqlx-postgres", "sqlx-mysql", "runtime-tokio-rustls", "macros" ] }
serde = { workspace = true, features = ["derive"] }
thiserror = "1.0.61"
time = { workspace = true, features = ["macros", "serde", "formatting", "parsing" ] }
tokio = { version = "1.38.0", features = ["macros", "rt-multi-thread"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
uuid = { version = "1.10.0", features = ["v4", "fast-rng"] }

137
app/src/api.rs Normal file

@@ -0,0 +1,137 @@
use poem::{
error::ResponseError,
http::StatusCode,
web::{Data, Json},
Error, Result,
};
use poem_openapi::{
param::Path,
payload::{self, Json as JsonPayload},
Object, OpenApi,
};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use crate::{
app_state::AppState,
auth_middleware::{authorize, User},
db::DataStoreError,
error_response,
};
#[derive(Object, Deserialize)]
struct RegisterRequest {
username: String,
password: String,
}
#[derive(Object, Serialize)]
struct UserCreated {
username: String,
}
#[derive(Debug, Clone, Object, Deserialize)]
pub struct DocumentUpdate {
pub device: String,
pub device_id: String,
pub document: String,
pub percentage: f32,
pub progress: String,
}
#[derive(Debug, Clone, Object, Deserialize)]
pub struct DocumentProgress {
pub device: String,
pub device_id: String,
pub percentage: f32,
pub progress: String,
pub timestamp: i64,
}
impl ResponseError for DataStoreError {
fn status(&self) -> StatusCode {
StatusCode::INTERNAL_SERVER_ERROR
}
fn as_response(&self) -> poem::Response
where
Self: std::error::Error + Send + Sync + 'static,
{
error_response::create_and_log(self, || match self {
DataStoreError::DatabaseError(_) => (
"An internal server error ocurred".to_string(),
StatusCode::INTERNAL_SERVER_ERROR,
),
})
}
}
pub struct Api;
#[OpenApi]
impl Api {
#[oai(path = "/users/create", method = "post")]
async fn register(
&self,
req: Json<RegisterRequest>,
state: Data<&Arc<AppState>>,
) -> Result<payload::Response<payload::Json<UserCreated>>> {
let db = &state.0.db;
if db.get_user(&req.username).await?.is_some() {
Err(Error::from_status(StatusCode::CONFLICT))
} else {
db.add_user(&req.username, &req.password).await?;
Ok(payload::Response::new(payload::Json(UserCreated {
username: req.username.clone(),
}))
.status(StatusCode::CREATED))
}
}
#[oai(path = "/users/auth", method = "get", transform = "authorize")]
async fn login(&self) -> Result<payload::Response<()>> {
Ok(payload::Response::new(()).status(StatusCode::OK))
}
#[oai(path = "/syncs/progress", method = "put", transform = "authorize")]
async fn push_progress(
&self,
state: Data<&Arc<AppState>>,
user: Data<&User>,
doc: JsonPayload<DocumentUpdate>,
) -> Result<payload::Response<()>> {
let db = &state.db;
let doc_update = (user.name.clone(), doc.0).into();
db.update_position(&doc_update).await?;
Ok(payload::Response::new(()).status(StatusCode::OK))
}
#[oai(
path = "/syncs/progress/:doc_id",
method = "get",
transform = "authorize"
)]
async fn pull_progress(
&self,
state: Data<&Arc<AppState>>,
user: Data<&User>,
doc_id: Path<String>,
) -> Result<payload::Response<JsonPayload<DocumentProgress>>> {
let db = &state.db;
if let Some(doc) = db.get_position(&user.name, &doc_id).await? {
Ok(payload::Response::new(JsonPayload(DocumentProgress {
device: doc.device,
device_id: doc.device_id,
percentage: doc.percentage,
progress: doc.progress,
timestamp: doc.timestamp.assume_utc().unix_timestamp(),
}))
.status(StatusCode::OK))
} else {
Err(Error::from_status(StatusCode::NOT_FOUND))
}
}
}
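These handlers are the API surface of the sync server. As a minimal sketch of the unauthenticated part, assuming the default `localhost:3030` listener from `app/src/cli.rs` and the `/api` prefix set in `app/src/lib.rs` (the username and password are illustrative):

```sh
# Register a user (illustrative credentials): 201 Created with the echoed
# username, or 409 Conflict if the name is already taken.
curl -i -X POST http://localhost:3030/api/users/create \
  -H 'Content-Type: application/json' \
  -d '{"username": "alice", "password": "secret"}'
```

The authenticated sync routes are sketched after `auth_middleware.rs` below, since they need the custom auth headers defined there.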

6
app/src/app_state.rs Normal file

@@ -0,0 +1,6 @@
use crate::{cli::Config, db::Db};
pub struct AppState {
pub config: Config,
pub db: Db,
}

108
app/src/auth_middleware.rs Normal file

@@ -0,0 +1,108 @@
use poem::{
error::ResponseError, http::StatusCode, Endpoint, EndpointExt, Error, Middleware, Request,
Result,
};
use std::sync::Arc;
use thiserror::Error as ThisError;
use crate::{app_state::AppState, error_response};
pub fn authorize(ep: impl Endpoint) -> impl Endpoint {
ep.with(AuthMiddleware {})
}
#[derive(ThisError, Debug)]
pub enum AuthError {
#[error("failed to extract request data")]
DataExtractError,
}
#[derive(Debug, Clone)]
pub struct User {
pub name: String,
}
impl ResponseError for AuthError {
fn status(&self) -> StatusCode {
StatusCode::INTERNAL_SERVER_ERROR
}
fn as_response(&self) -> poem::Response
where
Self: std::error::Error + Send + Sync + 'static,
{
error_response::create_and_log(self, || match self {
AuthError::DataExtractError => (
"An internal server error ocurred".to_string(),
StatusCode::INTERNAL_SERVER_ERROR,
),
})
}
}
struct AuthMiddleware;
impl<E: Endpoint> Middleware<E> for AuthMiddleware {
type Output = AuthMiddlewareImpl<E>;
fn transform(&self, ep: E) -> Self::Output {
AuthMiddlewareImpl { ep }
}
}
/// The new endpoint type generated by the AuthMiddleware.
struct AuthMiddlewareImpl<E> {
ep: E,
}
const HEADER_USER: &str = "x-auth-user";
const HEADER_KEY: &str = "x-auth-key";
fn get_header(
header: &str,
req: &Request,
error_msg: &str,
error_status: StatusCode,
) -> Result<String> {
Ok(req
.header(header)
.ok_or(Error::from_string(error_msg, error_status))?
.to_string())
}
impl<E: Endpoint> Endpoint for AuthMiddlewareImpl<E> {
type Output = E::Output;
async fn call(&self, mut req: Request) -> Result<Self::Output> {
let username = get_header(
HEADER_USER,
&req,
"No user specified",
StatusCode::UNAUTHORIZED,
)?;
let key = get_header(
HEADER_KEY,
&req,
"No key specified",
StatusCode::UNAUTHORIZED,
)?;
let state = req
.data::<Arc<AppState>>()
.ok_or(AuthError::DataExtractError)?;
let db = &state.db;
if let Some(user) = db.get_user(&username).await? {
if user.key == key {
req.extensions_mut().insert(User { name: username });
return self.ep.call(req).await;
}
}
Err(Error::from_string(
"Unauthorized user",
StatusCode::FORBIDDEN,
))
}
}
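Routes wrapped with `authorize` expect the `x-auth-user` and `x-auth-key` headers checked above; a missing header is answered with 401 Unauthorized, a wrong key or unknown user with 403 Forbidden. A sketch of the authenticated calls, continuing the illustrative user from the `api.rs` example (document and device values are placeholders):

```sh
# Verify credentials (the key must match what was stored at registration).
curl -i http://localhost:3030/api/users/auth \
  -H 'x-auth-user: alice' -H 'x-auth-key: secret'

# Push reading progress for a document (all field values are placeholders).
curl -i -X PUT http://localhost:3030/api/syncs/progress \
  -H 'x-auth-user: alice' -H 'x-auth-key: secret' \
  -H 'Content-Type: application/json' \
  -d '{"device": "reader", "device_id": "device-1", "document": "doc-1", "percentage": 0.42, "progress": "page-42"}'

# Pull it back; 404 Not Found if nothing has been stored for this document.
curl -i http://localhost:3030/api/syncs/progress/doc-1 \
  -H 'x-auth-user: alice' -H 'x-auth-key: secret'
```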

16
app/src/cli.rs Normal file

@@ -0,0 +1,16 @@
//! Command line interface.
use clap::Parser;
/// Implementation of a koreader-sync server.
#[derive(Parser, Debug, Clone)]
#[command(version, about, long_about = None)]
pub struct Config {
/// Address to listen on
#[arg(short, long, env, default_value = "localhost:3030")]
pub address: String,
/// From which file to read the database connection string ("-" for stdin)
#[arg(short, long, env, default_value = "-")]
pub db_connection: String,
}
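Both flags can also be supplied through environment variables (clap's `env` attribute). A minimal invocation sketch; the binary name comes from `app/Cargo.toml`, and the SQLite URL and file name are illustrative:

```sh
# Pipe the connection string in on stdin (the default for --db-connection)...
echo 'sqlite://hesinde.db?mode=rwc' | hesinde-sync --address 0.0.0.0:3030

# ...or read it from the first line of a file instead.
hesinde-sync --address localhost:3030 --db-connection ./db-url.txt
```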

101
app/src/db.rs Normal file

@@ -0,0 +1,101 @@
use ::entity::{document, user};
use migration::{Migrator, MigratorTrait};
use sea_orm::{
ActiveModelTrait, ColumnTrait, Database, DatabaseConnection, DbErr, EntityTrait, QueryFilter,
Set,
};
use thiserror::Error;
use time::{OffsetDateTime, PrimitiveDateTime};
use tracing::debug;
use crate::api::DocumentUpdate;
#[derive(Error, Debug)]
pub enum DataStoreError {
#[error("database error")]
DatabaseError(#[from] DbErr),
}
#[derive(Debug)]
pub struct DocumentInsert {
id: String,
user: String,
device: String,
device_id: String,
percentage: f32,
progress: String,
}
impl From<(String, DocumentUpdate)> for DocumentInsert {
fn from(value: (String, DocumentUpdate)) -> Self {
Self {
id: value.1.document,
user: value.0,
device: value.1.device,
device_id: value.1.device_id,
percentage: value.1.percentage,
progress: value.1.progress,
}
}
}
pub struct Db {
connection: DatabaseConnection,
}
impl Db {
pub async fn add_user(&self, id: &str, key: &str) -> Result<user::Model, DataStoreError> {
let user = user::ActiveModel {
id: Set(id.to_owned()),
key: Set(key.to_owned()),
};
debug!("saving user {user:?}");
Ok(user.insert(&self.connection).await?)
}
pub async fn get_user(&self, id: &str) -> Result<Option<user::Model>, DataStoreError> {
Ok(user::Entity::find_by_id(id).one(&self.connection).await?)
}
pub async fn get_position(
&self,
user_id: &str,
doc_id: &str,
) -> Result<Option<document::Model>, DataStoreError> {
Ok(document::Entity::find()
.filter(document::Column::Id.eq(doc_id))
.filter(document::Column::User.eq(user_id))
.one(&self.connection)
.await?)
}
pub async fn update_position(&self, doc: &DocumentInsert) -> Result<(), DataStoreError> {
let now = OffsetDateTime::now_utc();
let now = PrimitiveDateTime::new(now.date(), now.time());
let old_doc = self.get_position(&doc.user, &doc.id).await?;
let new_doc = document::ActiveModel {
id: Set(doc.id.clone()),
user: Set(doc.user.clone()),
device: Set(doc.device.clone()),
device_id: Set(doc.device_id.clone()),
percentage: Set(doc.percentage),
progress: Set(doc.progress.clone()),
timestamp: Set(now),
};
match old_doc {
Some(_) => new_doc.update(&self.connection).await?,
None => new_doc.insert(&self.connection).await?,
};
Ok(())
}
}
pub async fn connect(connection_string: &str) -> Result<Db, DataStoreError> {
let connection: DatabaseConnection = Database::connect(connection_string).await?;
Migrator::up(&connection, None).await?;
Ok(Db { connection })
}

38
app/src/error_response.rs Normal file

@@ -0,0 +1,38 @@
use poem::{http::StatusCode, Body, Response};
use poem_openapi::Object;
use serde::Serialize;
use std::error::Error;
use tracing::error;
use uuid::Uuid;
#[derive(Object, Serialize)]
struct ErrorResponse {
id: String,
message: String,
}
pub fn create_and_log<F>(error: impl Error, cb: F) -> Response
where
F: Fn() -> (String, StatusCode),
{
let id = Uuid::new_v4().to_string();
error!("{id}: {error:?}");
let (message, status) = cb();
let response_json = ErrorResponse {
id: id.clone(),
message: message.clone(),
};
let response_body = Body::from_json(response_json).unwrap_or(Body::from_string(
format!(
"{{\"id\": \"{id}\", \"message\": \"{}\" }}",
message.clone()
)
.to_owned(),
));
Response::builder()
.status(status)
.header("Content-Type", "application/json")
.body(response_body)
}

46
app/src/lib.rs Normal file

@@ -0,0 +1,46 @@
use anyhow::Result;
use api::Api;
use app_state::AppState;
use cli::Config;
use poem::{
http::{uri::Scheme, Uri},
listener::TcpListener,
middleware::Tracing,
EndpointExt, Route,
};
use poem_openapi::OpenApiService;
use std::sync::Arc;
pub mod api;
pub mod app_state;
pub mod auth_middleware;
pub mod cli;
pub mod db;
pub mod error_response;
pub async fn run(args: &Config, db_url: &str) -> Result<()> {
let db = db::connect(db_url).await?;
let app_state = Arc::new(AppState {
config: args.clone(),
db,
});
const API_PATH: &str = "/api";
let api_uri = Uri::builder()
.scheme(Scheme::HTTP)
.authority(args.address.clone())
.path_and_query(API_PATH)
.build()?;
let api_service = OpenApiService::new(Api, "Hesinde Sync", "1.0").server(api_uri.to_string());
let ui = api_service.swagger_ui();
let app = Route::new()
.nest(API_PATH, api_service)
.nest("/", ui)
.data(app_state)
.with(Tracing);
Ok(poem::Server::new(TcpListener::bind(&args.address))
.run(app)
.await?)
}
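With the routing above, the OpenAPI routes live under `/api` and the generated Swagger UI is served at the site root. A quick smoke test against the default listener (sketch, assuming the server is already running):

```sh
# Swagger UI at the root, JSON API under /api.
curl -si http://localhost:3030/ | head -n 1
curl -si http://localhost:3030/api/users/auth | head -n 1   # 401 without auth headers
```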

41
app/src/main.rs Normal file

@@ -0,0 +1,41 @@
use std::{
fs::File,
io::{self, BufRead, Read},
};
use anyhow::Result;
use clap::Parser;
use hesinde_sync::cli::Config;
#[tokio::main]
async fn main() -> Result<()> {
if std::env::var_os("RUST_LOG").is_none() {
std::env::set_var("RUST_LOG", "info");
}
tracing_subscriber::fmt::init();
let args = Config::parse();
let db_url = read_db_url(&args.db_connection)?;
hesinde_sync::run(&args, &db_url).await
}
/// Read db url from file or stdin.
fn read_db_url(db_arg: &str) -> Result<String> {
if db_arg == "-" {
let stdin = io::stdin();
let mut buffer = String::new();
stdin.lock().read_to_string(&mut buffer)?;
let db_url = buffer.trim();
Ok(db_url.to_string())
} else {
let file = File::open(db_arg)?;
let mut reader = io::BufReader::new(file);
let mut first_line = String::new();
reader.read_line(&mut first_line)?;
let first_line = first_line.trim();
Ok(first_line.to_string())
}
}

11
entity/Cargo.toml Normal file

@@ -0,0 +1,11 @@
[package]
name = "entity"
version = "0.1.0"
edition = "2021"
license.workspace = true
authors.workspace = true
repository.workspace = true
[dependencies]
sea-orm = { workspace = true, features = ["with-time", "sqlx-sqlite", "sqlx-postgres", "sqlx-mysql", "runtime-tokio-rustls", "macros" ] }
time = { workspace = true }

37
entity/src/document.rs Normal file

@@ -0,0 +1,37 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.15
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "document")]
pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
pub user: String,
pub device: String,
pub device_id: String,
#[sea_orm(column_type = "Float")]
pub percentage: f32,
pub progress: String,
pub timestamp: TimeDateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::user::Entity",
from = "Column::User",
to = "super::user::Column::Id",
on_update = "NoAction",
on_delete = "NoAction"
)]
User,
}
impl Related<super::user::Entity> for Entity {
fn to() -> RelationDef {
Relation::User.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

6
entity/src/lib.rs Normal file

@@ -0,0 +1,6 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.15
pub mod prelude;
pub mod document;
pub mod user;

4
entity/src/prelude.rs Normal file

@@ -0,0 +1,4 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.15
pub use super::document::Entity as Document;
pub use super::user::Entity as User;

25
entity/src/user.rs Normal file

@@ -0,0 +1,25 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.15
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "user")]
pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
pub key: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::document::Entity")]
Document,
}
impl Related<super::document::Entity> for Entity {
fn to() -> RelationDef {
Relation::Document.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

146
flake.lock Normal file

@@ -0,0 +1,146 @@
{
"nodes": {
"fenix": {
"inputs": {
"nixpkgs": "nixpkgs",
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1720074497,
"narHash": "sha256-sVef4TBLxzzm0rGUF/7YGhDtqZt9heXtSXVv7Q07W80=",
"owner": "nix-community",
"repo": "fenix",
"rev": "10a39229bce0788322200b6ff1a59f37d22e9ff7",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "fenix",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"naersk": {
"inputs": {
"nixpkgs": "nixpkgs_2"
},
"locked": {
"lastModified": 1718727675,
"narHash": "sha256-uFsCwWYI2pUpt0awahSBorDUrUfBhaAiyz+BPTS2MHk=",
"owner": "nix-community",
"repo": "naersk",
"rev": "941ce6dc38762a7cfb90b5add223d584feed299b",
"type": "github"
},
"original": {
"owner": "nix-community",
"ref": "master",
"repo": "naersk",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1719848872,
"narHash": "sha256-H3+EC5cYuq+gQW8y0lSrrDZfH71LB4DAf+TDFyvwCNA=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "00d80d13810dbfea8ab4ed1009b09100cca86ba8",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 0,
"narHash": "sha256-H3+EC5cYuq+gQW8y0lSrrDZfH71LB4DAf+TDFyvwCNA=",
"path": "/nix/store/j4jzjbr302cw5bl0n3pch5j9bh5qwmaj-source",
"type": "path"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"nixpkgs_3": {
"locked": {
"lastModified": 1719848872,
"narHash": "sha256-H3+EC5cYuq+gQW8y0lSrrDZfH71LB4DAf+TDFyvwCNA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "00d80d13810dbfea8ab4ed1009b09100cca86ba8",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"fenix": "fenix",
"flake-utils": "flake-utils",
"naersk": "naersk",
"nixpkgs": "nixpkgs_3"
}
},
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1719997308,
"narHash": "sha256-dQx1p/2ObV+iDriPWTBvELCwxe9ZbOimKTJKE3MA2FQ=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "cae997e3380363a906588f14c7b4587f39cf09f5",
"type": "github"
},
"original": {
"owner": "rust-lang",
"ref": "nightly",
"repo": "rust-analyzer",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

144
flake.nix Normal file

@@ -0,0 +1,144 @@
# thanks to https://code.betamike.com/micropelago/domani for the flake, I still do not completely understand it :)
{
description = "hesinde-sync project";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
naersk.url = "github:nix-community/naersk/master";
fenix.url = "github:nix-community/fenix";
};
outputs =
{
nixpkgs,
flake-utils,
naersk,
fenix,
...
}:
let
mkToolchain =
fenixPkgs:
fenixPkgs.fromToolchainFile {
file = ./rust-toolchain.toml;
sha256 = "sha256-Ngiz76YP4HTY75GGdH2P+APE/DEIx2R/Dn+BwwOyzZU=";
};
buildTargets = {
"x86_64-linux" = {
crossSystemConfig = "x86_64-unknown-linux-musl";
rustTarget = "x86_64-unknown-linux-musl";
};
"i686-linux" = {
crossSystemConfig = "i686-unknown-linux-musl";
rustTarget = "i686-unknown-linux-musl";
};
"aarch64-linux" = {
crossSystemConfig = "aarch64-unknown-linux-musl";
rustTarget = "aarch64-unknown-linux-musl";
};
};
eachSystem =
supportedSystems: callback:
builtins.foldl' (overall: system: overall // { ${system} = callback system; }) { } supportedSystems;
eachCrossSystem =
supportedSystems: callback:
eachSystem supportedSystems (
buildSystem:
builtins.foldl' (
inner: targetSystem: inner // { "cross-${targetSystem}" = callback buildSystem targetSystem; }
) { default = callback buildSystem buildSystem; } supportedSystems
);
mkPkgs =
buildSystem: targetSystem:
import nixpkgs (
{
system = buildSystem;
}
// (
if targetSystem == null then
{ }
else
{ crossSystem.config = buildTargets.${targetSystem}.crossSystemConfig; }
)
);
in
flake-utils.lib.eachDefaultSystem (
system:
let
pkgs = import nixpkgs { inherit system; };
toolchain = mkToolchain fenix.packages.${system};
in
{
devShells.default =
with pkgs;
mkShell {
buildInputs = [
geckodriver
toolchain
cargo-deny
rust-analyzer
sea-orm-cli
];
};
}
)
// {
packages = eachCrossSystem (builtins.attrNames buildTargets) (
buildSystem: targetSystem:
let
pkgs = mkPkgs buildSystem null;
pkgsCross = mkPkgs buildSystem targetSystem;
rustTarget = buildTargets.${targetSystem}.rustTarget;
fenixPkgs = fenix.packages.${buildSystem};
toolchain = mkToolchain fenixPkgs;
buildPackageAttrs =
if builtins.hasAttr "makeBuildPackageAttrs" buildTargets.${targetSystem} then
buildTargets.${targetSystem}.makeBuildPackageAttrs pkgsCross
else
{ };
naersk-lib = pkgs.callPackage naersk {
cargo = toolchain;
rustc = toolchain;
};
in
naersk-lib.buildPackage (
buildPackageAttrs
// rec {
src = ./.;
strictDeps = true;
doCheck = false;
OPENSSL_STATIC = "1";
OPENSSL_LIB_DIR = "${pkgsCross.pkgsStatic.openssl.out}/lib";
OPENSSL_INCLUDE_DIR = "${pkgsCross.pkgsStatic.openssl.dev}/include";
# Required because ring crate is special. This also seems to have
# fixed some issues with the x86_64-windows cross-compile :shrug:
TARGET_CC = "${pkgsCross.stdenv.cc}/bin/${pkgsCross.stdenv.cc.targetPrefix}cc";
CARGO_BUILD_TARGET = rustTarget;
CARGO_BUILD_RUSTFLAGS = [
"-C"
"target-feature=+crt-static"
# https://github.com/rust-lang/cargo/issues/4133
"-C"
"linker=${TARGET_CC}"
];
}
)
);
};
}

16
migration/Cargo.toml Normal file

@@ -0,0 +1,16 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
name = "migration"
path = "src/lib.rs"
[dependencies]
async-std = { version = "1", features = ["attributes", "tokio1"] }
[dependencies.sea-orm-migration]
version = "0.12.0"
features = ["sqlx-sqlite", "sqlx-postgres", "sqlx-mysql", "runtime-tokio-rustls"]

41
migration/README.md Normal file

@@ -0,0 +1,41 @@
# Running Migrator CLI
- Generate a new migration file
```sh
cargo run -- generate MIGRATION_NAME
```
- Apply all pending migrations
```sh
cargo run
```
```sh
cargo run -- up
```
- Apply first 10 pending migrations
```sh
cargo run -- up -n 10
```
- Rollback last applied migrations
```sh
cargo run -- down
```
- Rollback last 10 applied migrations
```sh
cargo run -- down -n 10
```
- Drop all tables from the database, then reapply all migrations
```sh
cargo run -- fresh
```
- Rollback all applied migrations, then reapply all migrations
```sh
cargo run -- refresh
```
- Rollback all applied migrations
```sh
cargo run -- reset
```
- Check the status of all migrations
```sh
cargo run -- status
```
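All of the commands above talk to whatever database the migrator is pointed at. A brief sketch, assuming the connection string is supplied via the `DATABASE_URL` environment variable read by the sea-orm-migration CLI (the SQLite file name is illustrative):
```sh
DATABASE_URL='sqlite://hesinde.db?mode=rwc' cargo run -- up
```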

16
migration/src/lib.rs Normal file

@@ -0,0 +1,16 @@
pub use sea_orm_migration::prelude::*;
mod m20220101_000001_users;
mod m20240707_142202_documents;
pub struct Migrator;
#[async_trait::async_trait]
impl MigratorTrait for Migrator {
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
vec![
Box::new(m20220101_000001_users::Migration),
Box::new(m20240707_142202_documents::Migration),
]
}
}

33
migration/src/m20220101_000001_users.rs Normal file

@@ -0,0 +1,33 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
Table::create()
.table(User::Table)
.if_not_exists()
.col(ColumnDef::new(User::Id).string().not_null().primary_key())
.col(ColumnDef::new(User::Key).string().not_null())
.to_owned(),
)
.await
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(User::Table).to_owned())
.await
}
}
#[derive(DeriveIden)]
pub enum User {
Table,
Id,
Key,
}

56
migration/src/m20240707_142202_documents.rs Normal file

@@ -0,0 +1,56 @@
use sea_orm_migration::prelude::*;
use crate::m20220101_000001_users::User;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
Table::create()
.table(Document::Table)
.if_not_exists()
.col(
ColumnDef::new(Document::Id)
.string()
.not_null()
.primary_key(),
)
.col(ColumnDef::new(Document::User).string().not_null())
.col(ColumnDef::new(Document::Device).string().not_null())
.col(ColumnDef::new(Document::DeviceId).string().not_null())
.col(ColumnDef::new(Document::Percentage).float().not_null())
.col(ColumnDef::new(Document::Progress).string().not_null())
.col(ColumnDef::new(Document::Timestamp).date_time().not_null())
.foreign_key(
ForeignKey::create()
.name("fk-user-document")
.from(Document::Table, Document::User)
.to(User::Table, User::Id),
)
.to_owned(),
)
.await
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(Document::Table).to_owned())
.await
}
}
#[derive(DeriveIden)]
enum Document {
Table,
Id,
User,
Device,
DeviceId,
Percentage,
Progress,
Timestamp,
}

6
migration/src/main.rs Normal file

@@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;
#[async_std::main]
async fn main() {
cli::run_cli(migration::Migrator).await;
}

5
rust-toolchain.toml Normal file

@@ -0,0 +1,5 @@
[toolchain]
channel = "stable"
components = [ "clippy", "rustfmt" ]
targets = [ "i686-unknown-linux-musl", "aarch64-unknown-linux-musl", "x86_64-unknown-linux-musl" ]
profile = "minimal"