Rewrite from mongodb to sqlx #1

Merged
GKaszewski merged 2 commits from sqlx into master 2025-07-20 13:16:32 +00:00
13 changed files with 720 additions and 1004 deletions
Showing only changes of commit 27e9119123


@@ -1,2 +1 @@
-MONGODB_URI=mongodb://localhost:27017
-DB_NAME=rick_and_morty
+DATABASE_URL="sqlite:rick_and_morty.db"

.gitignore vendored (1 change)

@@ -1,2 +1,3 @@
 /target
 .env
+*.db

Cargo.lock generated (1388 changes)
File diff suppressed because it is too large

@@ -7,14 +7,18 @@ edition = "2024"
 anyhow = "1.0.98"
 axum = { version = "0.8.4", features = ["macros"] }
 dotenvy = "0.15.7"
-futures-util = "0.3.31"
 http = "1.3.1"
-mongodb = "3.2.4"
-once_cell = "1.21.3"
 reqwest = { version = "0.12.22", features = ["json"] }
 serde = { version = "1.0.219", features = ["derive"] }
 serde_json = "1.0.140"
-tokio = { version = "1.46.1", features = [] }
+sqlx = { version = "0.8.6", features = [
+    "sqlite",
+    "runtime-tokio",
+    "tls-native-tls",
+    "macros",
+    "uuid",
+] }
+tokio = { version = "1.46.1", features = ["full"] }
 tower = { version = "0.5.2", features = ["util"] }
 tower-http = { version = "0.6.6", features = ["cors", "fs", "trace"] }
 tracing = "0.1.41"


@@ -10,15 +10,20 @@ WORKDIR /app
 RUN apt-get update && \
     apt-get install -y pkg-config libssl-dev build-essential ca-certificates
+COPY migrations/ migrations/
 COPY src/ src/
 COPY Cargo.toml Cargo.lock ./
 RUN cargo build --release

 FROM debian:bookworm-slim
 WORKDIR /app
-RUN apt-get update && apt-get install -y ca-certificates openssl && rm -rf /var/lib/apt/lists/*
+RUN apt-get update && apt-get install -y ca-certificates openssl sqlite3 && rm -rf /var/lib/apt/lists/*
+RUN mkdir -p /app/data
 COPY --from=backend-builder /app/target/release/rick-and-morty .
+COPY --from=backend-builder /app/migrations ./migrations
 COPY --from=frontend-builder /app/dist ./frontend/dist
 EXPOSE 8000
-CMD ["./rick-and-morty"]
+COPY entrypoint.sh /app/
+RUN chmod +x /app/entrypoint.sh
+ENTRYPOINT ["/app/entrypoint.sh"]


@@ -1,26 +1,15 @@
services: services:
mongo:
image: mongo:6.0
container_name: rick_and_morty_mongo
restart: unless-stopped
ports:
- "27017:27017"
environment:
MONGO_INITDB_DATABASE: rick_and_morty
volumes:
- mongo-data:/data/db
app: app:
build: . build: .
container_name: rick_and_morty_app container_name: rick_and_morty_app
depends_on:
- mongo
environment: environment:
MONGODB_URI: mongodb://mongo:27017 DATABASE_URL: "sqlite:///app/data/rick_and_morty.db"
DB_NAME: rick_and_morty
BIND_ADDR: 0.0.0.0:8000 BIND_ADDR: 0.0.0.0:8000
ports: ports:
- "8000:8000" - "8000:8000"
restart: unless-stopped restart: unless-stopped
volumes:
- rick_and_morty_data:/app/data
volumes: volumes:
mongo-data: rick_and_morty_data:

entrypoint.sh Normal file (3 changes)

@@ -0,0 +1,3 @@
+#!/bin/sh
+chown -R $(id -u):$(id -g) /app/data
+exec ./rick-and-morty

migrations/001_init.sql Normal file (18 changes)

@@ -0,0 +1,18 @@
+CREATE TABLE characters (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    rmid INTEGER NOT NULL,
+    name TEXT NOT NULL,
+    status TEXT NOT NULL,
+    species TEXT NOT NULL,
+    type TEXT NOT NULL,
+    gender TEXT NOT NULL,
+    origin_name TEXT NOT NULL,
+    origin_url TEXT NOT NULL,
+    location_name TEXT NOT NULL,
+    location_url TEXT NOT NULL,
+    image TEXT NOT NULL,
+    episode TEXT NOT NULL, -- store as comma-separated or JSON
+    url TEXT NOT NULL,
+    created TEXT NOT NULL,
+    elo_rating REAL NOT NULL
+);


@@ -1,11 +1,7 @@
-use mongodb::{
-    Client,
-    bson::{doc, to_document},
-    options::UpdateOptions,
-};
 use reqwest::Client as HttpClient;
 use rick_and_morty::models::{Character, OriginOrLocation};
 use serde::{Deserialize, Serialize};
+use sqlx::{Row, SqlitePool};

 #[derive(Debug, Deserialize, Serialize)]
 struct ApiCharacter {
@@ -27,17 +23,20 @@ struct ApiCharacter {
 impl From<ApiCharacter> for Character {
     fn from(api: ApiCharacter) -> Self {
         Character {
-            id: None, // always None for new/incoming data
+            id: 0, // Ignored for new/incoming data; SQLite autoincrements
             rmid: api.id,
             name: api.name,
             status: api.status,
             species: api.species,
             r#type: api.character_type,
             gender: api.gender,
-            origin: api.origin,
-            location: api.location,
+            origin_name: api.origin.name,
+            origin_url: api.origin.url,
+            location_name: api.location.name,
+            location_url: api.location.url,
             image: api.image,
-            episode: api.episode,
+            // Store as JSON string
+            episode: serde_json::to_string(&api.episode).unwrap(),
             url: api.url,
             created: api.created,
             elo_rating: 1000.0,
@@ -58,13 +57,9 @@ fn init_tracing() {
 async fn main() -> anyhow::Result<()> {
     init_tracing();
     dotenvy::dotenv().ok();
-    let db_uri =
-        std::env::var("MONGODB_URI").unwrap_or_else(|_| "mongodb://localhost:27017".to_string());
-    let db_name = std::env::var("DB_NAME").unwrap_or_else(|_| "rick_and_morty".to_string());
-    let client = Client::with_uri_str(&db_uri).await?;
-    let db = client.database(&db_name);
-    let collection = db.collection::<Character>("characters");
+    let db_url = std::env::var("DATABASE_URL").expect("DATABASE_URL not set");
+    let pool = SqlitePool::connect(&db_url).await?;

     tracing::info!("Starting to fetch characters from Rick and Morty API");
     let http = HttpClient::new();
@@ -92,30 +87,77 @@
         count = all_characters.len(),
         "Fetched all characters, starting DB upsert"
     );
-    let options = UpdateOptions::builder().upsert(true).build();
-    // let insert_result = collection.insert_many(all_characters.clone()).await?;
     for character in &all_characters {
-        let filter = doc! { "rmid": character.rmid };
-        let mut set_doc = to_document(character)?;
-        set_doc.remove("elo_rating"); // Do NOT overwrite existing Elo
-        let update = doc! {
-            "$set": set_doc,
-            "$setOnInsert": { "elo_rating": 1000.0 }
-        };
-        if let Err(e) = collection
-            .update_one(filter, update)
-            .with_options(Some(options.clone()))
-            .await
-        {
-            tracing::error!(error = ?e, id = character.rmid, name = %character.name, "Failed to upsert character");
-        }
-        tracing::info!(id = character.rmid, name = %character.name, "Upserted character");
+        // Try to fetch existing character by rmid
+        let row = sqlx::query("SELECT id, elo_rating FROM characters WHERE rmid = ?")
+            .bind(character.rmid)
+            .fetch_optional(&pool)
+            .await?;
+
+        if let Some(row) = row {
+            // Exists: update all fields except elo_rating
+            let id: i64 = row.get("id");
+            sqlx::query(
+                "UPDATE characters SET
+                    name = ?, status = ?, species = ?, type = ?, gender = ?,
+                    origin_name = ?, origin_url = ?,
+                    location_name = ?, location_url = ?,
+                    image = ?, episode = ?, url = ?, created = ?
+                 WHERE id = ?",
+            )
+            .bind(&character.name)
+            .bind(&character.status)
+            .bind(&character.species)
+            .bind(&character.r#type)
+            .bind(&character.gender)
+            .bind(&character.origin_name)
+            .bind(&character.origin_url)
+            .bind(&character.location_name)
+            .bind(&character.location_url)
+            .bind(&character.image)
+            .bind(&character.episode)
+            .bind(&character.url)
+            .bind(&character.created)
+            .bind(id)
+            .execute(&pool)
+            .await?;
+            tracing::info!(id = character.rmid, name = %character.name, "Updated character (no elo overwrite)");
+        } else {
+            // Insert new character with default elo_rating
+            sqlx::query(
+                "INSERT INTO characters (
+                    rmid, name, status, species, type, gender,
+                    origin_name, origin_url, location_name, location_url,
+                    image, episode, url, created, elo_rating
+                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+            )
+            .bind(character.rmid)
+            .bind(&character.name)
+            .bind(&character.status)
+            .bind(&character.species)
+            .bind(&character.r#type)
+            .bind(&character.gender)
+            .bind(&character.origin_name)
+            .bind(&character.origin_url)
+            .bind(&character.location_name)
+            .bind(&character.location_url)
+            .bind(&character.image)
+            .bind(&character.episode)
+            .bind(&character.url)
+            .bind(&character.created)
+            .bind(1000.0)
+            .execute(&pool)
+            .await?;
+            tracing::info!(id = character.rmid, name = %character.name, "Inserted new character");
+        }
     }
     // tracing::info!("Inserted {} characters", insert_result.inserted_ids.len());
-    let character_count = collection.count_documents(doc! {}).await?;
+    let character_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM characters")
+        .fetch_one(&pool)
+        .await?;
     tracing::info!(
         count = character_count,
         "Total characters in DB after import"

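A possible follow-up to the import loop above, not part of this commit: if the migration also added a UNIQUE index on rmid (001_init.sql does not create one), the fetch-then-branch upsert could collapse into a single SQLite INSERT ... ON CONFLICT statement that only sets elo_rating when the row is first inserted. A rough sketch under that assumption, with an illustrative helper name:

    // Sketch only, assuming: CREATE UNIQUE INDEX idx_characters_rmid ON characters(rmid);
    // (not present in migrations/001_init.sql). elo_rating is set on insert, never on update.
    use rick_and_morty::models::Character;
    use sqlx::SqlitePool;

    async fn upsert_character(pool: &SqlitePool, c: &Character) -> Result<(), sqlx::Error> {
        sqlx::query(
            "INSERT INTO characters (
                 rmid, name, status, species, type, gender,
                 origin_name, origin_url, location_name, location_url,
                 image, episode, url, created, elo_rating
             ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
             ON CONFLICT(rmid) DO UPDATE SET
                 name = excluded.name, status = excluded.status, species = excluded.species,
                 type = excluded.type, gender = excluded.gender,
                 origin_name = excluded.origin_name, origin_url = excluded.origin_url,
                 location_name = excluded.location_name, location_url = excluded.location_url,
                 image = excluded.image, episode = excluded.episode,
                 url = excluded.url, created = excluded.created",
        )
        .bind(c.rmid)
        .bind(&c.name)
        .bind(&c.status)
        .bind(&c.species)
        .bind(&c.r#type)
        .bind(&c.gender)
        .bind(&c.origin_name)
        .bind(&c.origin_url)
        .bind(&c.location_name)
        .bind(&c.location_url)
        .bind(&c.image)
        .bind(&c.episode)
        .bind(&c.url)
        .bind(&c.created)
        .bind(1000.0)
        .execute(pool)
        .await?;
        Ok(())
    }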

@@ -1,11 +1,6 @@
-use mongodb::{Client, Database};
-use once_cell::sync::OnceCell;
-
-pub static DB: OnceCell<Database> = OnceCell::new();
-
-pub async fn connect_db(uri: &str, db_name: &str) -> mongodb::error::Result<()> {
-    let client = Client::with_uri_str(uri).await?;
-    let db = client.database(db_name);
-    DB.set(db).ok();
-    Ok(())
+use sqlx::sqlite::SqlitePool;
+
+pub async fn connect_db(db_url: &str) -> Result<SqlitePool, sqlx::Error> {
+    let pool = SqlitePool::connect(db_url).await?;
+    Ok(pool)
 }
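One caveat worth flagging here: by default sqlx's SQLite driver does not create the database file, so connect_db errors if the file behind DATABASE_URL does not exist yet, unless the URL opts in with ?mode=rwc. A variant sketch that enables creation through SqliteConnectOptions, offered only as an alternative, not as part of this PR:

    // Variant sketch: create the SQLite file on first run instead of erroring.
    use std::str::FromStr;

    use sqlx::sqlite::{SqliteConnectOptions, SqlitePool};

    pub async fn connect_db(db_url: &str) -> Result<SqlitePool, sqlx::Error> {
        let options = SqliteConnectOptions::from_str(db_url)?.create_if_missing(true);
        SqlitePool::connect_with(options).await
    }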


@@ -20,19 +20,17 @@ fn init_tracing() {
 }

 #[tokio::main]
-async fn main() {
+async fn main() -> anyhow::Result<()> {
     init_tracing();
     dotenvy::dotenv().ok();
-    let db_uri = std::env::var("MONGODB_URI").expect("MONGODB_URI not set");
-    let db_name = std::env::var("DB_NAME").unwrap_or_else(|_| "rick_and_morty".to_string());
     let address = std::env::var("BIND_ADDR").unwrap_or_else(|_| "0.0.0.0:8000".to_string());
+    let db_url = std::env::var("DATABASE_URL").expect("DATABASE_URL not set");

-    db::connect_db(&db_uri, &db_name)
-        .await
-        .expect("Failed to connect to database");
-    let db = db::DB.get().expect("Database not initialized");
+    let db = db::connect_db(&db_url).await?;
+
+    // migrations
+    sqlx::migrate!().run(&db).await?;

     let cors = CorsLayer::new()
         // allow `GET` and `POST` when accessing the resource
@@ -44,7 +42,7 @@
     let app = Router::new()
         .route("/characters", get(routes::get_characters))
         .route("/rate", post(routes::rate))
-        .with_state(db.clone())
+        .with_state(db)
         .layer(TraceLayer::new_for_http())
         .layer(cors)
         .fallback_service(ServeDir::new("frontend/dist").append_index_html_on_directories(true));
@@ -55,5 +53,9 @@
     println!("Listening on {}", listener.local_addr().unwrap());
-    axum::serve(listener, app).await.unwrap();
+    axum::serve(listener, app).await?;
+
+    tracing::info!("Server started successfully");
+
+    Ok(())
 }


@@ -1,26 +1,27 @@
-use mongodb::bson::{doc, oid::ObjectId};
 use serde::{Deserialize, Serialize};
+use sqlx::prelude::FromRow;

-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Serialize, Deserialize, Clone, FromRow)]
 pub struct OriginOrLocation {
     pub name: String,
     pub url: String,
 }

-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Serialize, Deserialize, Clone, FromRow)]
 pub struct Character {
-    #[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
-    pub id: Option<ObjectId>, // Mongo _id
+    pub id: i64,
     pub rmid: i32, // Rick&Morty ID, don't confuse with _id
     pub name: String,
     pub status: String,
     pub species: String,
     pub r#type: String,
     pub gender: String,
-    pub origin: OriginOrLocation,
-    pub location: OriginOrLocation,
+    pub origin_name: String,
+    pub origin_url: String,
+    pub location_name: String,
+    pub location_url: String,
     pub image: String,
-    pub episode: Vec<String>,
+    pub episode: String,
     pub url: String,
     pub created: String,
     pub elo_rating: f64,
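Since the routes below lean on the new FromRow derive: sqlx matches result columns to struct fields by name, so the plain SELECT * queries against the 001_init.sql schema deserialize straight into Character without manual mapping. A minimal sketch of that pattern; fetch_ranked is an illustrative name and not part of this PR:

    // Sketch only: `fetch_ranked` is an illustrative helper, not part of this PR.
    use rick_and_morty::models::Character;
    use sqlx::SqlitePool;

    async fn fetch_ranked(pool: &SqlitePool, limit: i64) -> Result<Vec<Character>, sqlx::Error> {
        // FromRow matches columns to fields by name, so no manual row mapping is needed.
        sqlx::query_as::<_, Character>("SELECT * FROM characters ORDER BY elo_rating DESC LIMIT ?")
            .bind(limit)
            .fetch_all(pool)
            .await
    }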


@@ -1,10 +1,7 @@
 use axum::response::Html;
 use axum::{Json, extract::State, http::StatusCode};
-use futures_util::stream::TryStreamExt;
-use mongodb::Database;
-use mongodb::bson::doc;
-use mongodb::bson::oid::ObjectId;
 use serde::{Deserialize, Serialize};
+use sqlx::{Sqlite, SqlitePool, Transaction};

 use crate::models::Character;
 use crate::utils::calculate_elo;
@@ -15,7 +12,7 @@ pub struct RateRequest {
     loser_id: String,
 }

-#[derive(Deserialize, Serialize)]
+#[derive(Serialize)]
 pub struct RateResponse {
     winner: Character,
     loser: Character,
@@ -24,94 +21,74 @@
 static K_FACTOR: f64 = 32.0; // K-factor for Elo rating system

 pub async fn get_characters(
-    State(db): State<Database>,
+    State(db): State<SqlitePool>,
 ) -> Result<Json<Vec<Character>>, StatusCode> {
-    let collection = db.collection::<Character>("characters");
-    let pipeline = vec![doc! { "$sort": { "elo_rating": -1 } }];
-
-    let mut cursor = collection
-        .aggregate(pipeline)
-        .await
-        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
-
-    let mut characters = Vec::new();
-    while let Some(result) = cursor
-        .try_next()
-        .await
-        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
-    {
-        let character: Character =
-            mongodb::bson::from_document(result).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
-        characters.push(character);
-    }
+    let characters =
+        sqlx::query_as::<_, Character>("SELECT * FROM characters ORDER BY elo_rating DESC")
+            .fetch_all(&db)
+            .await
+            .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

+    tracing::info!(count = characters.len(), "Fetched characters from DB");
     Ok(Json(characters))
 }

 #[axum::debug_handler]
 pub async fn rate(
-    State(db): State<Database>,
+    State(db): State<SqlitePool>,
     Json(req): Json<RateRequest>,
 ) -> Result<Json<RateResponse>, StatusCode> {
-    let collection = db.collection::<Character>("characters");
-
-    let winner_oid = ObjectId::parse_str(&req.winner_id).map_err(|_| StatusCode::BAD_REQUEST)?;
-    let loser_oid = ObjectId::parse_str(&req.loser_id).map_err(|_| StatusCode::BAD_REQUEST)?;
-
-    let winner = collection
-        .find_one(doc! { "_id": winner_oid })
-        .await
-        .ok()
-        .flatten()
-        .ok_or(StatusCode::NOT_FOUND)?;
-    let loser = collection
-        .find_one(doc! { "_id": loser_oid })
-        .await
-        .ok()
-        .flatten()
-        .ok_or(StatusCode::NOT_FOUND)?;
+    // 1. Fetch characters by ID
+    let winner: Character = sqlx::query_as("SELECT * FROM characters WHERE id = ?")
+        .bind(&req.winner_id)
+        .fetch_optional(&db)
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
+        .ok_or(StatusCode::NOT_FOUND)?;
+
+    let loser: Character = sqlx::query_as("SELECT * FROM characters WHERE id = ?")
+        .bind(&req.loser_id)
+        .fetch_optional(&db)
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
+        .ok_or(StatusCode::NOT_FOUND)?;

     let (new_winner_elo, new_loser_elo) =
         calculate_elo(winner.elo_rating, loser.elo_rating, K_FACTOR);

-    collection
-        .update_one(
-            doc! {"_id": &winner_oid},
-            doc! { "$set": { "elo_rating": new_winner_elo } },
-        )
-        .await
-        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
-    collection
-        .update_one(
-            doc! {"_id": &loser_oid},
-            doc! { "$set": { "elo_rating": new_loser_elo } },
-        )
-        .await
-        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
-
-    // return resposnse with characters' new Elo ratings
-    let updated_winner = collection
-        .find_one(doc! { "_id": winner_oid })
-        .await
-        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
-        .ok_or(StatusCode::NOT_FOUND)?;
-    let updated_loser = collection
-        .find_one(doc! { "_id": loser_oid })
-        .await
-        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
-        .ok_or(StatusCode::NOT_FOUND)?;
-
-    tracing::info!(
-        "Rated characters: winner_id = {}, loser_id = {}, new_winner_elo = {}, new_loser_elo = {}",
-        req.winner_id,
-        req.loser_id,
-        new_winner_elo,
-        new_loser_elo
-    );
+    let mut tx: Transaction<'_, Sqlite> = db
+        .begin()
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+    sqlx::query("UPDATE characters SET elo_rating = ? WHERE id = ?")
+        .bind(new_winner_elo)
+        .bind(&req.winner_id)
+        .execute(&mut *tx)
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+    sqlx::query("UPDATE characters SET elo_rating = ? WHERE id = ?")
+        .bind(new_loser_elo)
+        .bind(&req.loser_id)
+        .execute(&mut *tx)
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+    tx.commit()
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+    let updated_winner: Character = sqlx::query_as("SELECT * FROM characters WHERE id = ?")
+        .bind(&req.winner_id)
+        .fetch_one(&db)
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+    let updated_loser: Character = sqlx::query_as("SELECT * FROM characters WHERE id = ?")
+        .bind(&req.loser_id)
+        .fetch_one(&db)
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

     Ok(Json(RateResponse {
         winner: updated_winner,