feat: Add thumbnail generation feature and update media model
- Updated workspace members to include `libertas_importer`.
- Added a new migration to add a `thumbnail_path` column to the `media` table.
- Enhanced configuration structures to include `thumbnail_config`.
- Modified the `Media` model to include `thumbnail_path`.
- Updated the `MediaRepository` trait and its implementation to handle thumbnail path updates (see the sketch below).
- Created `ThumbnailPlugin` for generating thumbnails based on media configurations.
- Integrated thumbnail generation into the media processing workflow.
- Updated dependencies in `libertas_worker` and `libertas_importer` for image processing.
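The workspace, migration, model, and `ThumbnailPlugin` changes listed above fall outside the two importer files shown below. For orientation, here is a minimal sketch of the repository-side addition, assuming an `async_trait`-based trait and a method name along the lines of `update_thumbnail_path`; neither the name nor the signature appears in this diff:

    use async_trait::async_trait;
    use uuid::Uuid;

    use crate::error::CoreResult;

    // Hypothetical sketch, not the diff's actual code: the real trait has
    // more methods (create, find_by_hash, ...) and the method name and
    // signature may differ. The Option<String> `thumbnail_path` field on
    // `Media` would be backed by the new nullable column on `media`.
    #[async_trait]
    pub trait MediaRepository: Send + Sync {
        /// Records the relative path of a generated thumbnail for a media row.
        async fn update_thumbnail_path(&self, media_id: Uuid, thumbnail_path: &str) -> CoreResult<()>;
    }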
libertas_importer/src/config.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
use libertas_core::{
    config::{Config, DatabaseConfig, DatabaseType},
    error::CoreResult,
};

pub fn load_config() -> CoreResult<Config> {
    Ok(Config {
        database: DatabaseConfig {
            db_type: DatabaseType::Postgres,
            url: "postgres://libertas:libertas_password@localhost:5436/libertas_db".to_string(),
        },
        server_address: "127.0.0.1:8080".to_string(),
        jwt_secret: "super_secret_jwt_key".to_string(),
        media_library_path: "media_library".to_string(),
        broker_url: "nats://localhost:4222".to_string(),
        max_upload_size_mb: Some(100),
        default_storage_quota_gb: Some(10),
        allowed_sort_columns: Some(vec![
            "date_taken".to_string(),
            "created_at".to_string(),
            "original_filename".to_string(),
        ]),
        thumbnail_config: None,
    })
}
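`load_config` leaves `thumbnail_config` as `None`, so this importer defers thumbnail settings to the worker's defaults. The shape of the config type is not visible in this diff; if it is an optional struct, a populated value might look roughly like this fragment (the type name and fields are assumptions):

    // Hypothetical fragment in place of `thumbnail_config: None` above;
    // the real type is defined in libertas_core::config and may differ.
    thumbnail_config: Some(ThumbnailConfig {
        max_width: 320,   // bounding box for generated thumbnails
        max_height: 320,
        format: "webp".to_string(),
    }),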
libertas_importer/src/main.rs (new file, 194 lines)
@@ -0,0 +1,194 @@
use std::{path::{Path, PathBuf}, sync::Arc};

use anyhow::Result;
use chrono::{DateTime, Datelike, NaiveDateTime, Utc};
use clap::Parser;
use libertas_core::{config::Config, error::{CoreError, CoreResult}, models::{Media, User}, repositories::{MediaRepository, UserRepository}};
use libertas_infra::factory::{build_database_pool, build_media_repository, build_user_repository};
use nom_exif::{AsyncMediaParser, AsyncMediaSource, Exif, ExifIter, ExifTag};
use sha2::{Digest, Sha256};
use tokio::fs;
use uuid::Uuid;
use walkdir::WalkDir;

mod config;

#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[arg(short, long)]
    username: String,
    #[arg(short, long)]
    path: String,
    #[arg(short, long, default_value_t = false)]
    recursive: bool,
}

struct ImporterState {
    config: Config,
    media_repo: Arc<dyn MediaRepository>,
    user_repo: Arc<dyn UserRepository>,
    nats_client: async_nats::Client,
}

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();
    println!("Starting import for user: '{}' from path '{}'...", cli.username, cli.path);

    let config = config::load_config()?;
    let db_pool = build_database_pool(&config.database).await?;
    let media_repo = build_media_repository(&config, db_pool.clone()).await?;
    let user_repo = build_user_repository(&config.database, db_pool.clone()).await?;
    let nats_client = async_nats::connect(&config.broker_url).await?;

    println!("Connected to database and NATS broker.");

    let state = ImporterState {
        config,
        media_repo,
        user_repo,
        nats_client,
    };

    let user = state
        .user_repo
        .find_by_username(&cli.username)
        .await?
        .ok_or_else(|| anyhow::anyhow!("User '{}' not found", cli.username))?;

    println!("User '{}' found with ID: {}", cli.username, user.id);
    println!("Storage: {} / {}", user.storage_used, user.storage_quota);

    let max_depth = if cli.recursive { usize::MAX } else { 1 };
    let walker = WalkDir::new(&cli.path).max_depth(max_depth).into_iter();

    for entry in walker.filter_map(Result::ok) {
        if entry.file_type().is_file() {
            let path = entry.path();

            match process_file(path, &user, &state).await {
                Ok(media) => {
                    println!("-> Imported: '{}'", media.original_filename);
                }
                Err(e) => {
                    eprintln!("!! Skipped: '{}' (Reason: {})", path.display(), e);
                }
            }
        }
    }

    println!("Import process completed.");

    Ok(())
}
async fn process_file(
    file_path: &Path,
    user: &User,
    state: &ImporterState,
) -> CoreResult<Media> {
    let file_bytes = fs::read(file_path).await?;
    let file_size = file_bytes.len() as i64;
    let hash = format!("{:x}", Sha256::digest(&file_bytes));
    let filename = file_path
        .file_name()
        .unwrap_or_default()
        .to_string_lossy()
        .to_string();

    // Re-fetch the user so the quota check runs against current usage.
    let user_after_check = state
        .user_repo
        .find_by_id(user.id)
        .await?
        .ok_or_else(|| CoreError::Unknown(format!("User {} no longer exists", user.id)))?;
    if user_after_check.storage_used + file_size > user_after_check.storage_quota {
        return Err(CoreError::Auth("Storage quota exceeded".to_string()));
    }
    if state.media_repo.find_by_hash(&hash).await?.is_some() {
        return Err(CoreError::Duplicate(
            "A file with this content already exists".to_string(),
        ));
    }

    // Best-effort EXIF extraction; any failure falls back to no metadata.
    let (width, height, location, date_taken) =
        match AsyncMediaSource::file_path(file_path).await {
            Ok(ms) => {
                if ms.has_exif() {
                    let mut parser = AsyncMediaParser::new();
                    if let Ok(iter) = parser.parse::<_, _, ExifIter>(ms).await {
                        let gps = iter.parse_gps_info().ok().flatten().map(|g| g.format_iso6709());
                        println!("  -> EXIF GPS Info: {:?}", gps);
                        let exif: Exif = iter.into();
                        let modified_date = exif.get(ExifTag::ModifyDate).and_then(|f| f.as_str()).and_then(parse_exif_datetime);
                        println!("  -> EXIF ModifyDate: {:?}", modified_date);
                        let w = exif.get(ExifTag::ExifImageWidth).and_then(|f| f.as_u32()).map(|v| v as i32);
                        println!("  -> EXIF ExifImageWidth: {:?}", w);
                        let h = exif.get(ExifTag::ExifImageHeight).and_then(|f| f.as_u32()).map(|v| v as i32);
                        println!("  -> EXIF ExifImageHeight: {:?}", h);
                        let dt = exif.get(ExifTag::DateTimeOriginal).and_then(|f| f.as_str()).and_then(parse_exif_datetime);
                        println!("  -> EXIF DateTimeOriginal: {:?}", dt);
                        (w, h, gps, dt)
                    } else {
                        (None, None, None, None)
                    }
                } else {
                    (None, None, None, None)
                }
            }
            Err(_) => (None, None, None, None),
        };
    // File the media under <library>/<year>/<month>/, keyed by the EXIF
    // date when present and the import time otherwise.
    let file_date = date_taken.unwrap_or_else(chrono::Utc::now);
    let year = file_date.year().to_string();
    let month = format!("{:02}", file_date.month());
    let mut dest_path_buf = PathBuf::from(&state.config.media_library_path);
    dest_path_buf.push(&year);
    dest_path_buf.push(&month);

    fs::create_dir_all(&dest_path_buf).await?;

    dest_path_buf.push(&filename);

    fs::copy(file_path, &dest_path_buf).await?;

    let storage_path_str = PathBuf::from(&year)
        .join(&month)
        .join(&filename)
        .to_string_lossy()
        .to_string();

    let mime_type = mime_guess::from_path(file_path)
        .first_or_octet_stream()
        .to_string();

    let media_model = Media {
        id: Uuid::new_v4(),
        owner_id: user.id,
        storage_path: storage_path_str,
        original_filename: filename,
        mime_type,
        hash,
        created_at: chrono::Utc::now(),
        extracted_location: location,
        width,
        height,
        date_taken,
        thumbnail_path: None,
    };

    state.media_repo.create(&media_model).await?;
    state.user_repo
        .update_storage_used(user.id, file_size)
        .await?;

    // Notify the worker that a new media item needs post-processing
    // (e.g. thumbnail generation).
    let job_payload = serde_json::json!({ "media_id": media_model.id });
    state.nats_client
        .publish("media.new".to_string(), job_payload.to_string().into())
        .await
        .map_err(|e| CoreError::Unknown(format!("Failed to publish NATS message: {}", e)))?;

    Ok(media_model)
}
/// Parses EXIF datetime strings of the form "YYYY:MM:DD HH:MM:SS",
/// treating them as UTC (EXIF stores no timezone in these tags).
fn parse_exif_datetime(s: &str) -> Option<DateTime<Utc>> {
    NaiveDateTime::parse_from_str(s, "%Y:%m:%d %H:%M:%S")
        .ok()
        .map(|ndt| ndt.and_local_timezone(Utc).unwrap()) // infallible: Utc has no DST ambiguity
}
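On the consuming side of the `media.new` subject, the new `ThumbnailPlugin` in `libertas_worker` (not part of this diff) would pick up these jobs and produce thumbnails with the image-processing dependencies mentioned above. A minimal sketch of such a consumer, assuming the `image` crate, a `find_by_id` lookup on `MediaRepository`, and the hypothetical `update_thumbnail_path` method from the earlier sketch:

    use std::{path::Path, sync::Arc};

    use futures_util::StreamExt;
    use libertas_core::repositories::MediaRepository;
    use serde::Deserialize;

    #[derive(Deserialize)]
    struct NewMediaJob {
        media_id: uuid::Uuid,
    }

    // Hypothetical worker loop: the "media.new" subject and payload shape
    // match the publisher in main.rs; find_by_id, update_thumbnail_path,
    // the 320px bound, and the thumbnails/ layout are all assumptions.
    async fn run_thumbnail_plugin(
        nats: async_nats::Client,
        media_repo: Arc<dyn MediaRepository>,
        library_root: &Path,
    ) -> anyhow::Result<()> {
        let mut sub = nats.subscribe("media.new".to_string()).await?;
        while let Some(msg) = sub.next().await {
            let job: NewMediaJob = serde_json::from_slice(&msg.payload)?;
            let Some(media) = media_repo.find_by_id(job.media_id).await? else {
                continue;
            };
            let src = library_root.join(&media.storage_path);
            // thumbnail() downscales to fit the bounds, preserving aspect ratio.
            let thumb = image::open(&src)?.thumbnail(320, 320);
            let rel = format!("thumbnails/{}.png", media.id);
            let dst = library_root.join(&rel);
            if let Some(parent) = dst.parent() {
                std::fs::create_dir_all(parent)?;
            }
            thumb.save(&dst)?;
            media_repo.update_thumbnail_path(media.id, &rel).await?;
        }
        Ok(())
    }

In the actual plugin, the bounds and output format presumably come from `thumbnail_config` rather than being hard-coded as they are in this sketch.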