feat: Refactor media service to remove extracted EXIF data handling and update job payload with thumbnail path
@@ -6,9 +6,16 @@ use std::{
 use clap::Parser;
 use libertas_core::{
-    config::AppConfig, error::{CoreError, CoreResult}, media_utils::{extract_exif_data, get_storage_path_and_date}, models::{Media, MediaMetadata, User}, repositories::{MediaMetadataRepository, MediaRepository, UserRepository}
+    config::AppConfig,
+    error::{CoreError, CoreResult},
+    media_utils::{extract_exif_data, get_storage_path_and_date},
+    models::{Media, MediaMetadata, User},
+    repositories::{MediaMetadataRepository, MediaRepository, UserRepository},
 };
-use libertas_infra::factory::{build_database_pool, build_media_metadata_repository, build_media_repository, build_user_repository};
+use libertas_infra::factory::{
+    build_database_pool, build_media_metadata_repository, build_media_repository,
+    build_user_repository,
+};
 use serde_json;
 use sha2::{Digest, Sha256};
 use tokio::fs;
 
@@ -61,7 +68,7 @@ async fn main() -> Result<()> {
         nats_client,
     };
 
-    let user = state
+    let mut user = state
         .user_repo
         .find_by_username(&cli.username)
         .await?
@@ -77,7 +84,7 @@ async fn main() -> Result<()> {
         if entry.file_type().is_file() {
             let path = entry.path();
 
-            match process_file(path, &user, &state).await {
+            match process_file(path, &mut user, &state).await {
                 Ok(media) => {
                     println!("-> Imported: '{}'", media.original_filename);
                 }
@@ -93,7 +100,11 @@ async fn main() -> Result<()> {
     Ok(())
 }
 
-async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> CoreResult<Media> {
+async fn process_file(
+    file_path: &Path,
+    user: &mut User,
+    state: &ImporterState,
+) -> CoreResult<Media> {
     let file_bytes = fs::read(file_path).await?;
     let file_size = file_bytes.len() as i64;
     let hash = format!("{:x}", Sha256::digest(&file_bytes));
@@ -103,10 +114,10 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
         .to_string_lossy()
         .to_string();
 
-    let user_after_check = state.user_repo.find_by_id(user.id).await?.unwrap();
-    if &user_after_check.storage_used + file_size > user_after_check.storage_quota {
+    if user.storage_used + file_size > user.storage_quota {
         return Err(CoreError::Auth("Storage quota exceeded".to_string()));
     }
 
     if state.media_repo.find_by_hash(&hash).await?.is_some() {
         return Err(CoreError::Duplicate(
             "A file with this content already exists".to_string(),
@@ -115,10 +126,7 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
 
     let extracted_data = match extract_exif_data(file_path).await {
         Ok(data) => {
-            println!(
-                " -> Parsed metadata: Tags={}",
-                data.all_tags.len()
-            );
+            println!(" -> Parsed metadata: Tags={}", data.all_tags.len());
             data
         }
         Err(e) => {
@@ -131,8 +139,7 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
         }
     };
 
-    let (storage_path_buf, _date_taken) =
-        get_storage_path_and_date(&extracted_data, &filename);
+    let (storage_path_buf, _date_taken) = get_storage_path_and_date(&extracted_data, &filename);
 
     let mut dest_path_buf = PathBuf::from(&state.config.media_library_path);
     dest_path_buf.push(&storage_path_buf);
@@ -146,7 +153,7 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
     fs::copy(file_path, &dest_path_buf).await?;
 
     let storage_path_str = storage_path_buf.to_string_lossy().to_string();
-
+
     let mime_type = mime_guess::from_path(file_path)
         .first_or_octet_stream()
         .to_string();
@@ -175,18 +182,16 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
     }
 
     if !metadata_models.is_empty() {
-        state
-            .metadata_repo
-            .create_batch(&metadata_models)
-            .await?;
+        state.metadata_repo.create_batch(&metadata_models).await?;
     }
 
-
     state
         .user_repo
         .update_storage_used(user.id, file_size)
         .await?;
 
+    user.storage_used += file_size;
+
     let job_payload = serde_json::json!({ "media_id": media_model.id });
     state
         .nats_client