feat: enhance media management with EXIF data extraction, metadata filtering, and storage path generation
refactor: update configuration handling to use environment variables and improve code organization
.gitignore (vendored), 2 changes
@@ -1,3 +1,5 @@
target/
.sqlx/
media_library/
.ai/
.env

Cargo.lock (generated), 11 changes
@@ -822,6 +822,15 @@ dependencies = [
"cfg-if",
]

[[package]]
name = "envy"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f47e0157f2cb54f5ae1bd371b30a2ae4311e1c028f575cd4e81de7353215965"
dependencies = [
"serde",
]

[[package]]
name = "equator"
version = "0.4.2"

@@ -1660,6 +1669,8 @@ dependencies = [
"async-trait",
"bytes",
"chrono",
"dotenvy",
"envy",
"futures",
"nom-exif",
"serde",

@@ -1 +0,0 @@
DATABASE_URL="postgres://postgres:postgres@localhost:5432/libertas_db"

@@ -1,25 +1 @@
use libertas_core::{
config::{Config, DatabaseConfig, DatabaseType},
error::CoreResult,
};

pub fn load_config() -> CoreResult<Config> {
Ok(Config {
database: DatabaseConfig {
db_type: DatabaseType::Postgres,
url: "postgres://libertas:libertas_password@localhost:5436/libertas_db".to_string(),
},
server_address: "127.0.0.1:8080".to_string(),
jwt_secret: "super_secret_jwt_key".to_string(),
media_library_path: "media_library".to_string(),
broker_url: "nats://localhost:4222".to_string(),
max_upload_size_mb: Some(100),
default_storage_quota_gb: Some(10),
allowed_sort_columns: Some(vec![
"date_taken".to_string(),
"created_at".to_string(),
"original_filename".to_string(),
]),
thumbnail_config: None,
})
}
pub use libertas_core::config::load_config;

@@ -1,5 +1,5 @@
use axum::{extract::{FromRequestParts, Query}, http::request::Parts};
use libertas_core::{error::CoreError, schema::{FilterParams, ListMediaOptions, SortOrder, SortParams}};
use libertas_core::{error::CoreError, schema::{FilterParams, ListMediaOptions, MetadataFilter, SortOrder, SortParams}};

use crate::{error::ApiError, schema::ListMediaParams, state::AppState};

@@ -18,8 +18,25 @@ impl From<ListMediaParams> for ListMediaOptions {
}
});

let metadata_filters = if params.metadata.is_empty() {
None
} else {
Some(
params.metadata
.into_iter()
.filter_map(|s| {
s.split_once(":").map(|(key, value)| MetadataFilter {
tag_name: key.to_string(),
tag_value: value.to_string(),
})
})
.collect::<Vec<_>>()
)
};

let filter = Some(FilterParams {
// e.g., mime_type: params.mime_type
mime_type: params.mime_type,
metadata_filters,
});

ListMediaOptions { sort, filter }
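
Illustration (not part of the diff): a minimal sketch of how repeated metadata query parameters are expected to map onto tag name/value pairs, mirroring the split_once(":") logic above. The example URL and values are assumptions, not taken from the commit.

    // Assumed request shape: GET /media?metadata=Make:Canon&metadata=Model:EOS R5
    fn main() {
        let raw = vec!["Make:Canon".to_string(), "Model:EOS R5".to_string()];
        // Same rule as the extractor: "key:value" -> (tag_name, tag_value);
        // entries without a ':' are silently dropped by filter_map.
        let filters: Vec<(String, String)> = raw
            .into_iter()
            .filter_map(|s| s.split_once(':').map(|(k, v)| (k.to_string(), v.to_string())))
            .collect();
        assert_eq!(filters, vec![
            ("Make".to_string(), "Canon".to_string()),
            ("Model".to_string(), "EOS R5".to_string()),
        ]);
    }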

@@ -1,12 +1,11 @@
use std::sync::Arc;

use libertas_core::{
config::Config,
config::{AppConfig},
error::{CoreError, CoreResult},
};
use libertas_infra::factory::{
build_album_repository, build_album_share_repository, build_database_pool,
build_media_repository, build_user_repository,
build_album_repository, build_album_share_repository, build_database_pool, build_media_metadata_repository, build_media_repository, build_user_repository
};

use crate::{
@@ -18,7 +17,7 @@ use crate::{
state::AppState,
};

pub async fn build_app_state(config: Config) -> CoreResult<AppState> {
pub async fn build_app_state(config: AppConfig) -> CoreResult<AppState> {
let nats_client = async_nats::connect(&config.broker_url)
.await
.map_err(|e| CoreError::Config(format!("Failed to connect to NATS: {}", e)))?;
@@ -30,6 +29,8 @@ pub async fn build_app_state(config: Config) -> CoreResult<AppState> {
let media_repo = build_media_repository(&config, db_pool.clone()).await?;
let album_repo = build_album_repository(&config.database, db_pool.clone()).await?;
let album_share_repo = build_album_share_repository(&config.database, db_pool.clone()).await?;
let media_metadata_repo =
build_media_metadata_repository(&config.database, db_pool.clone()).await?;

let hasher = Arc::new(Argon2Hasher::default());
let tokenizer = Arc::new(JwtGenerator::new(config.jwt_secret.clone()));
@@ -44,6 +45,7 @@ pub async fn build_app_state(config: Config) -> CoreResult<AppState> {
media_repo.clone(),
user_repo.clone(),
album_share_repo.clone(),
media_metadata_repo.clone(),
config.clone(),
nats_client.clone(),
));

@@ -13,7 +13,7 @@ use tower::ServiceExt;
use tower_http::services::ServeFile;
use uuid::Uuid;

use crate::{error::ApiError, extractors::query_options::ApiListMediaOptions, middleware::auth::UserId, schema::MediaResponse, state::AppState};
use crate::{error::ApiError, extractors::query_options::ApiListMediaOptions, middleware::auth::UserId, schema::{MediaDetailsResponse, MediaMetadataResponse, MediaResponse}, state::AppState};

impl From<Media> for MediaResponse {
@@ -101,9 +101,22 @@ async fn get_media_details(
State(state): State<AppState>,
UserId(user_id): UserId,
Path(id): Path<Uuid>,
) -> Result<Json<MediaResponse>, ApiError> {
let media = state.media_service.get_media_details(id, user_id).await?;
Ok(Json(media.into()))
) -> Result<Json<MediaDetailsResponse>, ApiError> {
let bundle = state.media_service.get_media_details(id, user_id).await?;
let response = MediaDetailsResponse {
id: bundle.media.id,
storage_path: bundle.media.storage_path,
original_filename: bundle.media.original_filename,
mime_type: bundle.media.mime_type,
hash: bundle.media.hash,
metadata: bundle.metadata
.into_iter()
.map(MediaMetadataResponse::from)
.collect(),
};

Ok(Json(response))
}

async fn delete_media(

@@ -1,4 +1,4 @@
use libertas_core::models::{Album, AlbumPermission};
use libertas_core::{models::{Album, AlbumPermission, MediaMetadata}};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

@@ -15,9 +15,9 @@ pub struct MediaResponse {
pub struct ListMediaParams {
pub sort_by: Option<String>,
pub order: Option<String>,
// You can add future filters here, e.g.:
// pub mime_type: Option<String>,
}
pub mime_type: Option<String>,
#[serde(default)]
pub metadata: Vec<String>,
}

#[derive(Deserialize)]
pub struct CreateAlbumRequest {
@@ -92,4 +92,32 @@ pub struct UserResponse {
pub id: Uuid,
pub username: String,
pub email: String,
}
}

#[derive(Serialize)]
pub struct MediaMetadataResponse {
pub source: String,
pub tag_name: String,
pub tag_value: String,
}

impl From<MediaMetadata> for MediaMetadataResponse {
fn from(metadata: MediaMetadata) -> Self {
Self {
source: metadata.source.as_str().to_string(),
tag_name: metadata.tag_name,
tag_value: metadata.tag_value,
}
}
}

#[derive(Serialize)]
pub struct MediaDetailsResponse {
pub id: uuid::Uuid,
pub storage_path: String,
pub original_filename: String,
pub mime_type: String,
pub hash: String,
pub metadata: Vec<MediaMetadataResponse>,
}
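
Illustration (not part of the diff): a hedged sketch of the JSON that MediaDetailsResponse would serialize to. All values are made up, and the exact "source" string depends on MediaMetadataSource::as_str, which the commit does not show.

    {
      "id": "5f4dcc3b-0000-4000-8000-1a2b3c4d5e6f",
      "storage_path": "2021/08/IMG_0001.jpg",
      "original_filename": "IMG_0001.jpg",
      "mime_type": "image/jpeg",
      "hash": "3a7bd3e2360a3d29eea436fcfb7e44c735d117c42d1c1835420b6b9942dd4f1b",
      "metadata": [
        { "source": "exif", "tag_name": "DateTimeOriginal", "tag_value": "2021:08:15 14:30:00" }
      ]
    }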

@@ -1,16 +1,9 @@
use std::{path::PathBuf, sync::Arc};
use std::{path::{Path, PathBuf}, sync::Arc};

use async_trait::async_trait;
use chrono::Datelike;
use futures::stream::StreamExt;
use libertas_core::{
authz,
config::Config,
error::{CoreError, CoreResult},
models::Media,
repositories::{AlbumShareRepository, MediaRepository, UserRepository},
schema::{ListMediaOptions, UploadMediaData},
services::MediaService,
authz, config::AppConfig, error::{CoreError, CoreResult}, media_utils::{ExtractedExif, extract_exif_data_from_bytes, get_storage_path_and_date}, models::{Media, MediaBundle, MediaMetadata}, repositories::{AlbumShareRepository, MediaMetadataRepository, MediaRepository, UserRepository}, schema::{ListMediaOptions, UploadMediaData}, services::MediaService
};
use serde_json::json;
use sha2::{Digest, Sha256};
@@ -21,7 +14,8 @@ pub struct MediaServiceImpl {
repo: Arc<dyn MediaRepository>,
user_repo: Arc<dyn UserRepository>,
album_share_repo: Arc<dyn AlbumShareRepository>,
config: Config,
metadata_repo: Arc<dyn MediaMetadataRepository>,
config: AppConfig,
nats_client: async_nats::Client,
}

@@ -30,13 +24,15 @@ impl MediaServiceImpl {
repo: Arc<dyn MediaRepository>,
user_repo: Arc<dyn UserRepository>,
album_share_repo: Arc<dyn AlbumShareRepository>,
config: Config,
metadata_repo: Arc<dyn MediaMetadataRepository>,
config: AppConfig,
nats_client: async_nats::Client,
) -> Self {
Self {
repo,
user_repo,
album_share_repo,
metadata_repo,
config,
nats_client,
}
@@ -55,10 +51,22 @@ impl MediaService for MediaServiceImpl {
self.check_upload_prerequisites(owner_id, file_size, &hash)
.await?;

let storage_path = self.persist_media_file(&file_bytes, &filename).await?;
let file_bytes_clone = file_bytes.clone();
let extracted_data = tokio::task::spawn_blocking(move || {
extract_exif_data_from_bytes(&file_bytes_clone)
})
.await
.unwrap()?;

let (storage_path_buf, _date_taken) =
get_storage_path_and_date(&extracted_data, &filename);

let storage_path_str = self
.persist_media_file(&file_bytes, &storage_path_buf)
.await?;

let media = self
.persist_media_metadata(owner_id, filename, mime_type, storage_path, hash, file_size)
.persist_media_metadata(owner_id, filename, mime_type, storage_path_str, hash, file_size, extracted_data)
.await?;

self.publish_new_media_job(media.id).await?;
@@ -66,7 +74,7 @@ impl MediaService for MediaServiceImpl {
Ok(media)
}

async fn get_media_details(&self, id: Uuid, user_id: Uuid) -> CoreResult<Media> {
async fn get_media_details(&self, id: Uuid, user_id: Uuid) -> CoreResult<MediaBundle> {
let media = self
.repo
.find_by_id(id)
@@ -79,20 +87,23 @@ impl MediaService for MediaServiceImpl {
.await?
.ok_or(CoreError::NotFound("User".to_string(), user_id))?;

if authz::is_owner(user_id, &media) || authz::is_admin(&user) {
return Ok(media);
}

let is_shared = self
if !authz::is_owner(user_id, &media) && !authz::is_admin(&user) {
let is_shared = self
.album_share_repo
.is_media_in_shared_album(id, user_id)
.await?;

if is_shared {
return Ok(media);
tracing::warn!("User {} attempted to access media {} without permission, media owner is: {}", user_id, id, media.owner_id);

if !is_shared {
tracing::warn!("User {} attempted to access media {} without permission, media owner is: {}", user_id, id, media.owner_id);
return Err(CoreError::Auth("Access denied".to_string()));
}
}

Err(CoreError::Auth("Access denied".to_string()))
let metadata = self.metadata_repo.find_by_media_id(id).await?;

Ok(MediaBundle { media, metadata })
}

async fn list_user_media(&self, user_id: Uuid, options: ListMediaOptions) -> CoreResult<Vec<Media>> {
@@ -216,27 +227,18 @@ impl MediaServiceImpl {
Ok(())
}

async fn persist_media_file(&self, file_bytes: &[u8], filename: &str) -> CoreResult<String> {
let now = chrono::Utc::now();
let year = now.year().to_string();
let month = format!("{:02}", now.month());
async fn persist_media_file(&self, file_bytes: &[u8], storage_path: &Path) -> CoreResult<String> {
let mut dest_path = PathBuf::from(&self.config.media_library_path);
dest_path.push(year.clone());
dest_path.push(month.clone());
dest_path.push(storage_path);

fs::create_dir_all(&dest_path).await?;
dest_path.push(filename);

let storage_path_str = PathBuf::from(&year)
.join(&month)
.join(filename)
.to_string_lossy()
.to_string();
if let Some(parent) = dest_path.parent() {
fs::create_dir_all(parent).await?;
}

let mut file = fs::File::create(&dest_path).await?;
file.write_all(&file_bytes).await?;

Ok(storage_path_str)
Ok(storage_path.to_string_lossy().to_string())
}

async fn persist_media_metadata(
@@ -247,6 +249,7 @@ impl MediaServiceImpl {
storage_path: String,
hash: String,
file_size: i64,
extracted_data: ExtractedExif,
) -> CoreResult<Media> {
let media_model = Media {
id: Uuid::new_v4(),
@@ -260,6 +263,22 @@ impl MediaServiceImpl {
};

self.repo.create(&media_model).await?;

let mut metadata_models = Vec::new();
for (source, tag_name, tag_value) in extracted_data.all_tags {
metadata_models.push(MediaMetadata {
id: Uuid::new_v4(),
media_id: media_model.id,
source,
tag_name,
tag_value,
});
}

if !metadata_models.is_empty() {
self.metadata_repo.create_batch(&metadata_models).await?;
}

self.user_repo
.update_storage_used(owner_id, file_size)
.await?;

@@ -2,7 +2,7 @@ use std::sync::Arc;

use async_trait::async_trait;
use libertas_core::{
config::Config,
config::AppConfig,
error::{CoreError, CoreResult},
models::{Role, User},
repositories::UserRepository,
@@ -17,7 +17,7 @@ pub struct UserServiceImpl {
repo: Arc<dyn UserRepository>,
hasher: Arc<dyn PasswordHasher>,
tokenizer: Arc<dyn TokenGenerator>,
config: Arc<Config>,
config: Arc<AppConfig>,
}

impl UserServiceImpl {
@@ -25,7 +25,7 @@ impl UserServiceImpl {
repo: Arc<dyn UserRepository>,
hasher: Arc<dyn PasswordHasher>,
tokenizer: Arc<dyn TokenGenerator>,
config: Arc<Config>,
config: Arc<AppConfig>,
) -> Self {
Self {
repo,

@@ -1,7 +1,7 @@
use std::sync::Arc;

use libertas_core::{
config::Config,
config::AppConfig,
services::{AlbumService, MediaService, UserService},
};

@@ -14,5 +14,5 @@ pub struct AppState {
pub album_service: Arc<dyn AlbumService>,
pub token_generator: Arc<dyn TokenGenerator>,
pub nats_client: async_nats::Client,
pub config: Config,
pub config: AppConfig,
}

@@ -13,3 +13,5 @@ thiserror = "2.0.17"
uuid = {version = "1.18.1", features = ["v4", "serde"] }
serde = { version = "1.0.228", features = ["derive"] }
nom-exif = { version = "2.5.4", features = ["serde", "async", "tokio"] }
dotenvy = "0.15.7"
envy = "0.4.2"

@@ -1,6 +1,9 @@
use serde::Deserialize;

use crate::error::{CoreError, CoreResult};

#[derive(Deserialize, Clone)]
#[serde(rename_all = "lowercase")]
pub enum DatabaseType {
Postgres,
Sqlite,
@@ -30,6 +33,48 @@ pub struct ThumbnailConfig {

#[derive(Deserialize, Clone)]
pub struct Config {
#[serde(rename = "DATABASE_URL")]
pub database_url: String,

#[serde(rename = "DATABASE_DB_TYPE")]
pub database_db_type: DatabaseType,

#[serde(rename = "SERVER_ADDRESS")]
pub server_address: String,

#[serde(rename = "JWT_SECRET")]
pub jwt_secret: String,

#[serde(rename = "MEDIA_LIBRARY_PATH")]
pub media_library_path: String,

#[serde(rename = "BROKER_URL")]
pub broker_url: String,

#[serde(default = "default_max_upload_size")]
#[serde(rename = "MAX_UPLOAD_SIZE_MB")]
pub max_upload_size_mb: u32,

#[serde(default = "default_storage_quota")]
#[serde(rename = "DEFAULT_STORAGE_QUOTA_GB")]
pub default_storage_quota_gb: u64,

#[serde(default = "default_allowed_sort_columns")]
#[serde(rename = "ALLOWED_SORT_COLUMNS")]
pub allowed_sort_columns: Vec<String>,

#[serde(flatten)]
pub thumbnail_config: Option<ThumbnailConfig>,
}

fn default_max_upload_size() -> u32 { 100 }
fn default_storage_quota() -> u64 { 10 }
fn default_allowed_sort_columns() -> Vec<String> {
vec!["created_at".to_string(), "original_filename".to_string()]
}

#[derive(Clone)]
pub struct AppConfig {
pub database: DatabaseConfig,
pub server_address: String,
pub jwt_secret: String,
@@ -40,3 +85,29 @@ pub struct Config {
pub allowed_sort_columns: Option<Vec<String>>,
pub thumbnail_config: Option<ThumbnailConfig>,
}

pub fn load_config() -> CoreResult<AppConfig> {
// Load the .env file at that specific path
let env_path = dotenvy::dotenv()
.map_err(|e| CoreError::Config(format!("Failed to load .env file: {}", e)))?;

println!("Loaded config from {}", env_path.display());

let config = envy::from_env::<Config>()
.map_err(|e| CoreError::Config(format!("Failed to load config from env: {}", e)))?;

Ok(AppConfig {
database: DatabaseConfig {
db_type: config.database_db_type,
url: config.database_url,
},
server_address: config.server_address,
jwt_secret: config.jwt_secret,
media_library_path: config.media_library_path,
broker_url: config.broker_url,
max_upload_size_mb: Some(config.max_upload_size_mb),
default_storage_quota_gb: Some(config.default_storage_quota_gb),
allowed_sort_columns: Some(config.allowed_sort_columns),
thumbnail_config: config.thumbnail_config,
})
}
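
Illustration (not part of the diff): an example .env spelled out with the variable names used in the serde renames above. Values are placeholders taken from the old hardcoded config; whether envy needs the rename attributes or derives SCREAMING_SNAKE_CASE names on its own, and how it parses the list value, are assumptions noted in the comments.

    # Hedged example .env matching the renamed fields above (placeholder values)
    DATABASE_URL=postgres://libertas:libertas_password@localhost:5436/libertas_db
    DATABASE_DB_TYPE=postgres
    SERVER_ADDRESS=127.0.0.1:8080
    JWT_SECRET=change_me
    MEDIA_LIBRARY_PATH=media_library
    BROKER_URL=nats://localhost:4222
    # Optional; the default_* functions apply when these are unset
    MAX_UPLOAD_SIZE_MB=100
    DEFAULT_STORAGE_QUOTA_GB=10
    # Assumption: envy parses Vec<String> from a comma-separated value
    ALLOWED_SORT_COLUMNS=date_taken,created_at,original_filename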

@@ -1,7 +1,7 @@
use std::path::Path;
use std::{io::Cursor, path::{Path, PathBuf}};

use chrono::{DateTime, NaiveDateTime, Utc};
use nom_exif::{AsyncMediaParser, AsyncMediaSource, ExifIter, TrackInfo};
use chrono::{DateTime, Datelike, NaiveDateTime, Utc};
use nom_exif::{AsyncMediaParser, AsyncMediaSource, ExifIter, MediaParser, MediaSource, TrackInfo};

use crate::{error::{CoreError, CoreResult}, models::MediaMetadataSource};

@@ -10,10 +10,76 @@ pub struct ExtractedExif {
pub all_tags: Vec<(MediaMetadataSource, String, String)>,
}

const EXIF_DATE_FORMATS: &[&str] = &[
"%Y:%m:%d %H:%M:%S",
"%Y-%m-%d %H:%M:%S",
"%Y/%m/%d %H:%M:%S",
"%Y-%m-%dT%H:%M:%S",
];

pub fn parse_exif_datetime(s: &str) -> Option<DateTime<Utc>> {
NaiveDateTime::parse_from_str(s, "%Y:%m:%d %H:%M:%S")
.ok()
.map(|ndt| ndt.and_local_timezone(Utc).unwrap())
for format in EXIF_DATE_FORMATS {
if let Ok(ndt) = NaiveDateTime::parse_from_str(s, format) {
return Some(ndt.and_local_timezone(Utc).unwrap());
}
}
None
}

pub fn extract_exif_data_from_bytes(
bytes: &[u8],
) -> CoreResult<ExtractedExif> {
let ms = MediaSource::seekable(Cursor::new(bytes))
.map_err(|e| CoreError::Unknown(format!("Failed to open bytes for EXIF: {}", e)))?;

let mut parser = MediaParser::new();

let all_tags = if ms.has_exif() {
let iter: ExifIter = match parser.parse(ms) {
Ok(iter) => iter,
Err(e) => {
println!("Could not parse EXIF: {}", e);
return Ok(ExtractedExif::default());
}
};
iter.into_iter()
.filter_map(|mut x| {
let res = x.take_result();
match res {
Ok(v) => Some((
MediaMetadataSource::Exif,
x.tag()
.map(|t| t.to_string())
.unwrap_or_else(|| format!("Unknown(0x{:04x})", x.tag_code())),
v.to_string(),
)),
Err(e) => {
println!("!! EXIF parsing error for tag 0x{:04x}: {}", x.tag_code(), e);
None
}
}
})
.collect::<Vec<_>>()
} else {
match parser.parse::<_, _, TrackInfo>(ms) {
Ok(info) => info
.into_iter()
.map(|x| {
(
MediaMetadataSource::TrackInfo,
x.0.to_string(),
x.1.to_string(),
)
})
.collect::<Vec<_>>(),
Err(e) => {
println!("Could not parse TrackInfo: {}", e);
return Ok(ExtractedExif::default());
}
}
};

Ok(ExtractedExif { all_tags })
}

pub async fn extract_exif_data(file_path: &Path) -> CoreResult<ExtractedExif> {
@@ -74,4 +140,26 @@ pub async fn extract_exif_data(file_path: &Path) -> CoreResult<ExtractedExif> {
};

Ok(ExtractedExif { all_tags })
}

pub fn get_storage_path_and_date(
extracted_data: &ExtractedExif,
filename: &str,
) -> (PathBuf, Option<DateTime<Utc>>) {
let date_taken_str = extracted_data.all_tags.iter()
.find(|(source, tag_name, _)| {
*source == MediaMetadataSource::Exif &&
(tag_name == "DateTimeOriginal" || tag_name == "ModifyDate")
})
.map(|(_, _, tag_value)| tag_value);

let date_taken = date_taken_str.and_then(|s| parse_exif_datetime(s));
let file_date = date_taken.clone().unwrap_or_else(chrono::Utc::now);

let year = file_date.year().to_string();
let month = format!("{:02}", file_date.month());

let storage_path = PathBuf::from(&year).join(&month).join(filename);

(storage_path, date_taken)
}
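
Illustration (not part of the diff): a minimal sketch of the layout this implies. The DateTimeOriginal value and filename are made up; a parsed EXIF date puts the file under year/month/filename, and the current UTC time is used when no date tag parses.

    use chrono::{Datelike, NaiveDateTime, Utc};
    use std::path::PathBuf;

    fn main() {
        // Made-up EXIF DateTimeOriginal, parsed with the first format in EXIF_DATE_FORMATS
        let ndt = NaiveDateTime::parse_from_str("2021:08:15 14:30:00", "%Y:%m:%d %H:%M:%S").unwrap();
        let date = ndt.and_local_timezone(Utc).unwrap();
        let storage_path = PathBuf::from(date.year().to_string())
            .join(format!("{:02}", date.month()))
            .join("IMG_0001.jpg");
        println!("{}", storage_path.display()); // 2021/08/IMG_0001.jpg
    }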

@@ -145,3 +145,9 @@ pub struct AlbumShare {
pub user_id: uuid::Uuid,
pub permission: AlbumPermission,
}

pub struct MediaBundle {
pub media: Media,
pub metadata: Vec<MediaMetadata>,
}

@@ -3,7 +3,7 @@ use std::sync::Arc;
use async_trait::async_trait;

use crate::{
config::Config, error::CoreResult, models::Media, repositories::{AlbumRepository, MediaMetadataRepository, MediaRepository, UserRepository}
config::AppConfig, error::CoreResult, models::Media, repositories::{AlbumRepository, MediaMetadataRepository, MediaRepository, UserRepository}
};

pub struct PluginData {
@@ -16,7 +16,7 @@ pub struct PluginContext {
pub user_repo: Arc<dyn UserRepository>,
pub metadata_repo: Arc<dyn MediaMetadataRepository>,
pub media_library_path: String,
pub config: Arc<Config>,
pub config: Arc<AppConfig>,
}

#[async_trait]

@@ -57,8 +57,9 @@ pub struct SortParams {

#[derive(Debug, Clone, Default)]
pub struct FilterParams {
// In the future, you can add fields like:
// pub mime_type: Option<String>,
pub mime_type: Option<String>,
pub metadata_filters: Option<Vec<MetadataFilter>>,
// In the future, we can add fields like:
// pub date_range: Option<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>)>,
}

@@ -67,4 +68,10 @@ pub struct ListMediaOptions {
pub sort: Option<SortParams>,
pub filter: Option<FilterParams>,
// pub pagination: Option<PaginationParams>,
}

#[derive(Debug, Clone)]
pub struct MetadataFilter {
pub tag_name: String,
pub tag_value: String,
}

@@ -3,7 +3,7 @@ use uuid::Uuid;

use crate::{
error::CoreResult,
models::{Album, Media, User},
models::{Album, Media, MediaBundle, User},
schema::{
AddMediaToAlbumData, CreateAlbumData, CreateUserData, ListMediaOptions, LoginUserData, ShareAlbumData, UpdateAlbumData, UploadMediaData
},
@@ -12,7 +12,7 @@ use crate::{
#[async_trait]
pub trait MediaService: Send + Sync {
async fn upload_media(&self, data: UploadMediaData<'_>) -> CoreResult<Media>;
async fn get_media_details(&self, id: Uuid, user_id: Uuid) -> CoreResult<Media>;
async fn get_media_details(&self, id: Uuid, user_id: Uuid) -> CoreResult<MediaBundle>;
async fn list_user_media(&self, user_id: Uuid, options: ListMediaOptions) -> CoreResult<Vec<Media>>;
async fn get_media_filepath(&self, id: Uuid, user_id: Uuid) -> CoreResult<String>;
async fn delete_media(&self, id: Uuid, user_id: Uuid) -> CoreResult<()>;

@@ -1,25 +1 @@
use libertas_core::{
config::{Config, DatabaseConfig, DatabaseType},
error::CoreResult,
};

pub fn load_config() -> CoreResult<Config> {
Ok(Config {
database: DatabaseConfig {
db_type: DatabaseType::Postgres,
url: "postgres://postgres:postgres@localhost:5432/libertas_db".to_string(),
},
server_address: "127.0.0.1:8080".to_string(),
jwt_secret: "super_secret_jwt_key".to_string(),
media_library_path: "media_library".to_string(),
broker_url: "nats://localhost:4222".to_string(),
max_upload_size_mb: Some(100),
default_storage_quota_gb: Some(10),
allowed_sort_columns: Some(vec![
"date_taken".to_string(),
"created_at".to_string(),
"original_filename".to_string(),
]),
thumbnail_config: None,
})
}
pub use libertas_core::config::load_config;

@@ -4,10 +4,9 @@ use std::{
sync::Arc,
};

use chrono::Datelike;
use clap::Parser;
use libertas_core::{
config::Config, error::{CoreError, CoreResult}, media_utils::{extract_exif_data, parse_exif_datetime}, models::{Media, MediaMetadata, MediaMetadataSource, User}, repositories::{MediaMetadataRepository, MediaRepository, UserRepository}
config::AppConfig, error::{CoreError, CoreResult}, media_utils::{extract_exif_data, get_storage_path_and_date}, models::{Media, MediaMetadata, User}, repositories::{MediaMetadataRepository, MediaRepository, UserRepository}
};
use libertas_infra::factory::{build_database_pool, build_media_metadata_repository, build_media_repository, build_user_repository};
use serde_json;
@@ -30,7 +29,7 @@ struct Cli {
}

struct ImporterState {
config: Config,
config: AppConfig,
media_repo: Arc<dyn MediaRepository>,
user_repo: Arc<dyn UserRepository>,
metadata_repo: Arc<dyn MediaMetadataRepository>,
@@ -132,31 +131,22 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
}
};

let date_taken_str = extracted_data.all_tags.iter()
.find(|(source, tag_name, _)| {
*source == MediaMetadataSource::Exif &&
(tag_name == "DateTimeOriginal" || tag_name == "ModifyDate")
})
.map(|(_, _, tag_value)| tag_value);
let (storage_path_buf, _date_taken) =
get_storage_path_and_date(&extracted_data, &filename);

let date_taken = date_taken_str.and_then(|s| parse_exif_datetime(s));
let file_date = date_taken.unwrap_or_else(chrono::Utc::now);
let year = file_date.year().to_string();
let month = format!("{:02}", file_date.month());
let mut dest_path_buf = PathBuf::from(&state.config.media_library_path);
dest_path_buf.push(&year);
dest_path_buf.push(&month);
dest_path_buf.push(&storage_path_buf);

fs::create_dir_all(&dest_path_buf).await?;
println!(" -> Storing file at: {}", dest_path_buf.display());

dest_path_buf.push(&filename);
if let Some(parent) = dest_path_buf.parent() {
fs::create_dir_all(parent).await?;
}

fs::copy(file_path, &dest_path_buf).await?;
let storage_path_str = PathBuf::from(&year)
.join(&month)
.join(&filename)
.to_string_lossy()
.to_string();

let storage_path_str = storage_path_buf.to_string_lossy().to_string();

let mime_type = mime_guess::from_path(file_path)
.first_or_octet_stream()
.to_string();

@@ -1,7 +1,7 @@
use std::sync::Arc;

use libertas_core::{
config::{Config, DatabaseConfig, DatabaseType},
config::{AppConfig, DatabaseConfig, DatabaseType},
error::{CoreError, CoreResult},
repositories::UserRepository,
};
@@ -47,7 +47,7 @@ pub async fn build_user_repository(
}

pub async fn build_media_repository(
config: &Config,
config: &AppConfig,
pool: DatabasePool,
) -> CoreResult<Arc<dyn libertas_core::repositories::MediaRepository>> {
match pool {

@@ -38,12 +38,41 @@ impl QueryBuilder<ListMediaOptions> for MediaQueryBuilder {
mut query: SqlxQueryBuilder<'a, sqlx::Postgres>,
options: &'a ListMediaOptions,
) -> CoreResult<SqlxQueryBuilder<'a, sqlx::Postgres>> {
if let Some(_filter) = &options.filter {
// In the future, you would add logic here:
// if let Some(mime) = &filter.mime_type {
// query.push(" AND mime_type = ");
// query.push_bind(mime);
// }
let mut metadata_filter_count = 0;

if let Some(filter) = &options.filter {
if let Some(mime) = &filter.mime_type {
query.push(" AND media.mime_type = ");
query.push_bind(mime);
}

if let Some(metadata_filters) = &filter.metadata_filters {
if !metadata_filters.is_empty() {
metadata_filter_count = metadata_filters.len();

query.push(" JOIN media_metadata mm ON media.id == mm.media_id ");
query.push(" AND ( ");

for (i, filter) in metadata_filters.iter().enumerate() {
if i > 0 {
query.push(" OR ");
}

query.push(" ( mm.tag_name = ");
query.push_bind(&filter.tag_name);
query.push(" AND mm.tag_value = ");
query.push_bind(&filter.tag_value);
query.push(" ) ");
}
query.push(" ) ");
}
}
}

if metadata_filter_count > 0 {
query.push(" GROUP BY media.id ");
query.push(" HAVING COUNT(DISTINCT mm.tag_name) = ");
query.push_bind(metadata_filter_count as i64);
}

if let Some(sort) = &options.sort {
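
Illustration (not part of the diff): a rough sketch of the query shape this builder aims to produce for two metadata filters, assuming the base SELECT from the Postgres repository shown below. Bind placeholders are written as $n, and the join condition is written with a single = as Postgres expects; the exact assembled text depends on how the repository composes the pieces.

    -- Sketch for metadata=Make:Canon and metadata=Model:EOS R5 (values are bound, not inlined)
    SELECT media.id, media.owner_id, media.storage_path, media.original_filename,
           media.mime_type, media.hash, media.created_at, media.thumbnail_path
    FROM media
    JOIN media_metadata mm ON media.id = mm.media_id
    WHERE media.owner_id = $1
      AND ( ( mm.tag_name = $2 AND mm.tag_value = $3 )
         OR ( mm.tag_name = $4 AND mm.tag_value = $5 ) )
    GROUP BY media.id
    HAVING COUNT(DISTINCT mm.tag_name) = $6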

@@ -2,7 +2,7 @@ use std::sync::Arc;

use async_trait::async_trait;
use libertas_core::{
config::Config, error::{CoreError, CoreResult}, models::Media, repositories::MediaRepository, schema::ListMediaOptions
config::AppConfig, error::{CoreError, CoreResult}, models::Media, repositories::MediaRepository, schema::ListMediaOptions
};
use sqlx::PgPool;
use uuid::Uuid;
@@ -16,11 +16,11 @@ pub struct PostgresMediaRepository {
}

impl PostgresMediaRepository {
pub fn new(pool: PgPool, config: &Config) -> Self {
pub fn new(pool: PgPool, config: &AppConfig) -> Self {
let allowed_columns = config
.allowed_sort_columns
.clone()
.unwrap_or_else(|| vec!["created_at".to_string()]);
.unwrap_or_else(|| vec!["created_at".to_string(), "original_filename".to_string()]);

Self { pool, query_builder: Arc::new(MediaQueryBuilder::new(allowed_columns)) }
}
@@ -89,10 +89,10 @@ impl MediaRepository for PostgresMediaRepository {
async fn list_by_user(&self, user_id: Uuid, options: &ListMediaOptions) -> CoreResult<Vec<Media>> {
let mut query = sqlx::QueryBuilder::new(
r#"
SELECT id, owner_id, storage_path, original_filename, mime_type, hash, created_at,
thumbnail_path
SELECT media.id, media.owner_id, media.storage_path, media.original_filename, media.mime_type, media.hash, media.created_at,
media.thumbnail_path
FROM media
WHERE owner_id =
WHERE media.owner_id =
"#,
);

@@ -1,25 +1 @@
use libertas_core::{
config::{Config, DatabaseConfig, DatabaseType},
error::CoreResult,
};

pub fn load_config() -> CoreResult<Config> {
Ok(Config {
database: DatabaseConfig {
db_type: DatabaseType::Postgres,
url: "postgres://libertas:libertas_password@localhost:5436/libertas_db".to_string(),
},
server_address: "127.0.0.1:8080".to_string(),
jwt_secret: "super_secret_jwt_key".to_string(),
media_library_path: "media_library".to_string(),
broker_url: "nats://localhost:4222".to_string(),
max_upload_size_mb: Some(100),
default_storage_quota_gb: Some(10),
allowed_sort_columns: Some(vec![
"date_taken".to_string(),
"created_at".to_string(),
"original_filename".to_string(),
]),
thumbnail_config: None,
})
}
pub use libertas_core::config::load_config;