feat: implement media metadata management with EXIF and TrackInfo support

This commit is contained in:
Date: 2025-11-14 07:41:54 +01:00
parent ea95c2255f
commit 55cf4db2de
18 changed files with 343 additions and 195 deletions

View File

@@ -7,7 +7,7 @@ pub fn load_config() -> CoreResult<Config> {
Ok(Config {
database: DatabaseConfig {
db_type: DatabaseType::Postgres,
url: "postgres://libertas:libertas_password@localhost:5436/libertas_db".to_string(),
url: "postgres://postgres:postgres@localhost:5432/libertas_db".to_string(),
},
server_address: "127.0.0.1:8080".to_string(),
jwt_secret: "super_secret_jwt_key".to_string(),

View File

@@ -7,10 +7,9 @@ use std::{
use chrono::Datelike;
use clap::Parser;
use libertas_core::{
config::Config, error::{CoreError, CoreResult}, media_utils::extract_exif_data, models::{Media, User}, repositories::{MediaRepository, UserRepository}
config::Config, error::{CoreError, CoreResult}, media_utils::{extract_exif_data, parse_exif_datetime}, models::{Media, MediaMetadata, MediaMetadataSource, User}, repositories::{MediaMetadataRepository, MediaRepository, UserRepository}
};
use libertas_infra::factory::{build_database_pool, build_media_repository, build_user_repository};
use nom_exif::{AsyncMediaParser, AsyncMediaSource, ExifIter};
use libertas_infra::factory::{build_database_pool, build_media_metadata_repository, build_media_repository, build_user_repository};
use serde_json;
use sha2::{Digest, Sha256};
use tokio::fs;
@@ -34,6 +33,7 @@ struct ImporterState {
config: Config,
media_repo: Arc<dyn MediaRepository>,
user_repo: Arc<dyn UserRepository>,
metadata_repo: Arc<dyn MediaMetadataRepository>,
nats_client: async_nats::Client,
}
@@ -49,6 +49,7 @@ async fn main() -> Result<()> {
let db_pool = build_database_pool(&config.database).await?;
let media_repo = build_media_repository(&config, db_pool.clone()).await?;
let user_repo = build_user_repository(&config.database, db_pool.clone()).await?;
let metadata_repo = build_media_metadata_repository(&config.database, db_pool.clone()).await?;
let nats_client = async_nats::connect(&config.broker_url).await?;
println!("Connected to database and NATS broker.");
@@ -57,6 +58,7 @@ async fn main() -> Result<()> {
config,
media_repo,
user_repo,
metadata_repo,
nats_client,
};
@@ -112,59 +114,33 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
));
}
let (width, height, location, date_taken) = match extract_exif_data(file_path).await {
let extracted_data = match extract_exif_data(file_path).await {
Ok(data) => {
println!(" -> Parsed EXIF: DateTimeOriginal={:?}, GPS={:?}", data.date_taken, data.location);
(data.width, data.height, data.location, data.date_taken)
},
println!(
" -> Parsed metadata: Tags={}",
data.all_tags.len()
);
data
}
Err(e) => {
eprintln!(" -> EXIF parsing failed for {}: {}. Skipping.", file_path.display(), e);
(None, None, None, None)
eprintln!(
" -> Metadata parsing failed for {}: {}. Skipping.",
file_path.display(),
e
);
Default::default()
}
};
match AsyncMediaSource::file_path(file_path).await {
Ok(ms) => {
if ms.has_exif() {
let mut parser = AsyncMediaParser::new();
if let Ok(iter) = parser.parse::<_, _, ExifIter>(ms).await {
let values = iter
.into_iter()
.filter_map(|mut x| {
let res = x.take_result();
match res {
Ok(v) => Some((
x.tag().map(|x| x.to_string()).unwrap_or_else(|| {
format!("Unknown(0x{:04x})", x.tag_code())
}),
v,
)),
Err(e) => {
println!(
" !! EXIF parsing error for tag 0x{:04x}: {}",
x.tag_code(),
e
);
None
}
}
})
.collect::<Vec<_>>();
let date_taken_str = extracted_data.all_tags.iter()
.find(|(source, tag_name, _)| {
*source == MediaMetadataSource::Exif &&
(tag_name == "DateTimeOriginal" || tag_name == "ModifyDate")
})
.map(|(_, _, tag_value)| tag_value);
values.iter().for_each(|x| {
println!("{:<32}=> {}", x.0, x.1);
});
} else {
()
}
} else {
()
}
}
Err(_) => (),
};
let file_date = date_taken.unwrap_or_else(|| chrono::Utc::now());
let date_taken = date_taken_str.and_then(|s| parse_exif_datetime(s));
let file_date = date_taken.unwrap_or_else(chrono::Utc::now);
let year = file_date.year().to_string();
let month = format!("{:02}", file_date.month());
let mut dest_path_buf = PathBuf::from(&state.config.media_library_path);
@@ -176,13 +152,11 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
dest_path_buf.push(&filename);
fs::copy(file_path, &dest_path_buf).await?;
let storage_path_str = PathBuf::from(&year)
.join(&month)
.join(&filename)
.to_string_lossy()
.to_string();
let mime_type = mime_guess::from_path(file_path)
.first_or_octet_stream()
.to_string();
@@ -195,14 +169,29 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
mime_type,
hash,
created_at: chrono::Utc::now(),
extracted_location: location,
width: width,
height: height,
date_taken: date_taken,
thumbnail_path: None,
};
state.media_repo.create(&media_model).await?;
let mut metadata_models = Vec::new();
for (source, tag_name, tag_value) in extracted_data.all_tags {
metadata_models.push(MediaMetadata {
id: Uuid::new_v4(),
media_id: media_model.id,
source,
tag_name,
tag_value,
});
}
if !metadata_models.is_empty() {
state
.metadata_repo
.create_batch(&metadata_models)
.await?;
}
state
.user_repo
.update_storage_used(user.id, file_size)