feat: implement media metadata management with EXIF and TrackInfo support

2025-11-14 07:41:54 +01:00
parent ea95c2255f
commit 55cf4db2de
18 changed files with 343 additions and 195 deletions

View File

@@ -0,0 +1,11 @@
CREATE TABLE media_metadata (
id UUID PRIMARY KEY,
media_id UUID NOT NULL REFERENCES media (id) ON DELETE CASCADE,
source TEXT NOT NULL,
tag_name TEXT NOT NULL,
tag_value TEXT NOT NULL
);
CREATE INDEX idx_media_metadata_media_id ON media_metadata (media_id);
CREATE INDEX idx_media_metadata_tag_name_value ON media_metadata (tag_name, tag_value);
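
Illustrative only, not part of this commit: a minimal sqlx sketch of how a lookup by tag could use the (tag_name, tag_value) index above. The helper name find_media_ids_by_tag and the runtime-checked query are assumptions, not code from this change set.

use sqlx::PgPool;
use uuid::Uuid;

// Return the ids of all media rows that carry a given tag name/value pair.
pub async fn find_media_ids_by_tag(
    pool: &PgPool,
    tag_name: &str,
    tag_value: &str,
) -> Result<Vec<Uuid>, sqlx::Error> {
    let rows: Vec<(Uuid,)> = sqlx::query_as(
        "SELECT media_id FROM media_metadata WHERE tag_name = $1 AND tag_value = $2",
    )
    .bind(tag_name)
    .bind(tag_value)
    .fetch_all(pool)
    .await?;
    Ok(rows.into_iter().map(|(id,)| id).collect())
}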

View File

@@ -0,0 +1,7 @@
ALTER TABLE media
DROP COLUMN extracted_location,
DROP COLUMN width,
DROP COLUMN height,
DROP COLUMN date_taken;
DROP INDEX IF EXISTS idx_media_date_taken;

View File

@@ -256,10 +256,6 @@ impl MediaServiceImpl {
             mime_type,
             hash,
             created_at: chrono::Utc::now(),
-            extracted_location: None,
-            width: None,
-            height: None,
-            date_taken: None,
             thumbnail_path: None,
         };

View File

@@ -1,19 +1,16 @@
 use std::path::Path;
 use chrono::{DateTime, NaiveDateTime, Utc};
-use nom_exif::{AsyncMediaParser, AsyncMediaSource, Exif, ExifIter, ExifTag};
-use crate::error::{CoreError, CoreResult};
+use nom_exif::{AsyncMediaParser, AsyncMediaSource, ExifIter, TrackInfo};
+use crate::{error::{CoreError, CoreResult}, models::MediaMetadataSource};

 #[derive(Default, Debug)]
 pub struct ExtractedExif {
-    pub width: Option<i32>,
-    pub height: Option<i32>,
-    pub location: Option<String>,
-    pub date_taken: Option<DateTime<Utc>>,
+    pub all_tags: Vec<(MediaMetadataSource, String, String)>,
 }

-fn parse_exif_datetime(s: &str) -> Option<DateTime<Utc>> {
+pub fn parse_exif_datetime(s: &str) -> Option<DateTime<Utc>> {
     NaiveDateTime::parse_from_str(s, "%Y:%m:%d %H:%M:%S")
         .ok()
         .map(|ndt| ndt.and_local_timezone(Utc).unwrap())
@@ -24,11 +21,9 @@ pub async fn extract_exif_data(file_path: &Path) -> CoreResult<ExtractedExif> {
         .await
         .map_err(|e| CoreError::Unknown(format!("Failed to open file for EXIF: {}", e)))?;

-    if !ms.has_exif() {
-        return Ok(ExtractedExif::default());
-    }
-
     let mut parser = AsyncMediaParser::new();

-    let iter: ExifIter = match parser.parse(ms).await {
-        Ok(iter) => iter,
-        Err(e) => {
+    let all_tags = if ms.has_exif() {
+        let iter: ExifIter = match parser.parse(ms).await {
+            Ok(iter) => iter,
+            Err(e) => {
@@ -37,35 +32,46 @@ pub async fn extract_exif_data(file_path: &Path) -> CoreResult<ExtractedExif> {
-        }
-    };
+            }
+        };

-    let location = iter.parse_gps_info().ok().flatten().map(|g| g.format_iso6709());
-    let exif: Exif = iter.into();
-
-    let width = exif
-        .get(ExifTag::ExifImageWidth)
-        .and_then(|f| f.as_u32())
-        .map(|v| v as i32);
-
-    let height = exif
-        .get(ExifTag::ExifImageHeight)
-        .and_then(|f| f.as_u32())
-        .map(|v| v as i32);
-
-    let dt_original = exif
-        .get(ExifTag::DateTimeOriginal)
-        .and_then(|f| f.as_str())
-        .and_then(parse_exif_datetime);
-
-    let dt_modify = exif
-        .get(ExifTag::ModifyDate)
-        .and_then(|f| f.as_str())
-        .and_then(parse_exif_datetime);
-
-    let date_taken = dt_original.or(dt_modify);
-
-    Ok(ExtractedExif {
-        width,
-        height,
-        location,
-        date_taken,
-    })
+        iter.into_iter()
+            .filter_map(|mut x| {
+                let res = x.take_result();
+                match res {
+                    Ok(v) => Some((
+                        MediaMetadataSource::Exif,
+                        x.tag()
+                            .map(|t| t.to_string())
+                            .unwrap_or_else(|| format!("Unknown(0x{:04x})", x.tag_code())),
+                        v.to_string(),
+                    )),
+                    Err(e) => {
+                        println!(
+                            " !! EXIF parsing error for tag 0x{:04x}: {}",
+                            x.tag_code(),
+                            e
+                        );
+                        None
+                    }
+                }
+            })
+            .collect::<Vec<_>>()
+    } else {
+        match parser.parse::<_, _, TrackInfo>(ms).await {
+            Ok(info) => info
+                .into_iter()
+                .map(|x| {
+                    (
+                        MediaMetadataSource::TrackInfo,
+                        x.0.to_string(),
+                        x.1.to_string(),
+                    )
+                })
+                .collect::<Vec<_>>(),
+            Err(e) => {
+                println!("Could not parse TrackInfo: {}", e);
+                return Ok(ExtractedExif::default());
+            }
+        }
+    };
+
+    Ok(ExtractedExif { all_tags })
 }
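
Illustrative only, not part of the diff: a minimal caller of the reworked extract_exif_data, assuming the libertas_core paths used elsewhere in this commit. Every tag now comes back as a (source, name, value) triple, whether it originated from still-image EXIF or from a video's TrackInfo.

use std::path::Path;

use libertas_core::error::CoreResult;
use libertas_core::media_utils::extract_exif_data;

// Print every extracted tag, prefixed with its source ("exif" or "track_info").
async fn print_tags(path: &Path) -> CoreResult<()> {
    let extracted = extract_exif_data(path).await?;
    for (source, name, value) in &extracted.all_tags {
        println!("[{}] {} = {}", source.as_str(), name, value);
    }
    Ok(())
}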

View File

@@ -25,6 +25,30 @@ impl From<&str> for Role {
     }
 }

+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum MediaMetadataSource {
+    Exif,
+    TrackInfo,
+}
+
+impl MediaMetadataSource {
+    pub fn as_str(&self) -> &'static str {
+        match self {
+            MediaMetadataSource::Exif => "exif",
+            MediaMetadataSource::TrackInfo => "track_info",
+        }
+    }
+}
+
+impl From<&str> for MediaMetadataSource {
+    fn from(s: &str) -> Self {
+        match s {
+            "track_info" => MediaMetadataSource::TrackInfo,
+            _ => MediaMetadataSource::Exif,
+        }
+    }
+}
+
 pub struct Media {
     pub id: uuid::Uuid,
     pub owner_id: uuid::Uuid,
@@ -33,13 +57,17 @@ pub struct Media {
     pub mime_type: String,
     pub hash: String,
     pub created_at: chrono::DateTime<chrono::Utc>,
-    pub extracted_location: Option<String>,
-    pub width: Option<i32>,
-    pub height: Option<i32>,
-    pub date_taken: Option<chrono::DateTime<chrono::Utc>>,
     pub thumbnail_path: Option<String>,
 }

+pub struct MediaMetadata {
+    pub id: uuid::Uuid,
+    pub media_id: uuid::Uuid,
+    pub source: MediaMetadataSource,
+    pub tag_name: String,
+    pub tag_value: String,
+}
+
 #[derive(Clone)]
 pub struct User {
     pub id: uuid::Uuid,

View File

@@ -3,7 +3,7 @@ use std::sync::Arc;
 use async_trait::async_trait;

 use crate::{
-    config::Config, error::CoreResult, models::Media, repositories::{AlbumRepository, MediaRepository, UserRepository}
+    config::Config, error::CoreResult, models::Media, repositories::{AlbumRepository, MediaMetadataRepository, MediaRepository, UserRepository}
 };

 pub struct PluginData {
@@ -14,6 +14,7 @@ pub struct PluginContext {
     pub media_repo: Arc<dyn MediaRepository>,
     pub album_repo: Arc<dyn AlbumRepository>,
     pub user_repo: Arc<dyn UserRepository>,
+    pub metadata_repo: Arc<dyn MediaMetadataRepository>,
     pub media_library_path: String,
     pub config: Arc<Config>,
 }

View File

@@ -3,7 +3,7 @@ use uuid::Uuid;
 use crate::{
     error::CoreResult,
-    models::{Album, AlbumPermission, Media, User}, schema::ListMediaOptions,
+    models::{Album, AlbumPermission, Media, MediaMetadata, User}, schema::ListMediaOptions,
 };

 #[async_trait]
@@ -12,14 +12,6 @@ pub trait MediaRepository: Send + Sync {
     async fn create(&self, media: &Media) -> CoreResult<()>;
     async fn find_by_id(&self, id: Uuid) -> CoreResult<Option<Media>>;
     async fn list_by_user(&self, user_id: Uuid, options: &ListMediaOptions) -> CoreResult<Vec<Media>>;
-    async fn update_exif_data(
-        &self,
-        id: Uuid,
-        width: Option<i32>,
-        height: Option<i32>,
-        location: Option<String>,
-        date_taken: Option<chrono::DateTime<chrono::Utc>>,
-    ) -> CoreResult<()>;
     async fn update_thumbnail_path(&self, id: Uuid, thumbnail_path: String) -> CoreResult<()>;
     async fn delete(&self, id: Uuid) -> CoreResult<()>;
 }
@@ -60,3 +52,9 @@ pub trait AlbumShareRepository: Send + Sync {
     async fn is_media_in_shared_album(&self, media_id: Uuid, user_id: Uuid) -> CoreResult<bool>;
 }
+
+#[async_trait]
+pub trait MediaMetadataRepository: Send + Sync {
+    async fn create_batch(&self, metadata: &[MediaMetadata]) -> CoreResult<()>;
+    async fn find_by_media_id(&self, media_id: Uuid) -> CoreResult<Vec<MediaMetadata>>;
+}
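
Illustrative only, not part of the diff: a sketch of how a caller is expected to drive the new trait, mirroring what the importer and the exif_reader plugin below do. The helper name save_and_reload is an assumption.

use std::sync::Arc;

use libertas_core::error::CoreResult;
use libertas_core::models::{MediaMetadata, MediaMetadataSource};
use libertas_core::repositories::MediaMetadataRepository;
use uuid::Uuid;

// Turn extracted (source, name, value) tags into rows, persist them in one
// batch, then read them back for the same media id.
async fn save_and_reload(
    repo: Arc<dyn MediaMetadataRepository>,
    media_id: Uuid,
    tags: Vec<(MediaMetadataSource, String, String)>,
) -> CoreResult<Vec<MediaMetadata>> {
    let rows: Vec<MediaMetadata> = tags
        .into_iter()
        .map(|(source, tag_name, tag_value)| MediaMetadata {
            id: Uuid::new_v4(),
            media_id,
            source,
            tag_name,
            tag_value,
        })
        .collect();

    if !rows.is_empty() {
        repo.create_batch(&rows).await?;
    }
    repo.find_by_media_id(media_id).await
}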

View File

@@ -7,7 +7,7 @@ pub fn load_config() -> CoreResult<Config> {
     Ok(Config {
         database: DatabaseConfig {
             db_type: DatabaseType::Postgres,
-            url: "postgres://libertas:libertas_password@localhost:5436/libertas_db".to_string(),
+            url: "postgres://postgres:postgres@localhost:5432/libertas_db".to_string(),
         },
         server_address: "127.0.0.1:8080".to_string(),
         jwt_secret: "super_secret_jwt_key".to_string(),

View File

@@ -7,10 +7,9 @@ use std::{
 use chrono::Datelike;
 use clap::Parser;
 use libertas_core::{
-    config::Config, error::{CoreError, CoreResult}, media_utils::extract_exif_data, models::{Media, User}, repositories::{MediaRepository, UserRepository}
+    config::Config, error::{CoreError, CoreResult}, media_utils::{extract_exif_data, parse_exif_datetime}, models::{Media, MediaMetadata, MediaMetadataSource, User}, repositories::{MediaMetadataRepository, MediaRepository, UserRepository}
 };
-use libertas_infra::factory::{build_database_pool, build_media_repository, build_user_repository};
-use nom_exif::{AsyncMediaParser, AsyncMediaSource, ExifIter};
+use libertas_infra::factory::{build_database_pool, build_media_metadata_repository, build_media_repository, build_user_repository};
 use serde_json;
 use sha2::{Digest, Sha256};
 use tokio::fs;
@@ -34,6 +33,7 @@ struct ImporterState {
     config: Config,
     media_repo: Arc<dyn MediaRepository>,
     user_repo: Arc<dyn UserRepository>,
+    metadata_repo: Arc<dyn MediaMetadataRepository>,
     nats_client: async_nats::Client,
 }
@@ -49,6 +49,7 @@ async fn main() -> Result<()> {
     let db_pool = build_database_pool(&config.database).await?;
     let media_repo = build_media_repository(&config, db_pool.clone()).await?;
     let user_repo = build_user_repository(&config.database, db_pool.clone()).await?;
+    let metadata_repo = build_media_metadata_repository(&config.database, db_pool.clone()).await?;
     let nats_client = async_nats::connect(&config.broker_url).await?;

     println!("Connected to database and NATS broker.");
@@ -57,6 +58,7 @@ async fn main() -> Result<()> {
         config,
         media_repo,
         user_repo,
+        metadata_repo,
         nats_client,
     };
@@ -112,59 +114,33 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
         ));
     }

-    let (width, height, location, date_taken) = match extract_exif_data(file_path).await {
-        Ok(data) => {
-            println!(" -> Parsed EXIF: DateTimeOriginal={:?}, GPS={:?}", data.date_taken, data.location);
-            (data.width, data.height, data.location, data.date_taken)
-        },
-        Err(e) => {
-            eprintln!(" -> EXIF parsing failed for {}: {}. Skipping.", file_path.display(), e);
-            (None, None, None, None)
-        }
-    };
-
-    match AsyncMediaSource::file_path(file_path).await {
-        Ok(ms) => {
-            if ms.has_exif() {
-                let mut parser = AsyncMediaParser::new();
-                if let Ok(iter) = parser.parse::<_, _, ExifIter>(ms).await {
-                    let values = iter
-                        .into_iter()
-                        .filter_map(|mut x| {
-                            let res = x.take_result();
-                            match res {
-                                Ok(v) => Some((
-                                    x.tag().map(|x| x.to_string()).unwrap_or_else(|| {
-                                        format!("Unknown(0x{:04x})", x.tag_code())
-                                    }),
-                                    v,
-                                )),
-                                Err(e) => {
-                                    println!(
-                                        " !! EXIF parsing error for tag 0x{:04x}: {}",
-                                        x.tag_code(),
-                                        e
-                                    );
-                                    None
-                                }
-                            }
-                        })
-                        .collect::<Vec<_>>();
-                    values.iter().for_each(|x| {
-                        println!("{:<32}=> {}", x.0, x.1);
-                    });
-                } else {
-                    ()
-                }
-            } else {
-                ()
-            }
-        }
-        Err(_) => (),
-    };
-
-    let file_date = date_taken.unwrap_or_else(|| chrono::Utc::now());
+    let extracted_data = match extract_exif_data(file_path).await {
+        Ok(data) => {
+            println!(
+                " -> Parsed metadata: Tags={}",
+                data.all_tags.len()
+            );
+            data
+        }
+        Err(e) => {
+            eprintln!(
+                " -> Metadata parsing failed for {}: {}. Skipping.",
+                file_path.display(),
+                e
+            );
+            Default::default()
+        }
+    };
+
+    let date_taken_str = extracted_data.all_tags.iter()
+        .find(|(source, tag_name, _)| {
+            *source == MediaMetadataSource::Exif &&
+            (tag_name == "DateTimeOriginal" || tag_name == "ModifyDate")
+        })
+        .map(|(_, _, tag_value)| tag_value);
+
+    let date_taken = date_taken_str.and_then(|s| parse_exif_datetime(s));
+    let file_date = date_taken.unwrap_or_else(chrono::Utc::now);
     let year = file_date.year().to_string();
     let month = format!("{:02}", file_date.month());

     let mut dest_path_buf = PathBuf::from(&state.config.media_library_path);
@@ -176,13 +152,11 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
     dest_path_buf.push(&filename);

     fs::copy(file_path, &dest_path_buf).await?;

     let storage_path_str = PathBuf::from(&year)
         .join(&month)
         .join(&filename)
         .to_string_lossy()
         .to_string();

     let mime_type = mime_guess::from_path(file_path)
         .first_or_octet_stream()
         .to_string();
@@ -195,14 +169,29 @@ async fn process_file(file_path: &Path, user: &User, state: &ImporterState) -> C
         mime_type,
         hash,
         created_at: chrono::Utc::now(),
-        extracted_location: location,
-        width: width,
-        height: height,
-        date_taken: date_taken,
         thumbnail_path: None,
     };

     state.media_repo.create(&media_model).await?;

+    let mut metadata_models = Vec::new();
+    for (source, tag_name, tag_value) in extracted_data.all_tags {
+        metadata_models.push(MediaMetadata {
+            id: Uuid::new_v4(),
+            media_id: media_model.id,
+            source,
+            tag_name,
+            tag_value,
+        });
+    }
+
+    if !metadata_models.is_empty() {
+        state
+            .metadata_repo
+            .create_batch(&metadata_models)
+            .await?;
+    }
+
     state
         .user_repo
         .update_storage_used(user.id, file_size)

View File

@@ -10,6 +10,14 @@ pub enum PostgresRole {
     Admin,
 }

+#[derive(Debug, Clone, PartialEq, Eq, sqlx::Type)]
+#[sqlx(rename_all = "lowercase")]
+#[sqlx(type_name = "TEXT")]
+pub enum PostgresMediaMetadataSource {
+    Exif,
+    TrackInfo,
+}
+
 #[derive(sqlx::FromRow)]
 pub struct PostgresUser {
@@ -51,6 +59,15 @@ pub struct PostgresMedia {
     pub thumbnail_path: Option<String>,
 }

+#[derive(sqlx::FromRow)]
+pub struct PostgresMediaMetadata {
+    pub id: uuid::Uuid,
+    pub media_id: uuid::Uuid,
+    pub source: String,
+    pub tag_name: String,
+    pub tag_value: String,
+}
+
 #[derive(Debug, Clone, Copy, sqlx::Type, PartialEq, Eq, Deserialize)]
 #[sqlx(rename_all = "lowercase")]
 #[sqlx(type_name = "album_permission")]

View File

@@ -87,3 +87,19 @@ pub async fn build_album_share_repository(
         )),
     }
 }
+
+pub async fn build_media_metadata_repository(
+    _db_config: &DatabaseConfig,
+    pool: DatabasePool,
+) -> CoreResult<Arc<dyn libertas_core::repositories::MediaMetadataRepository>> {
+    match pool {
+        DatabasePool::Postgres(pg_pool) => Ok(Arc::new(
+            crate::repositories::media_metadata_repository::PostgresMediaMetadataRepository::new(
+                pg_pool,
+            ),
+        )),
+        DatabasePool::Sqlite(_sqlite_pool) => Err(CoreError::Database(
+            "Sqlite media metadata repository not implemented".to_string(),
+        )),
+    }
+}

View File

@@ -1,6 +1,6 @@
-use libertas_core::models::{Album, AlbumPermission, AlbumShare, Media, Role, User};
+use libertas_core::models::{Album, AlbumPermission, AlbumShare, Media, MediaMetadata, MediaMetadataSource, Role, User};

-use crate::db_models::{PostgresAlbum, PostgresAlbumPermission, PostgresAlbumShare, PostgresMedia, PostgresRole, PostgresUser};
+use crate::db_models::{PostgresAlbum, PostgresAlbumPermission, PostgresAlbumShare, PostgresMedia, PostgresMediaMetadata, PostgresMediaMetadataSource, PostgresRole, PostgresUser};

 impl From<PostgresRole> for Role {
     fn from(pg_role: PostgresRole) -> Self {
@@ -20,6 +20,24 @@ impl From<Role> for PostgresRole {
     }
 }

+impl From<PostgresMediaMetadataSource> for MediaMetadataSource {
+    fn from(pg_source: PostgresMediaMetadataSource) -> Self {
+        match pg_source {
+            PostgresMediaMetadataSource::Exif => MediaMetadataSource::Exif,
+            PostgresMediaMetadataSource::TrackInfo => MediaMetadataSource::TrackInfo,
+        }
+    }
+}
+
+impl From<MediaMetadataSource> for PostgresMediaMetadataSource {
+    fn from(source: MediaMetadataSource) -> Self {
+        match source {
+            MediaMetadataSource::Exif => PostgresMediaMetadataSource::Exif,
+            MediaMetadataSource::TrackInfo => PostgresMediaMetadataSource::TrackInfo,
+        }
+    }
+}
+
 impl From<PostgresUser> for User {
     fn from(pg_user: PostgresUser) -> Self {
         User {
@@ -60,15 +78,23 @@ impl From<PostgresMedia> for Media {
             mime_type: pg_media.mime_type,
             hash: pg_media.hash,
             created_at: pg_media.created_at,
-            extracted_location: pg_media.extracted_location,
-            width: pg_media.width,
-            height: pg_media.height,
-            date_taken: pg_media.date_taken,
             thumbnail_path: pg_media.thumbnail_path,
         }
     }
 }

+impl From<PostgresMediaMetadata> for MediaMetadata {
+    fn from(pg_metadata: PostgresMediaMetadata) -> Self {
+        MediaMetadata {
+            id: pg_metadata.id,
+            media_id: pg_metadata.media_id,
+            source: MediaMetadataSource::from(pg_metadata.source.as_str()),
+            tag_name: pg_metadata.tag_name,
+            tag_value: pg_metadata.tag_value,
+        }
+    }
+}
+
 impl From<PostgresAlbumPermission> for AlbumPermission {
     fn from(pg_permission: PostgresAlbumPermission) -> Self {
         match pg_permission {

View File

@@ -0,0 +1,79 @@
use async_trait::async_trait;
use libertas_core::{error::{CoreError, CoreResult}, models::MediaMetadata, repositories::MediaMetadataRepository};
use sqlx::PgPool;

use crate::db_models::{PostgresMediaMetadata, PostgresMediaMetadataSource};

pub struct PostgresMediaMetadataRepository {
    pool: PgPool,
}

impl PostgresMediaMetadataRepository {
    pub fn new(pool: PgPool) -> Self {
        Self { pool }
    }
}

#[async_trait]
impl MediaMetadataRepository for PostgresMediaMetadataRepository {
    async fn create_batch(&self, metadata: &[MediaMetadata]) -> CoreResult<()> {
        if metadata.is_empty() {
            return Ok(());
        }

        let mut ids = Vec::with_capacity(metadata.len());
        let mut media_ids = Vec::with_capacity(metadata.len());
        let mut sources = Vec::with_capacity(metadata.len());
        let mut tag_names = Vec::with_capacity(metadata.len());
        let mut tag_values = Vec::with_capacity(metadata.len());

        for item in metadata {
            ids.push(item.id);
            media_ids.push(item.media_id);
            sources.push(item.source.into());
            tag_names.push(item.tag_name.clone());
            tag_values.push(item.tag_value.clone());
        }

        sqlx::query!(
            r#"
            INSERT INTO media_metadata (id, media_id, source, tag_name, tag_value)
            SELECT * FROM unnest(
                $1::uuid[],
                $2::uuid[],
                $3::text[],
                $4::text[],
                $5::text[]
            )
            "#,
            &ids,
            &media_ids,
            &sources as &[PostgresMediaMetadataSource],
            &tag_names,
            &tag_values,
        )
        .execute(&self.pool)
        .await
        .map_err(|e| CoreError::Database(e.to_string()))?;

        Ok(())
    }

    async fn find_by_media_id(&self, media_id: uuid::Uuid) -> CoreResult<Vec<MediaMetadata>> {
        let pg_metadata = sqlx::query_as!(
            PostgresMediaMetadata,
            r#"
            SELECT id, media_id, source, tag_name, tag_value
            FROM media_metadata
            WHERE media_id = $1
            "#,
            media_id
        )
        .fetch_all(&self.pool)
        .await
        .map_err(|e| CoreError::Database(e.to_string()))?;

        let metadata = pg_metadata.into_iter().map(|m| m.into()).collect();

        Ok(metadata)
    }
}

View File

@@ -31,8 +31,8 @@ impl MediaRepository for PostgresMediaRepository {
     async fn create(&self, media: &Media) -> CoreResult<()> {
         sqlx::query!(
             r#"
-            INSERT INTO media (id, owner_id, storage_path, original_filename, mime_type, hash, created_at, width, height, thumbnail_path)
-            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
+            INSERT INTO media (id, owner_id, storage_path, original_filename, mime_type, hash, created_at, thumbnail_path)
+            VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
             "#,
             media.id,
             media.owner_id,
@@ -41,8 +41,6 @@ impl MediaRepository for PostgresMediaRepository {
             media.mime_type,
             media.hash,
             media.created_at,
-            media.width,
-            media.height,
             media.thumbnail_path
         )
         .execute(&self.pool)
@@ -114,33 +112,6 @@ impl MediaRepository for PostgresMediaRepository {
         Ok(media_list)
     }

-    async fn update_exif_data(
-        &self,
-        id: Uuid,
-        width: Option<i32>,
-        height: Option<i32>,
-        location: Option<String>,
-        date_taken: Option<chrono::DateTime<chrono::Utc>>,
-    ) -> CoreResult<()> {
-        sqlx::query!(
-            r#"
-            UPDATE media
-            SET width = $2, height = $3, extracted_location = $4, date_taken = $5
-            WHERE id = $1 AND date_taken IS NULL
-            "#,
-            id,
-            width,
-            height,
-            location,
-            date_taken
-        )
-        .execute(&self.pool)
-        .await
-        .map_err(|e| CoreError::Database(e.to_string()))?;
-
-        Ok(())
-    }
-
     async fn update_thumbnail_path(&self, id: Uuid, thumbnail_path: String) -> CoreResult<()> {
         sqlx::query!(
             r#"

View File

@@ -2,3 +2,4 @@ pub mod album_repository;
 pub mod album_share_repository;
 pub mod media_repository;
 pub mod user_repository;
+pub mod media_metadata_repository;

View File

@@ -3,7 +3,7 @@ use std::{path::PathBuf, sync::Arc};
 use futures_util::StreamExt;
 use libertas_core::plugins::PluginContext;
 use libertas_infra::factory::{
-    build_album_repository, build_database_pool, build_media_repository, build_user_repository,
+    build_album_repository, build_database_pool, build_media_metadata_repository, build_media_repository, build_user_repository
 };
 use serde::Deserialize;
 use tokio::fs;
@@ -37,11 +37,14 @@ async fn main() -> anyhow::Result<()> {
     let media_repo = build_media_repository(&config, db_pool.clone()).await?;
     let album_repo = build_album_repository(&config.database, db_pool.clone()).await?;
     let user_repo = build_user_repository(&config.database, db_pool.clone()).await?;
+    let metadata_repo =
+        build_media_metadata_repository(&config.database, db_pool.clone()).await?;

     let context = Arc::new(PluginContext {
         media_repo,
         album_repo,
         user_repo,
+        metadata_repo,
         media_library_path: config.media_library_path.clone(),
         config: Arc::new(config.clone()),
     });

View File

@@ -1,8 +1,9 @@
 use async_trait::async_trait;
+use uuid::Uuid;
 use std::path::PathBuf;

 use libertas_core::{
-    error::CoreResult, media_utils::extract_exif_data, models::Media, plugins::{MediaProcessorPlugin, PluginContext, PluginData}
+    error::CoreResult, media_utils::extract_exif_data, models::{Media, MediaMetadata}, plugins::{MediaProcessorPlugin, PluginContext, PluginData}
 };

 pub struct ExifReaderPlugin;
@@ -16,8 +17,8 @@ impl MediaProcessorPlugin for ExifReaderPlugin {
     async fn process(&self, media: &Media, context: &PluginContext) -> CoreResult<PluginData> {
         let file_path = PathBuf::from(&context.media_library_path).join(&media.storage_path);

-        let (width, height, location, date_taken) = match extract_exif_data(&file_path).await {
-            Ok(data) => (data.width, data.height, data.location, data.date_taken),
+        let extracted_data = match extract_exif_data(&file_path).await {
+            Ok(data) => data,
             Err(e) => {
                 return Ok(PluginData {
                     message: format!("Could not parse EXIF: {}", e),
@@ -25,21 +26,28 @@ impl MediaProcessorPlugin for ExifReaderPlugin {
             }
         };

-        if width.is_some() || height.is_some() || location.is_some() || date_taken.is_some() {
-            context
-                .media_repo
-                .update_exif_data(media.id, width, height, location.clone(), date_taken)
-                .await?;
-
-            let message = format!(
-                "Extracted EXIF: width={:?}, height={:?}, location={:?}, date_taken={:?}",
-                width, height, location, date_taken
-            );
-
-            Ok(PluginData { message })
-        } else {
-            Ok(PluginData {
-                message: "No EXIF width/height or GPS location found.".to_string(),
-            })
-        }
+        let mut metadata_models = Vec::new();
+        for (source, tag_name, tag_value) in extracted_data.all_tags {
+            metadata_models.push(MediaMetadata {
+                id: Uuid::new_v4(),
+                media_id: media.id,
+                source,
+                tag_name,
+                tag_value,
+            });
+        }
+
+        let message;
+        if !metadata_models.is_empty() {
+            context
+                .metadata_repo
+                .create_batch(&metadata_models)
+                .await?;
+            message = format!("Saved {} metadata tags.", metadata_models.len());
+        } else {
+            message = "No metadata tags found.".to_string();
+        }
+
+        Ok(PluginData { message })
     }
 }

View File

@@ -18,13 +18,8 @@ impl MediaProcessorPlugin for XmpWriterPlugin {
     }

     async fn process(&self, media: &Media, context: &PluginContext) -> CoreResult<PluginData> {
-        let fresh_media = context
-            .media_repo
-            .find_by_id(media.id)
-            .await?
-            .ok_or(CoreError::NotFound("Media".to_string(), media.id))?;
-
-        let file_path = PathBuf::from(&context.media_library_path).join(&fresh_media.storage_path);
+        let metadata = context.metadata_repo.find_by_media_id(media.id).await?;
+        let file_path = PathBuf::from(&context.media_library_path).join(&media.storage_path);

         let xmp_path = format!("{}.xmp", file_path.to_string_lossy());

         let mut xmp = XmpMeta::new()
@@ -33,28 +28,24 @@ impl MediaProcessorPlugin for XmpWriterPlugin {
         xmp.set_property(
             "http://purl.org/dc/elements/1.1/",
             "description",
-            &fresh_media.original_filename.into(),
+            &XmpValue::from(media.original_filename.as_str()),
         )
         .map_err(|e| {
             CoreError::Unknown(format!("Failed to set description property in XMP: {}", e))
         })?;

-        if let Some(date_taken) = &fresh_media.date_taken {
-            let date_str = date_taken.to_rfc3339();
+        if let Some(date_tag) = metadata.iter().find(|m| m.tag_name == "DateTimeOriginal") {
+            let date_str = &date_tag.tag_value;
             xmp.set_property(
                 "http://ns.adobe.com/exif/1.0/",
                 "DateTimeOriginal",
-                &XmpValue::from(date_str),
+                &XmpValue::from(date_str.as_str()),
             )
             .map_err(|e| {
                 CoreError::Unknown(format!("Failed to set DateTimeOriginal in XMP: {}", e))
             })?;
         }

-        if let Some(_location) = &fresh_media.extracted_location {
-            // TODO: Set location properties in XMP
-        }
-
         let xmp_str = xmp.to_string();
         fs::write(&xmp_path, xmp_str).await?;