use anyhow::Result;
use std::{
    path::{Path, PathBuf},
    sync::Arc,
};

use clap::Parser;
use libertas_core::{
    config::AppConfig,
    error::{CoreError, CoreResult},
    media_utils::{extract_exif_data, get_storage_path_and_date},
    models::{Media, MediaMetadata, User},
    repositories::{MediaImportRepository, MediaRepository, UserRepository},
    schema::MediaImportBundle,
};
use libertas_infra::factory::{
    build_database_pool, build_media_import_repository, build_media_repository,
    build_user_repository,
};
use sha2::{Digest, Sha256};
use tokio::fs;
use uuid::Uuid;
use walkdir::WalkDir;

mod config;
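
// Command-line arguments for a single import run.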
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[arg(short, long)]
    username: String,
    #[arg(short, long)]
    path: String,
    #[arg(short, long, default_value_t = false)]
    recursive: bool,
}
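
// Shared handles used by every file import: config, repositories, and the NATS client.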
struct ImporterState {
    config: AppConfig,
    media_repo: Arc<dyn MediaRepository>,
    user_repo: Arc<dyn UserRepository>,
    media_import_repo: Arc<dyn MediaImportRepository>,
    nats_client: async_nats::Client,
}
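
// Entry point: builds the infrastructure, resolves the target user, then walks
// the given path and imports each file it finds.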
#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();
    println!(
        "Starting import for user '{}' from path '{}'...",
        cli.username, cli.path
    );
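
    // Wire up the database pool, the repositories, and the NATS connection.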
    let config = config::load_config()?;
    let db_pool = build_database_pool(&config.database).await?;
    let media_repo = build_media_repository(&config, db_pool.clone()).await?;
    let user_repo = build_user_repository(&config.database, db_pool.clone()).await?;
    let media_import_repo =
        build_media_import_repository(&config.database, db_pool.clone()).await?;
    let nats_client = async_nats::connect(&config.broker_url).await?;

    println!("Connected to database and NATS broker.");

    let state = ImporterState {
        config,
        media_repo,
        user_repo,
        media_import_repo,
        nats_client,
    };
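
    // Look up the importing user once; `storage_used` is tracked in memory as
    // files are imported so the quota check stays current within this run.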
    let mut user = state
        .user_repo
        .find_by_username(&cli.username)
        .await?
        .ok_or_else(|| anyhow::anyhow!("User '{}' not found", cli.username))?;

    println!("User '{}' found with ID: {}", cli.username, user.id);
    println!("Storage: {} / {}", user.storage_used, user.storage_quota);
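
    // A max depth of 1 limits the walk to the directory's immediate entries;
    // --recursive lifts the limit entirely.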
    let max_depth = if cli.recursive { usize::MAX } else { 1 };
    let walker = WalkDir::new(&cli.path).max_depth(max_depth).into_iter();
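
    // Import each regular file; a failure skips that file rather than aborting the run.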
    for entry in walker.filter_map(Result::ok) {
        if entry.file_type().is_file() {
            let path = entry.path();

            match process_file(path, &mut user, &state).await {
                Ok(media) => {
                    println!("-> Imported: '{}'", media.original_filename);
                }
                Err(e) => {
                    eprintln!("!! Skipped: '{}' (Reason: {})", path.display(), e);
                }
            }
        }
    }

    println!("Import process completed.");

    Ok(())
}
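
/// Imports a single file: enforces the storage quota, rejects duplicates by
/// content hash, copies the file into the media library, persists the media
/// bundle, and publishes a processing job to NATS.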
async fn process_file(
    file_path: &Path,
    user: &mut User,
    state: &ImporterState,
) -> CoreResult<Media> {
    let file_bytes = fs::read(file_path).await?;
    let file_size = file_bytes.len() as i64;
    let hash = format!("{:x}", Sha256::digest(&file_bytes));
    let filename = file_path
        .file_name()
        .unwrap_or_default()
        .to_string_lossy()
        .to_string();
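
    // Reject the file if it would push the user over their storage quota.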
    if user.storage_used + file_size > user.storage_quota {
        return Err(CoreError::Auth("Storage quota exceeded".to_string()));
    }
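
    // Content-addressed dedup: an identical file anywhere in the library is rejected.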
    if state.media_repo.find_by_hash(&hash).await?.is_some() {
        return Err(CoreError::Duplicate(
            "A file with this content already exists".to_string(),
        ));
    }
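
    // Metadata extraction is best-effort: on failure, fall back to defaults
    // and import the file without EXIF-derived tags.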
    let extracted_data = match extract_exif_data(file_path).await {
        Ok(data) => {
            println!(" -> Parsed metadata: Tags={}", data.all_tags.len());
            data
        }
        Err(e) => {
            eprintln!(
                " -> Metadata parsing failed for {}: {}. Importing without metadata.",
                file_path.display(),
                e
            );
            Default::default()
        }
    };
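
    // Derive the library-relative storage path from the extracted metadata and filename.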
    let (storage_path_buf, _date_taken) = get_storage_path_and_date(&extracted_data, &filename);

    let mut dest_path_buf = PathBuf::from(&state.config.media_library_path);
    dest_path_buf.push(&storage_path_buf);

    println!(" -> Storing file at: {}", dest_path_buf.display());

    if let Some(parent) = dest_path_buf.parent() {
        fs::create_dir_all(parent).await?;
    }
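
    // Copy rather than move, so the source tree is left untouched.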
    fs::copy(file_path, &dest_path_buf).await?;

    let storage_path_str = storage_path_buf.to_string_lossy().to_string();

    let mime_type = mime_guess::from_path(file_path)
        .first_or_octet_stream()
        .to_string();
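
    // Build the media row plus one metadata row per extracted tag.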
    let media_model = Media {
        id: Uuid::new_v4(),
        owner_id: user.id,
        storage_path: storage_path_str,
        original_filename: filename,
        mime_type,
        hash,
        created_at: chrono::Utc::now(),
        thumbnail_path: None,
    };

    let mut metadata_models = Vec::new();
    for (source, tag_name, tag_value) in extracted_data.all_tags {
        metadata_models.push(MediaMetadata {
            id: Uuid::new_v4(),
            media_id: media_model.id,
            source,
            tag_name,
            tag_value,
        });
    }
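
    // Persist the media row and its metadata through the import repository,
    // then charge the file against the in-memory storage accounting.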
    let bundle = MediaImportBundle {
        media_model: media_model.clone(),
        metadata_models,
        file_size,
    };

    state.media_import_repo.create_media_bundle(bundle).await?;

    user.storage_used += file_size;
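
    // Publish a job so downstream workers can process the new media (e.g. thumbnails).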
    let job_payload = serde_json::json!({ "media_id": media_model.id });
    state
        .nats_client
        .publish("media.new".to_string(), job_payload.to_string().into())
        .await
        .map_err(|e| CoreError::Unknown(format!("Failed to publish NATS message: {}", e)))?;

    Ok(media_model)
}