application layer

Co-authored-by: Copilot <copilot@github.com>
This commit is contained in:
2026-05-04 01:19:59 +02:00
parent 810bad1126
commit 65bab7fd44
16 changed files with 392 additions and 31 deletions

6
Cargo.lock generated
View File

@@ -26,6 +26,12 @@ checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]]
name = "application"
version = "0.1.0"
dependencies = [
"chrono",
"domain",
"tracing",
"uuid",
]
[[package]]
name = "async-trait"

View File

@@ -4,3 +4,7 @@ version = "0.1.0"
edition = "2024"
[dependencies]
domain = { workspace = true }
uuid = { workspace = true }
chrono = { workspace = true }
tracing = { workspace = true }

View File

@@ -0,0 +1,20 @@
use chrono::NaiveDateTime;
use uuid::Uuid;
/// Input for the "log review" use case.
///
/// A review can target either a movie resolved via an external metadata
/// provider (`external_metadata_id`) or a manually described one
/// (`manual_*` fields). The manual fields act as a fallback when the
/// external lookup is omitted or fails.
pub struct LogReviewCommand {
    /// Optional id in the external metadata provider (TMDB, per the use case's log messages).
    pub external_metadata_id: Option<String>,
    /// Manual fallback title; required when the external lookup is absent or fails.
    pub manual_title: Option<String>,
    /// Manual fallback release year; required when the external lookup is absent or fails.
    pub manual_release_year: Option<u16>,
    /// Optional director name, used to disambiguate manual title/year matches.
    pub manual_director: Option<String>,
    /// Id of the user logging the review.
    pub user_id: Uuid,
    /// Raw rating value; validated into a `Rating` by the use case.
    pub rating: u8,
    /// Optional free-text comment; validated into a `Comment` by the use case.
    pub comment: Option<String>,
    /// When the movie was watched.
    pub watched_at: NaiveDateTime,
}
/// Input for the "sync poster" use case: fetch and store the poster image
/// for an already-persisted movie, using the external provider id to look
/// up the poster URL.
pub struct SyncPosterCommand {
    /// Id of the movie in the local repository.
    pub movie_id: Uuid,
    /// Raw external provider id; validated into an `ExternalMetadataId`.
    pub external_metadata_id: String,
}

View File

@@ -0,0 +1,17 @@
use std::sync::Arc;
use domain::ports::{
AuthService, EventPublisher, MetadataClient, MovieRepository, PasswordHasher,
PosterFetcherClient, PosterStorage,
};
/// Dependency container for the application layer.
///
/// Holds a trait object for every outbound port the use cases need.
/// `Clone` is cheap: each dependency sits behind an `Arc`, so cloning
/// only bumps reference counts.
#[derive(Clone)]
pub struct AppContext {
    /// Persistence for movies, reviews, and diary queries.
    pub repository: Arc<dyn MovieRepository>,
    /// Remote movie-metadata provider (lookup and poster URLs).
    pub metadata_client: Arc<dyn MetadataClient>,
    /// Downloads poster image bytes from a URL.
    pub poster_fetcher: Arc<dyn PosterFetcherClient>,
    /// Persists downloaded poster images.
    pub poster_storage: Arc<dyn PosterStorage>,
    /// Outbound publisher for domain events.
    pub event_publisher: Arc<dyn EventPublisher>,
    /// Authentication port (usage not visible in this layer's use cases).
    pub auth_service: Arc<dyn AuthService>,
    /// Password hashing port (usage not visible in this layer's use cases).
    pub password_hasher: Arc<dyn PasswordHasher>,
}

View File

@@ -1,14 +1,4 @@
pub fn add(left: u64, right: u64) -> u64 {
left + right
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn it_works() {
let result = add(2, 2);
assert_eq!(result, 4);
}
}
pub mod commands;
pub mod context;
pub mod queries;
pub mod use_cases;

View File

@@ -0,0 +1,13 @@
use domain::models::SortDirection;
use uuid::Uuid;
/// Parameters for the diary query (paged, sorted list of diary entries).
pub struct GetDiaryQuery {
    /// Page size; `None` handling is delegated to `PageParams::new`.
    pub limit: Option<u32>,
    /// Page offset; `None` handling is delegated to `PageParams::new`.
    pub offset: Option<u32>,
    /// Sort direction; defaults to `SortDirection::Descending` when `None`.
    pub sort_by: Option<SortDirection>,
    /// Optional filter restricting results to a single movie.
    pub movie_id: Option<Uuid>,
}
/// Parameters for fetching the full review history of one movie.
pub struct GetReviewHistoryQuery {
    /// Id of the movie whose viewings should be returned.
    pub movie_id: Uuid,
}

View File

@@ -0,0 +1,29 @@
use domain::{
errors::DomainError,
models::{
DiaryEntry, DiaryFilter, SortDirection,
collections::{PageParams, Paginated},
},
value_objects::MovieId,
};
use crate::{context::AppContext, queries::GetDiaryQuery};
/// Runs the "get diary" query: translates the raw query parameters into a
/// validated `DiaryFilter` and delegates to the repository.
///
/// # Errors
/// Propagates validation failures from `PageParams::new` and any
/// repository error from `query_diary`.
pub async fn execute(
    ctx: &AppContext,
    query: GetDiaryQuery,
) -> Result<Paginated<DiaryEntry>, DomainError> {
    // Validate pagination first so bad paging never reaches the repository.
    let filter = DiaryFilter {
        sort_by: query.sort_by.unwrap_or(SortDirection::Descending),
        page: PageParams::new(query.limit, query.offset)?,
        movie_id: query.movie_id.map(MovieId::from_uuid),
    };
    ctx.repository.query_diary(&filter).await
}

View File

@@ -0,0 +1,23 @@
use domain::{
errors::DomainError,
models::ReviewHistory,
services::review_history::{ReviewHistoryAnalyzer, Trend},
value_objects::MovieId,
};
use crate::{context::AppContext, queries::GetReviewHistoryQuery};
/// Runs the "get review history" query for a single movie.
///
/// Returns the movie's review history, sorted chronologically in place,
/// together with the rating trend computed by `ReviewHistoryAnalyzer`.
pub async fn execute(
    ctx: &AppContext,
    query: GetReviewHistoryQuery,
) -> Result<(ReviewHistory, Trend), DomainError> {
    let id = MovieId::from_uuid(query.movie_id);
    let mut history = ctx.repository.get_review_history(&id).await?;
    // `rating_trend` sorts its own internal clone, so computing it before
    // the in-place sort below yields the same result.
    let trend = ReviewHistoryAnalyzer::rating_trend(&history)?;
    ReviewHistoryAnalyzer::sort_chronologically(&mut history);
    Ok((history, trend))
}

View File

@@ -0,0 +1,115 @@
use domain::{
errors::DomainError,
events::DomainEvent,
models::{Movie, Review},
value_objects::{Comment, ExternalMetadataId, MovieTitle, Rating, ReleaseYear, UserId},
};
use crate::{commands::LogReviewCommand, context::AppContext};
/// Orchestrates the "log review" use case.
///
/// Steps: validate the raw command values, resolve the target movie
/// (external metadata path first, manual fields as fallback), persist the
/// movie and the review, then publish the resulting domain events.
pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), DomainError> {
    // Validate user-supplied values up front so we fail before any I/O.
    let rating = Rating::new(cmd.rating)?;
    let author = UserId::from_uuid(cmd.user_id);
    // Clone is required: `cmd` is still borrowed as a whole by
    // `resolve_movie` below, so the comment cannot be moved out of it here.
    let comment = match cmd.comment.clone() {
        Some(text) => Some(Comment::new(text)?),
        None => None,
    };
    let (movie, is_new_movie) = resolve_movie(ctx, &cmd).await?;
    ctx.repository.upsert_movie(&movie).await?;
    let review = Review::new(movie.id().clone(), author, rating, comment, cmd.watched_at)?;
    let review_event = ctx.repository.save_review(&review).await?;
    publish_events(ctx, &movie, is_new_movie, review_event).await
}
/// Resolves the movie a review refers to.
///
/// Tries the external metadata path when an external id is supplied;
/// when that path is skipped or yields nothing, falls back to matching or
/// creating a movie from the manual fields. The returned boolean flags
/// whether the movie is newly created (not yet persisted).
async fn resolve_movie(
    ctx: &AppContext,
    cmd: &LogReviewCommand,
) -> Result<(Movie, bool), DomainError> {
    match &cmd.external_metadata_id {
        Some(ext_id) => match resolve_external_movie(ctx, ext_id).await? {
            Some(resolved) => Ok(resolved),
            None => resolve_manual_movie(ctx, cmd).await,
        },
        None => resolve_manual_movie(ctx, cmd).await,
    }
}
/// Looks up a movie via its external metadata id.
///
/// Order: local repository first (returns "not new"), then the remote
/// metadata client (returns "new"). A remote failure is logged and
/// reported as `Ok(None)` so the caller can fall back to manual entry
/// instead of aborting the whole use case.
async fn resolve_external_movie(
    ctx: &AppContext,
    ext_id_str: &str,
) -> Result<Option<(Movie, bool)>, DomainError> {
    let tmdb_id = ExternalMetadataId::new(ext_id_str.to_string())?;
    // Already known locally: reuse it, not a new movie.
    if let Some(existing) = ctx.repository.get_movie_by_external_id(&tmdb_id).await? {
        return Ok(Some((existing, false)));
    }
    // Unknown locally: ask the remote provider; a fetched movie is new.
    match ctx.metadata_client.fetch_movie_metadata(&tmdb_id).await {
        Ok(fetched) => Ok(Some((fetched, true))),
        Err(err) => {
            tracing::warn!(
                "Failed to fetch from TMDB, falling back to manual entry: {:?}",
                err
            );
            Ok(None)
        }
    }
}
/// Resolves a movie from the manually supplied title/year/director fields.
///
/// Requires `manual_title` and `manual_release_year`. Existing movies are
/// searched by title and year and a candidate is reused when
/// `Movie::is_manual_match` accepts it; otherwise a brand-new movie is
/// created without an external metadata id.
async fn resolve_manual_movie(
    ctx: &AppContext,
    cmd: &LogReviewCommand,
) -> Result<(Movie, bool), DomainError> {
    let title_text = cmd.manual_title.as_ref().ok_or_else(|| {
        DomainError::ValidationError(
            "Manual title required if TMDB fetch fails or is omitted".into(),
        )
    })?;
    let year = cmd.manual_release_year.ok_or_else(|| {
        DomainError::ValidationError(
            "Manual release year required if TMDB fetch fails or is omitted".into(),
        )
    })?;
    let title = MovieTitle::new(title_text.clone())?;
    let release_year = ReleaseYear::new(year)?;
    // Reuse an existing movie when title/year (and, when present, director) line up.
    let existing = ctx
        .repository
        .get_movies_by_title_and_year(&title, &release_year)
        .await?
        .into_iter()
        .find(|candidate| {
            candidate.is_manual_match(&title, &release_year, cmd.manual_director.as_deref())
        });
    match existing {
        Some(movie) => Ok((movie, false)),
        None => {
            // No match: create a fresh movie with no external metadata id.
            let movie =
                Movie::new(None, title, release_year, cmd.manual_director.clone(), None);
            Ok((movie, true))
        }
    }
}
/// Publishes the domain events produced by logging a review.
///
/// Emits a `MovieDiscovered` event first when the movie was just created
/// and carries an external metadata id, then always publishes the review
/// event returned by the repository.
async fn publish_events(
    ctx: &AppContext,
    movie: &Movie,
    is_new_movie: bool,
    review_event: DomainEvent,
) -> Result<(), DomainError> {
    // Discovery only applies to newly created movies that came from the
    // external provider (i.e. carry an external metadata id).
    if let Some(ext_id) = movie.external_metadata_id().filter(|_| is_new_movie) {
        let discovery = DomainEvent::MovieDiscovered {
            movie_id: movie.id().clone(),
            external_metadata_id: ext_id.clone(),
        };
        ctx.event_publisher.publish(&discovery).await?;
    }
    ctx.event_publisher.publish(&review_event).await
}

View File

@@ -0,0 +1,4 @@
// One module per application use case; each exposes an `execute` entry point.
pub mod get_diary;
pub mod get_review_history;
pub mod log_review;
pub mod sync_poster;

View File

@@ -0,0 +1,47 @@
use domain::{
errors::DomainError,
value_objects::{ExternalMetadataId, MovieId},
};
use crate::{commands::SyncPosterCommand, context::AppContext};
/// Runs the "sync poster" use case: resolves the poster URL for a movie
/// through the metadata client, downloads the image bytes, stores them,
/// and records the stored path on the movie.
///
/// # Errors
/// - `DomainError::NotFound` when the movie is not in the local repository.
/// - Errors from the metadata client, poster fetcher, poster storage, and
///   repository are propagated. A missing poster URL (`Ok(None)` from the
///   provider) is not an error: the sync is simply a no-op.
pub async fn execute(ctx: &AppContext, cmd: SyncPosterCommand) -> Result<(), DomainError> {
    let movie_id = MovieId::from_uuid(cmd.movie_id);
    let external_metadata_id = ExternalMetadataId::new(cmd.external_metadata_id)?;
    let mut movie = match ctx.repository.get_movie_by_id(&movie_id).await? {
        Some(m) => m,
        None => {
            tracing::warn!(
                "Sync cancelled: Movie {} not found in local DB",
                movie_id.value()
            );
            return Err(DomainError::NotFound("Movie not found".into()));
        }
    };
    let poster_url = match ctx
        .metadata_client
        .get_poster_url(&external_metadata_id)
        .await
    {
        Ok(Some(url)) => url,
        // Provider knows the movie but has no poster: nothing to sync.
        Ok(None) => return Ok(()),
        Err(e) => {
            // Fix: dropped the redundant "Warning:" prefix — the level is
            // already conveyed by `tracing::warn!` itself.
            tracing::warn!("Failed to find poster URL metadata: {:?}", e);
            return Err(e);
        }
    };
    let image_bytes = ctx.poster_fetcher.fetch_poster_bytes(&poster_url).await?;
    let stored_path = ctx
        .poster_storage
        .store_poster(&movie_id, &image_bytes)
        .await?;
    movie.update_poster(stored_path);
    ctx.repository.upsert_movie(&movie).await?;
    Ok(())
}

View File

@@ -1,6 +1,6 @@
use chrono::NaiveDateTime;
use crate::value_objects::{MovieId, Rating, ReviewId, UserId};
use crate::value_objects::{ExternalMetadataId, MovieId, Rating, ReviewId, UserId};
#[derive(Clone, Debug)]
pub enum DomainEvent {
@@ -11,4 +11,8 @@ pub enum DomainEvent {
rating: Rating,
watched_at: NaiveDateTime,
},
MovieDiscovered {
movie_id: MovieId,
external_metadata_id: ExternalMetadataId,
},
}

View File

@@ -27,7 +27,7 @@ pub struct DiaryFilter {
#[derive(Clone, Debug)]
pub struct Movie {
id: MovieId,
external_metadata_id: ExternalMetadataId,
external_metadata_id: Option<ExternalMetadataId>,
title: MovieTitle,
release_year: ReleaseYear,
director: Option<String>,
@@ -36,7 +36,7 @@ pub struct Movie {
impl Movie {
pub fn new(
external_metadata_id: ExternalMetadataId,
external_metadata_id: Option<ExternalMetadataId>,
title: MovieTitle,
release_year: ReleaseYear,
director: Option<String>,
@@ -52,11 +52,15 @@ impl Movie {
}
}
pub fn update_poster(&mut self, poster_path: PosterPath) {
self.poster_path = Some(poster_path);
}
pub fn id(&self) -> &MovieId {
&self.id
}
pub fn external_metadata_id(&self) -> &ExternalMetadataId {
&self.external_metadata_id
pub fn external_metadata_id(&self) -> Option<&ExternalMetadataId> {
self.external_metadata_id.as_ref()
}
pub fn title(&self) -> &MovieTitle {
&self.title
@@ -72,6 +76,24 @@ impl Movie {
}
}
impl Movie {
    /// Checks whether this movie matches manually entered review data.
    ///
    /// Title and release year must both compare equal. Directors are
    /// compared case-insensitively (ASCII) only when both this movie and
    /// the input specify one; when either side lacks a director, the
    /// director check is treated as a match.
    pub fn is_manual_match(
        &self,
        title: &MovieTitle,
        year: &ReleaseYear,
        director: Option<&str>,
    ) -> bool {
        if self.title != *title || self.release_year != *year {
            return false;
        }
        match (self.director(), director) {
            (Some(existing_dir), Some(new_dir)) => existing_dir.eq_ignore_ascii_case(new_dir),
            _ => true,
        }
    }
}
#[derive(Clone, Debug)]
pub struct Review {
id: ReviewId,
@@ -135,14 +157,43 @@ impl Review {
#[derive(Clone, Debug)]
pub struct DiaryEntry {
pub movie: Movie,
pub review: Review,
movie: Movie,
review: Review,
}
impl DiaryEntry {
    /// Builds a diary entry pairing a movie with one review of it.
    pub fn new(movie: Movie, review: Review) -> Self {
        Self { movie, review }
    }

    /// The reviewed movie.
    pub fn movie(&self) -> &Movie {
        &self.movie
    }

    /// The review recorded for this entry.
    pub fn review(&self) -> &Review {
        &self.review
    }
}
#[derive(Clone, Debug)]
pub struct ReviewHistory {
pub movie: Movie,
pub viewings: Vec<Review>,
movie: Movie,
viewings: Vec<Review>,
}
impl ReviewHistory {
    /// Builds a history of all recorded viewings of one movie.
    pub fn new(movie: Movie, viewings: Vec<Review>) -> Self {
        Self { movie, viewings }
    }

    /// The movie this history belongs to.
    pub fn movie(&self) -> &Movie {
        &self.movie
    }

    /// All recorded viewings, in whatever order they were stored
    /// (callers sort explicitly, e.g. via the review-history analyzer).
    pub fn viewings(&self) -> &[Review] {
        &self.viewings
    }

    /// Mutable access to the viewings, e.g. for in-place sorting.
    pub fn viewings_mut(&mut self) -> &mut Vec<Review> {
        &mut self.viewings
    }
}
#[derive(Clone, Debug)]

View File

@@ -4,11 +4,25 @@ use crate::{
errors::DomainError,
events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated},
value_objects::{ExternalMetadataId, MovieId, PasswordHash, PosterPath, UserId},
value_objects::{
ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl, ReleaseYear,
UserId,
},
};
#[async_trait]
pub trait MovieRepository: Send + Sync {
async fn get_movie_by_external_id(
&self,
external_metadata_id: &ExternalMetadataId,
) -> Result<Option<Movie>, DomainError>;
async fn get_movie_by_id(&self, movie_id: &MovieId) -> Result<Option<Movie>, DomainError>;
async fn get_movies_by_title_and_year(
&self,
title: &MovieTitle,
year: &ReleaseYear,
) -> Result<Vec<Movie>, DomainError>;
async fn upsert_movie(&self, movie: &Movie) -> Result<(), DomainError>;
async fn save_review(&self, review: &Review) -> Result<DomainEvent, DomainError>;
@@ -25,11 +39,15 @@ pub trait MetadataClient: Send + Sync {
&self,
external_metadata_id: &ExternalMetadataId,
) -> Result<Movie, DomainError>;
async fn get_poster_url(
&self,
external_metadata_id: &ExternalMetadataId,
) -> Result<Option<PosterUrl>, DomainError>;
}
#[async_trait]
pub trait PosterFetcherClient: Send + Sync {
async fn fetch_poster_bytes(&self, poster_url: &str) -> Result<Vec<u8>, DomainError>;
async fn fetch_poster_bytes(&self, poster_url: &PosterUrl) -> Result<Vec<u8>, DomainError>;
}
#[async_trait]

View File

@@ -12,35 +12,35 @@ pub enum Trend {
impl ReviewHistoryAnalyzer {
pub fn sort_chronologically(history: &mut ReviewHistory) {
history
.viewings
.viewings_mut()
.sort_by(|a, b| a.watched_at().cmp(&b.watched_at()));
}
pub fn get_latest_rating(history: &ReviewHistory) -> Option<&Rating> {
history
.viewings
.viewings()
.iter()
.max_by_key(|r| r.watched_at())
.map(|r| r.rating())
}
pub fn rating_trend(history: &ReviewHistory) -> Result<Trend, DomainError> {
if history.viewings.len() < 2 {
if history.viewings().len() < 2 {
return Ok(Trend::Neutral);
}
let mut sorted_history = history.clone();
Self::sort_chronologically(&mut sorted_history);
let latest_review = sorted_history.viewings.pop().unwrap();
let latest_review = sorted_history.viewings().last().unwrap();
let latest_rating = latest_review.rating().value() as f32;
let previous_sum: u32 = sorted_history
.viewings
.viewings()
.iter()
.map(|r| r.rating().value() as u32)
.sum();
let historical_average = previous_sum as f32 / sorted_history.viewings.len() as f32;
let historical_average = previous_sum as f32 / sorted_history.viewings().len() as f32;
if latest_rating > historical_average {
Ok(Trend::Improved)

View File

@@ -206,3 +206,23 @@ impl PasswordHash {
&self.0
}
}
/// Validated, non-empty URL pointing at a movie poster image.
///
/// Construction trims surrounding whitespace and rejects empty input.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PosterUrl(String);

impl PosterUrl {
    /// Creates a `PosterUrl` from a raw string.
    ///
    /// Leading/trailing whitespace is stripped; an empty or
    /// whitespace-only input yields a `ValidationError`.
    pub fn new(url: String) -> Result<Self, DomainError> {
        let trimmed = url.trim();
        if trimmed.is_empty() {
            return Err(DomainError::ValidationError(
                "Poster URL cannot be empty".into(),
            ));
        }
        Ok(Self(trimmed.to_string()))
    }

    /// The validated URL as a string slice.
    pub fn value(&self) -> &str {
        &self.0
    }
}