Compare commits

...

44 Commits

Author SHA1 Message Date
7a66661932 css 2026-05-04 23:24:04 +02:00
b30a6a102b feat: per-page titles, OG/SEO tags, HOST/PORT env vars, BASE_URL in config 2026-05-04 22:38:58 +02:00
38a3aa6bbf fix: update .gitignore to include db-shm and db-wal files
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 22:23:08 +02:00
3135a15cb3 fix: WAL mode + busy_timeout for SQLite, fix rate limiter TOCTOU race 2026-05-04 22:10:19 +02:00
d083f8ae3d refactor: use constant for minimum password length and API rate limit
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 21:41:07 +02:00
874c406d4a fix: security hardening — SameSite=Strict, Secure cookie flag, password min length, generic registration error, auth rate limiting 2026-05-04 21:38:23 +02:00
78e1f4ef72 clean up 2026-05-04 21:24:44 +02:00
cf74b06b4a fix: use pixel bar heights and show avg rating values in trends chart 2026-05-04 21:22:47 +02:00
317898d51b fix: count distinct movies per user in users list, not total reviews 2026-05-04 21:10:32 +02:00
790bb6fbb5 fix: read BASE_URL from env for RSS channel link 2026-05-04 21:06:51 +02:00
658df38788 fix: move rss_url after user lookup, extract RSS_FEED_LIMIT constant 2026-05-04 21:05:08 +02:00
cff0f854fa feat: point RSS nav link to user feed when on profile page 2026-05-04 21:00:31 +02:00
66ade70273 feat: add GET /users/{id}/feed.rss per-user RSS feed handler 2026-05-04 20:58:20 +02:00
cbd2ac5b3e feat: add rss_url to HtmlPageContext, use it in nav 2026-05-04 20:55:31 +02:00
0433cd4d9b fix: remove unused feed_title from RssAdapter 2026-05-04 20:54:32 +02:00
b5a8ea2395 feat: add title param to render_feed, use dynamic title in RSS adapter 2026-05-04 20:52:07 +02:00
49b79799c1 feat: add user_id filter to GetDiaryQuery and get_diary use case 2026-05-04 20:49:31 +02:00
f4aba551a2 fix: derive heatmap color from primary instead of hardcoded blue 2026-05-04 20:38:13 +02:00
91df35dbd3 fix: count distinct movies in user stats, not total reviews 2026-05-04 20:35:48 +02:00
623f90e43f fix: remove timezone-broken future-date check from Review::new 2026-05-04 20:29:11 +02:00
e28f628c80 fix: remove redundant 'common' section from documentation 2026-05-04 20:28:06 +02:00
60c25d4c24 fix: update test assertion for new default page limit 2026-05-04 20:18:04 +02:00
22aafe99be fix: set domain DEFAULT_LIMIT to 5 for pagination 2026-05-04 20:16:18 +02:00
0ff22cca5f fix: remove email from top bar nav 2026-05-04 20:13:14 +02:00
ccc39e27e4 fix: lower default page limit to 5 2026-05-04 20:11:55 +02:00
76319756f4 feat: add chrono dependency to Cargo.lock 2026-05-04 19:48:40 +02:00
7703227970 fix: add missing trait stubs to test mock impls 2026-05-04 19:23:56 +02:00
b9933bb48d feat: add profile/feed/chart CSS styles 2026-05-04 19:17:39 +02:00
0c48708ce6 fix: has_more overflow, magic constant, remove dead get_index handler 2026-05-04 19:15:42 +02:00
a2a889bced feat: wire activity feed, users list, and profile page handlers 2026-05-04 19:12:06 +02:00
a4846f3bea fix: pagination underflow, remove |safe, move bar_height_pct to adapter 2026-05-04 19:09:28 +02:00
27be840faa fix: adjust domain accessors and template adapter for Askama compatibility 2026-05-04 19:03:48 +02:00
965fc0eda8 feat: add activity feed, users, and profile HTML templates 2026-05-04 19:03:44 +02:00
d700b85337 fix: correct relative_time future guard, heatmap exact match, max clarity 2026-05-04 18:57:17 +02:00
ffbab75910 feat: add Askama template structs for feed/users/profile 2026-05-04 18:55:18 +02:00
dda7c40f7f fix: validate view param, document V1 history load 2026-05-04 18:53:26 +02:00
1b827b1bdd feat: add activity feed/users/profile use cases and port methods 2026-05-04 18:48:16 +02:00
1ee6873a60 fix: address code review issues in SQLite adapter 2026-05-04 18:46:31 +02:00
7352b533ff feat: implement feed/stats/history/trends SQLite queries 2026-05-04 18:42:45 +02:00
85e254fee2 feat: impl UserRepository::list_with_stats 2026-05-04 18:40:58 +02:00
fa8221322d feat: add feed/stats SQLite row types 2026-05-04 18:32:59 +02:00
38da37de55 feat: add feed/profile/stats port methods to repositories 2026-05-04 18:30:01 +02:00
f3dedbad8a fix: use UserId newtype in UserSummary instead of raw Uuid 2026-05-04 18:29:10 +02:00
d468ce131f feat: add feed/profile domain models, extend DiaryFilter 2026-05-04 18:26:55 +02:00
51 changed files with 2193 additions and 890 deletions

View File

@@ -4,3 +4,7 @@ target/
*.db *.db
*.db-shm *.db-shm
*.db-wal *.db-wal
.cargo/
.sqlx/
docs/
dev.db

View File

@@ -1,5 +1,7 @@
DATABASE_URL=sqlite:./dev.db DATABASE_URL=sqlite:./dev.db
BASE_URL=http://localhost:3000
PORT=3000 PORT=3000
SECURE_COOKIES=false
JWT_SECRET= JWT_SECRET=
JWT_TTL_SECONDS= JWT_TTL_SECONDS=
ALLOW_REGISTRATION=true ALLOW_REGISTRATION=true

2
.gitignore vendored
View File

@@ -8,6 +8,8 @@
.env.prod .env.prod
*.db *.db
*.db-shm
*.db-wal
.worktrees/ .worktrees/
.superpowers/ .superpowers/

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews WHERE user_id = ?",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "0cd1a7b7255a0ee753deffab7cbb48027d22900a570b98a636c780cb3e2efd23"
}

View File

@@ -0,0 +1,98 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,\n u.email AS user_email\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n INNER JOIN users u ON u.id = r.user_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
},
{
"name": "user_email",
"ordinal": 13,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false,
false
]
},
"hash": "217854179b4f77897178e6cfae51fb743e5be49ffc59826509be37a7cc81b6ee"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT strftime('%Y-%m', watched_at) AS month\n FROM reviews\n WHERE user_id = ?\n GROUP BY month\n ORDER BY COUNT(*) DESC\n LIMIT 1",
"describe": {
"columns": [
{
"name": "month",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true
]
},
"hash": "4d85f0ff9732576bba77dc84d3885a0002c2b600c34ba4d99f1e1c5e99f35e75"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "5a861b5a934c9831ff17d896fa48feb95e6dab051c5ac55a66f9793482522199"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "8d144859b397a842118c2dc4ab30e74015a814ed8185b6f86fbe39e641ab804e"
}

View File

@@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(DISTINCT movie_id) AS \"total!: i64\",\n AVG(CAST(rating AS REAL)) AS avg_rating\n FROM reviews WHERE user_id = ?",
"describe": {
"columns": [
{
"name": "total!: i64",
"ordinal": 0,
"type_info": "Integer"
},
{
"name": "avg_rating",
"ordinal": 1,
"type_info": "Float"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true
]
},
"hash": "a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.rating DESC, r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "a3f4385bac7f78a9959648fb325d37096c87859ded1762137ce745955f46830c"
}

View File

@@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT m.director AS \"director!\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 5",
"describe": {
"columns": [
{
"name": "director!",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "count!: i64",
"ordinal": 1,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true,
false
]
},
"hash": "aca9e7aaa32c23b4de3f5048d60340e978d31a36be9121da3c59378f2fc1ed8e"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT m.director\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 1",
"describe": {
"columns": [
{
"name": "director",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true
]
},
"hash": "d5d2a81306488a8cee5654cea7e14d76d76ecc7d2190ffb73d12bec2874111d2"
}

View File

@@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "SELECT u.id,\n u.email,\n COUNT(DISTINCT r.movie_id) AS \"total_movies!: i64\",\n AVG(CAST(r.rating AS REAL)) AS avg_rating\n FROM users u\n LEFT JOIN reviews r ON r.user_id = u.id\n GROUP BY u.id, u.email\n ORDER BY u.email ASC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "total_movies!: i64",
"ordinal": 2,
"type_info": "Integer"
},
{
"name": "avg_rating",
"ordinal": 3,
"type_info": "Float"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
true
]
},
"hash": "f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT strftime('%Y-%m', watched_at) AS \"month!\",\n AVG(CAST(rating AS REAL)) AS \"avg_rating!: f64\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews\n WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')\n GROUP BY \"month!\"\n ORDER BY \"month!\" ASC",
"describe": {
"columns": [
{
"name": "month!",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "avg_rating!: f64",
"ordinal": 1,
"type_info": "Float"
},
{
"name": "count!: i64",
"ordinal": 2,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true,
false,
false
]
},
"hash": "fdd5b522f26b5e0ce62f76c774fbb606fd9ee9884f4457831f693a0df3609317"
}

1
Cargo.lock generated
View File

@@ -2831,6 +2831,7 @@ version = "0.1.0"
dependencies = [ dependencies = [
"application", "application",
"askama", "askama",
"chrono",
"domain", "domain",
"serde", "serde",
] ]

View File

@@ -17,7 +17,6 @@ Hexagonal (Ports & Adapters) with Domain-Driven Design:
``` ```
domain — pure types and trait definitions, no external deps domain — pure types and trait definitions, no external deps
common — shared error types
application — use cases / business logic orchestration application — use cases / business logic orchestration
presentation — Axum HTTP router, wires all adapters together presentation — Axum HTTP router, wires all adapters together
adapters/ adapters/

View File

@@ -3,22 +3,21 @@ use domain::models::DiaryEntry;
use rss_feed::{ChannelBuilder, GuidBuilder, ItemBuilder}; use rss_feed::{ChannelBuilder, GuidBuilder, ItemBuilder};
pub struct RssAdapter { pub struct RssAdapter {
feed_title: String,
feed_link: String, feed_link: String,
} }
impl RssAdapter { impl RssAdapter {
pub fn new(feed_title: String, feed_link: String) -> Self { pub fn new(feed_link: String) -> Self {
Self { feed_title, feed_link } Self { feed_link }
} }
} }
impl RssFeedRenderer for RssAdapter { impl RssFeedRenderer for RssAdapter {
fn render_feed(&self, entries: &[DiaryEntry]) -> Result<String, String> { fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String> {
let items = entries let items = entries
.iter() .iter()
.map(|e| { .map(|e| {
let title = format!( let item_title = format!(
"{} ({})", "{} ({})",
e.movie().title().value(), e.movie().title().value(),
e.movie().release_year().value() e.movie().release_year().value()
@@ -38,7 +37,7 @@ impl RssFeedRenderer for RssAdapter {
.permalink(false) .permalink(false)
.build(); .build();
ItemBuilder::default() ItemBuilder::default()
.title(Some(title)) .title(Some(item_title))
.description(Some(description)) .description(Some(description))
.pub_date(Some(pub_date)) .pub_date(Some(pub_date))
.guid(Some(guid)) .guid(Some(guid))
@@ -47,12 +46,31 @@ impl RssFeedRenderer for RssAdapter {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let channel = ChannelBuilder::default() let channel = ChannelBuilder::default()
.title(self.feed_title.clone()) .title(title.to_string())
.link(self.feed_link.clone()) .link(self.feed_link.clone())
.description(self.feed_title.clone()) .description(title.to_string())
.items(items) .items(items)
.build(); .build();
Ok(channel.to_string()) Ok(channel.to_string())
} }
} }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn render_feed_uses_provided_title() {
let adapter = RssAdapter::new("http://example.com".into());
let xml = adapter.render_feed(&[], "Custom Title").unwrap();
assert!(xml.contains("<title>Custom Title</title>"));
}
#[test]
fn render_feed_empty_entries_produces_valid_xml() {
let adapter = RssAdapter::new("http://example.com".into());
let xml = adapter.render_feed(&[], "My Feed").unwrap();
assert!(xml.starts_with("<?xml") || xml.starts_with("<rss"));
}
}

View File

@@ -3,11 +3,12 @@ use domain::{
errors::DomainError, errors::DomainError,
events::DomainEvent, events::DomainEvent,
models::{ models::{
DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, SortDirection, DiaryEntry, DiaryFilter, DirectorStat, FeedEntry, Movie, MonthlyRating,
collections::Paginated, Review, ReviewHistory, SortDirection, UserStats, UserTrends,
collections::{PageParams, Paginated},
}, },
ports::MovieRepository, ports::MovieRepository,
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear, ReviewId}, value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear, ReviewId, UserId},
}; };
use sqlx::SqlitePool; use sqlx::SqlitePool;
@@ -15,10 +16,26 @@ mod migrations;
mod models; mod models;
mod users; mod users;
use models::{DiaryRow, MovieRow, ReviewRow, datetime_to_str}; use models::{
DiaryRow, DirectorCountRow, FeedRow, MonthlyRatingRow, MovieRow, ReviewRow,
UserTotalsRow, datetime_to_str,
};
pub use users::SqliteUserRepository; pub use users::SqliteUserRepository;
fn format_year_month(ym: &str) -> String {
let parts: Vec<&str> = ym.splitn(2, '-').collect();
if parts.len() != 2 { return ym.to_string(); }
let year = parts[0].get(2..).unwrap_or(parts[0]);
let month = match parts[1] {
"01" => "Jan", "02" => "Feb", "03" => "Mar", "04" => "Apr",
"05" => "May", "06" => "Jun", "07" => "Jul", "08" => "Aug",
"09" => "Sep", "10" => "Oct", "11" => "Nov", "12" => "Dec",
_ => parts[1],
};
format!("{} '{}", month, year)
}
pub struct SqliteMovieRepository { pub struct SqliteMovieRepository {
pool: SqlitePool, pool: SqlitePool,
} }
@@ -59,7 +76,8 @@ impl SqliteMovieRepository {
offset: i64, offset: i64,
) -> Result<Vec<DiaryRow>, DomainError> { ) -> Result<Vec<DiaryRow>, DomainError> {
match sort { match sort {
SortDirection::Descending => sqlx::query_as!( // ByRatingDesc only applies to user-scoped queries; falls back to date sort here
SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
DiaryRow, DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path, "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
@@ -99,7 +117,8 @@ impl SqliteMovieRepository {
offset: i64, offset: i64,
) -> Result<Vec<DiaryRow>, DomainError> { ) -> Result<Vec<DiaryRow>, DomainError> {
match sort { match sort {
SortDirection::Descending => sqlx::query_as!( // ByRatingDesc only applies to user-scoped queries; falls back to date sort here
SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
DiaryRow, DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path, "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
@@ -134,6 +153,141 @@ impl SqliteMovieRepository {
.map_err(Self::map_err), .map_err(Self::map_err),
} }
} }
async fn count_user_diary_entries(&self, user_id: &str) -> Result<i64, DomainError> {
sqlx::query_scalar!(
"SELECT COUNT(*) FROM reviews WHERE user_id = ?",
user_id
)
.fetch_one(&self.pool)
.await
.map_err(Self::map_err)
}
async fn fetch_user_diary_rows_by_watched(
&self,
user_id: &str,
limit: i64,
offset: i64,
) -> Result<Vec<DiaryRow>, DomainError> {
sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ?
ORDER BY r.watched_at DESC
LIMIT ? OFFSET ?",
user_id, limit, offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)
}
async fn fetch_user_diary_rows_by_rating(
&self,
user_id: &str,
limit: i64,
offset: i64,
) -> Result<Vec<DiaryRow>, DomainError> {
sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ?
ORDER BY r.rating DESC, r.watched_at DESC
LIMIT ? OFFSET ?",
user_id, limit, offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)
}
async fn count_feed_entries(&self) -> Result<i64, DomainError> {
sqlx::query_scalar!("SELECT COUNT(*) FROM reviews")
.fetch_one(&self.pool)
.await
.map_err(Self::map_err)
}
async fn fetch_feed_rows(
&self,
limit: i64,
offset: i64,
) -> Result<Vec<FeedRow>, DomainError> {
sqlx::query_as!(
FeedRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,
u.email AS user_email
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
INNER JOIN users u ON u.id = r.user_id
ORDER BY r.watched_at DESC
LIMIT ? OFFSET ?",
limit, offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)
}
async fn fetch_user_totals(&self, user_id: &str) -> Result<UserTotalsRow, DomainError> {
sqlx::query_as!(
UserTotalsRow,
r#"SELECT COUNT(DISTINCT movie_id) AS "total!: i64",
AVG(CAST(rating AS REAL)) AS avg_rating
FROM reviews WHERE user_id = ?"#,
user_id
)
.fetch_one(&self.pool)
.await
.map_err(Self::map_err)
}
async fn fetch_user_favorite_director(
&self,
user_id: &str,
) -> Result<Option<String>, DomainError> {
let row = sqlx::query_scalar!(
"SELECT m.director
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ? AND m.director IS NOT NULL
GROUP BY m.director
ORDER BY COUNT(*) DESC
LIMIT 1",
user_id
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?;
Ok(row.flatten())
}
async fn fetch_user_most_active_month(
&self,
user_id: &str,
) -> Result<Option<String>, DomainError> {
let result: Option<Option<String>> = sqlx::query_scalar!(
"SELECT strftime('%Y-%m', watched_at) AS month
FROM reviews
WHERE user_id = ?
GROUP BY month
ORDER BY COUNT(*) DESC
LIMIT 1",
user_id
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?;
Ok(result.flatten())
}
} }
#[async_trait] #[async_trait]
@@ -261,18 +415,36 @@ impl MovieRepository for SqliteMovieRepository {
let limit = filter.page.limit as i64; let limit = filter.page.limit as i64;
let offset = filter.page.offset as i64; let offset = filter.page.offset as i64;
let (total, rows) = match &filter.movie_id { let (total, rows) = match (&filter.movie_id, &filter.user_id) {
None => tokio::try_join!( (None, None) => tokio::try_join!(
self.count_diary_entries(None), self.count_diary_entries(None),
self.fetch_all_diary_rows(&filter.sort_by, limit, offset) self.fetch_all_diary_rows(&filter.sort_by, limit, offset)
)?, )?,
Some(id) => { (Some(id), None) => {
let id_str = id.value().to_string(); let id_str = id.value().to_string();
tokio::try_join!( tokio::try_join!(
self.count_diary_entries(Some(id_str.as_str())), self.count_diary_entries(Some(id_str.as_str())),
self.fetch_movie_diary_rows(&id_str, &filter.sort_by, limit, offset) self.fetch_movie_diary_rows(&id_str, &filter.sort_by, limit, offset)
)? )?
} }
(None, Some(uid)) => {
let uid_str = uid.value().to_string();
match &filter.sort_by {
SortDirection::ByRatingDesc => tokio::try_join!(
self.count_user_diary_entries(&uid_str),
self.fetch_user_diary_rows_by_rating(&uid_str, limit, offset)
)?,
_ => tokio::try_join!(
self.count_user_diary_entries(&uid_str),
self.fetch_user_diary_rows_by_watched(&uid_str, limit, offset)
)?,
}
}
(Some(_), Some(_)) => {
return Err(DomainError::ValidationError(
"Combined movie_id + user_id filter not supported".into(),
));
}
}; };
let items = rows let items = rows
@@ -351,4 +523,119 @@ impl MovieRepository for SqliteMovieRepository {
Ok(ReviewHistory::new(movie, viewings)) Ok(ReviewHistory::new(movie, viewings))
} }
async fn query_activity_feed(
&self,
page: &PageParams,
) -> Result<Paginated<FeedEntry>, DomainError> {
let limit = page.limit as i64;
let offset = page.offset as i64;
let (total, rows) = tokio::try_join!(
self.count_feed_entries(),
self.fetch_feed_rows(limit, offset)
)?;
let items = rows
.into_iter()
.map(FeedRow::to_domain)
.collect::<Result<Vec<_>, _>>()?;
Ok(Paginated {
items,
total_count: total as u64,
limit: page.limit,
offset: page.offset,
})
}
async fn get_user_stats(&self, user_id: &UserId) -> Result<UserStats, DomainError> {
let uid = user_id.value().to_string();
let (totals, fav_director, most_active) = tokio::try_join!(
self.fetch_user_totals(&uid),
self.fetch_user_favorite_director(&uid),
self.fetch_user_most_active_month(&uid)
)?;
let most_active_month = most_active.map(|ym| format_year_month(&ym));
Ok(UserStats {
total_movies: totals.total,
avg_rating: totals.avg_rating,
favorite_director: fav_director,
most_active_month,
})
}
async fn get_user_history(&self, user_id: &UserId) -> Result<Vec<DiaryEntry>, DomainError> {
let uid = user_id.value().to_string();
let rows = sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ?
ORDER BY r.watched_at DESC",
uid
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)?;
rows.into_iter().map(DiaryRow::to_domain).collect()
}
async fn get_user_trends(&self, user_id: &UserId) -> Result<UserTrends, DomainError> {
let uid = user_id.value().to_string();
let (rating_rows, director_rows) = tokio::try_join!(
sqlx::query_as!(
MonthlyRatingRow,
r#"SELECT strftime('%Y-%m', watched_at) AS "month!",
AVG(CAST(rating AS REAL)) AS "avg_rating!: f64",
COUNT(*) AS "count!: i64"
FROM reviews
WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')
GROUP BY "month!"
ORDER BY "month!" ASC"#,
uid
)
.fetch_all(&self.pool),
sqlx::query_as!(
DirectorCountRow,
r#"SELECT m.director AS "director!",
COUNT(*) AS "count!: i64"
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ? AND m.director IS NOT NULL
GROUP BY m.director
ORDER BY COUNT(*) DESC
LIMIT 5"#,
uid
)
.fetch_all(&self.pool)
)
.map_err(Self::map_err)?;
let max_director_count = director_rows.iter().map(|d| d.count).max().unwrap_or(1);
let monthly_ratings = rating_rows
.into_iter()
.map(|r| MonthlyRating {
month_label: format_year_month(&r.month),
year_month: r.month,
avg_rating: r.avg_rating,
count: r.count,
})
.collect();
let top_directors = director_rows
.into_iter()
.map(|d| DirectorStat { director: d.director, count: d.count })
.collect();
Ok(UserTrends { monthly_ratings, top_directors, max_director_count })
}
} }

View File

@@ -1,7 +1,7 @@
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use domain::{ use domain::{
errors::DomainError, errors::DomainError,
models::{DiaryEntry, Movie, Review}, models::{DiaryEntry, FeedEntry, Movie, Review, UserSummary},
value_objects::{ value_objects::{
Comment, ExternalMetadataId, MovieId, MovieTitle, PosterPath, Rating, ReleaseYear, Comment, ExternalMetadataId, MovieId, MovieTitle, PosterPath, Rating, ReleaseYear,
ReviewId, UserId, ReviewId, UserId,
@@ -111,6 +111,85 @@ impl DiaryRow {
} }
} }
// Like DiaryRow but includes user_email from JOIN with users table
#[derive(sqlx::FromRow)]
pub(crate) struct FeedRow {
pub id: String,
pub external_metadata_id: Option<String>,
pub title: String,
pub release_year: i64,
pub director: Option<String>,
pub poster_path: Option<String>,
pub review_id: String,
pub movie_id: String,
pub user_id: String,
pub rating: i64,
pub comment: Option<String>,
pub watched_at: String,
pub created_at: String,
pub user_email: String,
}
impl FeedRow {
pub fn to_domain(self) -> Result<FeedEntry, DomainError> {
let diary = DiaryRow {
id: self.id,
external_metadata_id: self.external_metadata_id,
title: self.title,
release_year: self.release_year,
director: self.director,
poster_path: self.poster_path,
review_id: self.review_id,
movie_id: self.movie_id,
user_id: self.user_id,
rating: self.rating,
comment: self.comment,
watched_at: self.watched_at,
created_at: self.created_at,
}
.to_domain()?;
Ok(FeedEntry::new(diary, self.user_email))
}
}
#[derive(sqlx::FromRow)]
pub(crate) struct UserSummaryRow {
pub id: String,
pub email: String,
pub total_movies: i64,
pub avg_rating: Option<f64>,
}
impl UserSummaryRow {
pub fn to_domain(self) -> Result<UserSummary, DomainError> {
Ok(UserSummary {
user_id: UserId::from_uuid(parse_uuid(&self.id)?),
email: self.email,
total_movies: self.total_movies,
avg_rating: self.avg_rating,
})
}
}
#[derive(sqlx::FromRow)]
pub(crate) struct UserTotalsRow {
pub total: i64,
pub avg_rating: Option<f64>,
}
#[derive(sqlx::FromRow)]
pub(crate) struct DirectorCountRow {
pub director: String,
pub count: i64,
}
#[derive(sqlx::FromRow)]
pub(crate) struct MonthlyRatingRow {
pub month: String,
pub avg_rating: f64,
pub count: i64,
}
pub(crate) fn parse_uuid(s: &str) -> Result<Uuid, DomainError> { pub(crate) fn parse_uuid(s: &str) -> Result<Uuid, DomainError> {
Uuid::parse_str(s) Uuid::parse_str(s)
.map_err(|e| DomainError::InfrastructureError(format!("Invalid UUID '{}': {}", s, e))) .map_err(|e| DomainError::InfrastructureError(format!("Invalid UUID '{}': {}", s, e)))

View File

@@ -8,6 +8,7 @@ use domain::{
ports::UserRepository, ports::UserRepository,
value_objects::{Email, PasswordHash, UserId}, value_objects::{Email, PasswordHash, UserId},
}; };
use super::models::UserSummaryRow;
pub struct SqliteUserRepository { pub struct SqliteUserRepository {
pool: SqlitePool, pool: SqlitePool,
@@ -97,6 +98,26 @@ impl UserRepository for SqliteUserRepository {
} }
} }
} }
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> {
sqlx::query_as!(
UserSummaryRow,
r#"SELECT u.id,
u.email,
COUNT(DISTINCT r.movie_id) AS "total_movies!: i64",
AVG(CAST(r.rating AS REAL)) AS avg_rating
FROM users u
LEFT JOIN reviews r ON r.user_id = u.id
GROUP BY u.id, u.email
ORDER BY u.email ASC"#
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)?
.into_iter()
.map(UserSummaryRow::to_domain)
.collect()
}
} }
#[cfg(test)] #[cfg(test)]

View File

@@ -7,6 +7,7 @@ edition = "2024"
askama = { version = "0.16.0" } askama = { version = "0.16.0" }
serde = { workspace = true } serde = { workspace = true }
chrono = { workspace = true }
domain = { workspace = true } domain = { workspace = true }
application = { workspace = true } application = { workspace = true }

View File

@@ -1,8 +1,13 @@
use askama::Template; use askama::Template;
use chrono::Datelike;
use application::ports::{ use application::ports::{
HtmlPageContext, HtmlRenderer, LoginPageData, NewReviewPageData, RegisterPageData, ActivityFeedPageData, HtmlPageContext, HtmlRenderer, LoginPageData,
NewReviewPageData, ProfilePageData, RegisterPageData, UsersPageData,
};
use domain::models::{
DiaryEntry, FeedEntry, MonthActivity, MonthlyRating, UserStats, UserSummary, UserTrends,
collections::Paginated,
}; };
use domain::models::{DiaryEntry, collections::Paginated};
#[derive(Template)] #[derive(Template)]
#[template(path = "diary.html")] #[template(path = "diary.html")]
@@ -35,6 +40,95 @@ struct NewReviewTemplate<'a> {
ctx: &'a HtmlPageContext, ctx: &'a HtmlPageContext,
} }
/// Askama context for the global activity feed page.
#[derive(Template)]
#[template(path = "activity_feed.html")]
struct ActivityFeedTemplate<'a> {
    entries: &'a [FeedEntry],
    // Paging state driving the prev/next links.
    current_offset: u32,
    limit: u32,
    has_more: bool,
    ctx: &'a HtmlPageContext,
}
/// Askama context for the members list page.
#[derive(Template)]
#[template(path = "users.html")]
struct UsersTemplate<'a> {
    users: &'a [UserSummary],
    ctx: &'a HtmlPageContext,
}
/// A trends-chart row pairing a monthly rating with its precomputed bar height,
/// so the template itself does no arithmetic.
struct MonthlyRatingRow<'a> {
    rating: &'a MonthlyRating,
    // Pixel height of the CSS bar (rating scaled onto a 60 px column).
    bar_height_px: i64,
}
/// Askama context for the profile page. Only the fields for the active
/// `view` are populated; the others stay None/empty.
#[derive(Template)]
#[template(path = "profile.html")]
struct ProfileTemplate<'a> {
    ctx: &'a HtmlPageContext,
    // Email local part, precomputed so the template never sees the full address.
    profile_display_name: String,
    stats: &'a UserStats,
    // Active tab: "recent", "ratings", "history" or "trends".
    view: &'a str,
    // "recent"/"ratings" views: one page of diary entries.
    entries: Option<&'a Paginated<DiaryEntry>>,
    current_offset: u32,
    has_more: bool,
    limit: u32,
    // "history" view: all entries grouped by month.
    history: Option<&'a Vec<MonthActivity>>,
    // "trends" view: monthly averages plus top directors.
    trends: Option<&'a UserTrends>,
    monthly_rating_rows: Vec<MonthlyRatingRow<'a>>,
    heatmap: Vec<HeatmapCell>,
}
/// One month cell of the yearly activity heatmap.
struct HeatmapCell {
    month_label: String,
    count: i64,
    // CSS opacity: 0.05 marks an empty month, otherwise 0.15..=0.9 scaled by count.
    alpha: f64,
}
/// Formats how long ago `dt` was, in coarse human terms: "just now",
/// "N min ago", "N h ago", "yesterday", "N days ago", and an absolute
/// date ("Mar 4, 2024") once 30 or more days have passed.
/// Future timestamps also render as "just now".
#[allow(dead_code)]
fn relative_time(dt: chrono::NaiveDateTime) -> String {
    let now = chrono::Utc::now().naive_utc();
    let diff = now.signed_duration_since(dt);
    let minutes = diff.num_minutes();
    let hours = diff.num_hours();
    let days = diff.num_days();
    // `minutes < 1` covers both the sub-minute case and negative (future)
    // diffs, so the original separate `num_seconds() <= 0` guard was redundant.
    if minutes < 1 { return "just now".to_string(); }
    if minutes < 60 { return format!("{} min ago", minutes); }
    if hours < 24 { return format!("{} h ago", hours); }
    if days == 1 { return "yesterday".to_string(); }
    if days < 30 { return format!("{} days ago", days); }
    // NOTE: "%-d" (no zero padding) is a chrono-supported specifier.
    dt.format("%b %-d, %Y").to_string()
}
/// Builds the 12-cell month heatmap for the current calendar year.
/// Each cell gets a label, the entry count, and an opacity: 0.05 for empty
/// months, otherwise 0.15 + 0.75 * (count / busiest month's count).
fn build_heatmap(history: &[MonthActivity]) -> Vec<HeatmapCell> {
    let current_year = chrono::Utc::now().year();
    // Hoisted out of the per-month lookup: the original rebuilt (and
    // allocated) the "YYYY-" prefix string on every one of the 12 probes.
    let year_prefix = format!("{}-", current_year);
    let count_for = |m: &str| -> i64 {
        history.iter()
            // year_month is "YYYY-MM"; match when it is this year's prefix + m.
            .find(|a| a.year_month.strip_prefix(&year_prefix) == Some(m))
            .map(|a| a.count)
            .unwrap_or(0)
    };
    let months = [
        ("01", "Jan"), ("02", "Feb"), ("03", "Mar"), ("04", "Apr"),
        ("05", "May"), ("06", "Jun"), ("07", "Jul"), ("08", "Aug"),
        ("09", "Sep"), ("10", "Oct"), ("11", "Nov"), ("12", "Dec"),
    ];
    let counts: Vec<i64> = months.iter().map(|(m, _)| count_for(m)).collect();
    // .max(1) avoids dividing by zero in a year with no activity.
    let max = counts.iter().copied().max().unwrap_or(0).max(1);
    months.iter().zip(counts.iter()).map(|((_, label), &count)| {
        let alpha = if count == 0 { 0.05 } else { 0.15 + 0.75 * (count as f64 / max as f64) };
        HeatmapCell {
            month_label: label.to_string(),
            count,
            alpha,
        }
    }).collect()
}
/// Maps an average rating (0.0–5.0) onto a chart bar height in pixels,
/// where a perfect 5.0 fills the full 60 px column (fraction truncated).
fn bar_height_px(avg_rating: f64) -> i64 {
    let fraction_of_max = avg_rating / 5.0;
    (fraction_of_max * 60.0) as i64
}
pub struct AskamaHtmlRenderer; pub struct AskamaHtmlRenderer;
impl AskamaHtmlRenderer { impl AskamaHtmlRenderer {
@@ -83,4 +177,55 @@ impl HtmlRenderer for AskamaHtmlRenderer {
.render() .render()
.map_err(|e| e.to_string()) .map_err(|e| e.to_string())
} }
/// Renders the public activity feed page from pre-paged feed entries;
/// the paging fields drive the template's prev/next links.
fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String> {
    ActivityFeedTemplate {
        entries: &data.entries.items,
        current_offset: data.current_offset,
        limit: data.limit,
        has_more: data.has_more,
        ctx: &data.ctx,
    }
    .render()
    // Askama errors are surfaced to the caller as plain strings.
    .map_err(|e| e.to_string())
}
/// Renders the members list page from the per-user summaries.
fn render_users_page(&self, data: UsersPageData) -> Result<String, String> {
    UsersTemplate {
        users: &data.users,
        ctx: &data.ctx,
    }
    .render()
    .map_err(|e| e.to_string())
}
/// Renders a user profile page. Derives the template-only values first:
/// heatmap cells (history view), the display name (email local part),
/// and per-month bar heights in pixels (trends view).
fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String> {
    let heatmap = data.history.as_deref()
        .map(|h| build_heatmap(h))
        .unwrap_or_default();
    // Only the part before '@' is shown publicly.
    let profile_display_name = data.profile_user_email
        .split('@').next().unwrap_or(&data.profile_user_email).to_string();
    // Precompute pixel heights so the template stays arithmetic-free.
    // Note: rows borrow from data.trends, so they must be built before
    // ProfileTemplate takes its own borrows below.
    let monthly_rating_rows: Vec<MonthlyRatingRow<'_>> = data.trends.as_ref()
        .map(|t| t.monthly_ratings.iter().map(|r| MonthlyRatingRow {
            bar_height_px: bar_height_px(r.avg_rating),
            rating: r,
        }).collect())
        .unwrap_or_default();
    ProfileTemplate {
        ctx: &data.ctx,
        profile_display_name,
        stats: &data.stats,
        view: &data.view,
        entries: data.entries.as_ref(),
        current_offset: data.current_offset,
        has_more: data.has_more,
        limit: data.limit,
        history: data.history.as_ref(),
        trends: data.trends.as_ref(),
        monthly_rating_rows,
        heatmap,
    }
    .render()
    .map_err(|e| e.to_string())
}
} }

View File

@@ -0,0 +1,50 @@
{% extends "base.html" %}
{% block content %}
{# Site-wide activity feed: latest reviews from every user, paginated. #}
<div class="diary">
{% for entry in entries %}
<article class="entry">
{% if let Some(poster) = entry.movie().poster_path() %}
<div class="poster">
<img src="/posters/{{ poster.value() }}" alt="">
</div>
{% endif %}
<div class="entry-body">
<div class="entry-title">
{{ entry.movie().title().value() }}
<span class="year">({{ entry.movie().release_year().value() }})</span>
</div>
{% if let Some(dir) = entry.movie().director() %}
<div class="director">{{ dir }}</div>
{% endif %}
<div class="rating">
{% for filled in entry.review().stars() %}
<span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
{% endfor %}
</div>
{% if let Some(comment) = entry.review().comment() %}
<div class="comment">{{ comment.value() }}</div>
{% endif %}
<div class="feed-meta">
{# Author attribution: display name links to the author's profile page. #}
<a href="/users/{{ entry.review().user_id().value() }}" class="feed-user">{{ entry.user_display_name() }}</a>
<span class="feed-time">{{ entry.review().watched_at().format("%b %-d, %Y") }}</span>
</div>
{# Delete shown only on the viewer's own reviews.
   NOTE(review): assumes the POST handler re-checks ownership — confirm. #}
{% if ctx.is_current_user(entry.review().user_id().value()) %}
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
<button type="submit">Delete</button>
</form>
{% endif %}
</div>
</article>
{% else %}
{# Askama for-else: rendered when `entries` is empty. #}
<p class="empty">No movies logged yet.</p>
{% endfor %}
</div>
{# Offset pagination: Prev hidden on the first page, Next driven by has_more. #}
<nav class="pagination">
{% if current_offset >= limit %}
<a href="/?offset={{ current_offset - limit }}">&larr; Prev</a>
{% endif %}
{% if has_more %}
<a href="/?offset={{ current_offset + limit }}">Next &rarr;</a>
{% endif %}
</nav>
{% endblock %}

View File

@@ -3,7 +3,15 @@
<head> <head>
<meta charset="UTF-8"> <meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="viewport" content="width=device-width, initial-scale=1">
<title>Movies Diary</title> <title>{{ ctx.page_title }}</title>
<meta name="description" content="A personal movie diary — track what you watch, rate and review films.">
<meta property="og:type" content="website">
<meta property="og:site_name" content="Movies Diary">
<meta property="og:title" content="{{ ctx.page_title }}">
<meta property="og:url" content="{{ ctx.canonical_url }}">
<meta name="twitter:card" content="summary">
<meta name="twitter:title" content="{{ ctx.page_title }}">
<link rel="canonical" href="{{ ctx.canonical_url }}">
<link rel="preconnect" href="https://fonts.googleapis.com"> <link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin> <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet"> <link href="https://fonts.googleapis.com/css2?family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet">
@@ -13,10 +21,11 @@
<header> <header>
<a href="/" class="site-title">Movies Diary</a> <a href="/" class="site-title">Movies Diary</a>
<nav> <nav>
<a href="/feed.rss">RSS</a> <a href="/">Feed</a>
<a href="/users">Users</a>
<a href="{{ ctx.rss_url }}">RSS</a>
{% if let Some(email) = ctx.user_email %} {% if let Some(email) = ctx.user_email %}
<a href="/reviews/new">Add Review</a> <a href="/reviews/new">Add Review</a>
<span class="user-email">{{ email }}</span>
<a href="/logout">Logout</a> <a href="/logout">Logout</a>
{% else %} {% else %}
<a href="/login">Login</a> <a href="/login">Login</a>

View File

@@ -0,0 +1,165 @@
{% extends "base.html" %}
{% block content %}
<div class="profile">
<div class="stats-header">
<div class="profile-name">{{ profile_display_name }}</div>
<div class="stats-grid">
<div class="stat-tile">
<div class="stat-value">{{ stats.total_movies }}</div>
<div class="stat-label">movies</div>
</div>
<div class="stat-tile">
<div class="stat-value">{{ stats.avg_rating_display() }}★</div>
<div class="stat-label">avg rating</div>
</div>
<div class="stat-tile">
<div class="stat-value">{{ stats.favorite_director_display() }}</div>
<div class="stat-label">fav director</div>
</div>
<div class="stat-tile">
<div class="stat-value">{{ stats.most_active_month_display() }}</div>
<div class="stat-label">most active</div>
</div>
</div>
</div>
<div class="view-tabs">
<a href="?view=recent" class="view-tab {% if view == "recent" %}active{% endif %}">Recent</a>
<a href="?view=ratings" class="view-tab {% if view == "ratings" %}active{% endif %}">Top Rated</a>
<a href="?view=history" class="view-tab {% if view == "history" %}active{% endif %}">History</a>
<a href="?view=trends" class="view-tab {% if view == "trends" %}active{% endif %}">Trends</a>
</div>
{% if view == "history" %}
{% if let Some(hist) = history %}
<div class="heatmap-section">
<div class="heatmap-label">Movies watched this year</div>
<div class="heatmap">
{% for cell in heatmap %}
<div class="heatmap-cell" style="--alpha: {{ cell.alpha }}">
<div class="heatmap-count">{{ cell.count }}</div>
<div class="heatmap-month">{{ cell.month_label }}</div>
</div>
{% endfor %}
</div>
</div>
{% for month in hist %}
<div class="history-month">
<h3 class="month-heading">{{ month.month_label }} <span class="month-count">{{ month.count }}</span></h3>
<div class="diary">
{% for entry in month.entries %}
<article class="entry">
{% if let Some(poster) = entry.movie().poster_path() %}
<div class="poster"><img src="/posters/{{ poster.value() }}" alt=""></div>
{% endif %}
<div class="entry-body">
<div class="entry-title">{{ entry.movie().title().value() }} <span class="year">({{ entry.movie().release_year().value() }})</span></div>
{% if let Some(dir) = entry.movie().director() %}<div class="director">{{ dir }}</div>{% endif %}
<div class="rating">
{% for filled in entry.review().stars() %}
<span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
{% endfor %}
</div>
<div class="watched-at">{{ entry.review().watched_at().format("%b %-d") }}</div>
</div>
</article>
{% endfor %}
</div>
</div>
{% else %}
<p class="empty">No movies logged yet.</p>
{% endfor %}
{% endif %}
{% elif view == "trends" %}
{% if let Some(t) = trends %}
<div class="trends-section">
{% if !monthly_rating_rows.is_empty() %}
<div class="chart-block">
<div class="chart-label">Average rating per month</div>
<div class="bar-chart">
{% for row in monthly_rating_rows %}
<div class="bar-col">
<div class="bar-value">{{ "{:.1}"|format(row.rating.avg_rating) }}</div>
<div class="bar-fill" style="height: {{ row.bar_height_px }}px"></div>
<div class="bar-month">{{ row.rating.month_label }}</div>
</div>
{% endfor %}
</div>
</div>
{% endif %}
{% if !t.top_directors.is_empty() %}
<div class="chart-block">
<div class="chart-label">Most watched directors</div>
<div class="director-chart">
{% for d in t.top_directors %}
<div class="director-row">
<div class="director-name">{{ d.director }}</div>
<div class="director-bar">
{% if t.max_director_count > 0 %}
<div class="director-bar-fill" style="width: {{ d.count * 100 / t.max_director_count }}%"></div>
{% else %}
<div class="director-bar-fill" style="width: 0%"></div>
{% endif %}
</div>
<div class="director-count">{{ d.count }}</div>
</div>
{% endfor %}
</div>
</div>
{% endif %}
</div>
{% endif %}
{% else %}
{% if let Some(paged) = entries %}
<div class="diary">
{% for entry in paged.items %}
<article class="entry">
{% if let Some(poster) = entry.movie().poster_path() %}
<div class="poster">
<img src="/posters/{{ poster.value() }}" alt="">
</div>
{% endif %}
<div class="entry-body">
<div class="entry-title">
{{ entry.movie().title().value() }}
<span class="year">({{ entry.movie().release_year().value() }})</span>
</div>
{% if let Some(dir) = entry.movie().director() %}
<div class="director">{{ dir }}</div>
{% endif %}
<div class="rating">
{% for filled in entry.review().stars() %}
<span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
{% endfor %}
</div>
{% if let Some(comment) = entry.review().comment() %}
<div class="comment">{{ comment.value() }}</div>
{% endif %}
<div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
{% if ctx.is_current_user(entry.review().user_id().value()) %}
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
<button type="submit">Delete</button>
</form>
{% endif %}
</div>
</article>
{% else %}
<p class="empty">No reviews yet.</p>
{% endfor %}
</div>
<nav class="pagination">
{% if current_offset >= limit %}
<a href="?view={{ view }}&offset={{ current_offset - limit }}">&larr; Prev</a>
{% endif %}
{% if has_more %}
<a href="?view={{ view }}&offset={{ current_offset + limit }}">Next &rarr;</a>
{% endif %}
</nav>
{% endif %}
{% endif %}
</div>
{% endblock %}

View File

@@ -0,0 +1,18 @@
{% extends "base.html" %}
{% block content %}
{# Members directory: one row per user with their aggregate stats. #}
<div class="users-list">
<h2 class="page-title">Members</h2>
{% for user in users %}
<div class="user-row">
{# Avatar placeholder: uppercased first letter of the display name. #}
<div class="user-avatar">{{ user.initial() }}</div>
<div class="user-info">
<div class="user-name">{{ user.display_name() }}</div>
<div class="user-meta">{{ user.total_movies }} movies · avg {{ user.avg_rating_display() }}★</div>
</div>
<a href="/users/{{ user.user_id.value() }}" class="btn-secondary">View profile →</a>
</div>
{% else %}
{# Rendered only when no users exist. #}
<p class="empty">No users yet.</p>
{% endfor %}
</div>
{% endblock %}

View File

@@ -1,6 +1,7 @@
#[derive(Clone)] #[derive(Clone)]
pub struct AppConfig { pub struct AppConfig {
pub allow_registration: bool, pub allow_registration: bool,
pub base_url: String,
} }
impl AppConfig { impl AppConfig {
@@ -8,6 +9,8 @@ impl AppConfig {
let allow_registration = std::env::var("ALLOW_REGISTRATION") let allow_registration = std::env::var("ALLOW_REGISTRATION")
.map(|v| v == "true" || v == "1") .map(|v| v == "true" || v == "1")
.unwrap_or(false); .unwrap_or(false);
Self { allow_registration } let base_url = std::env::var("BASE_URL")
.unwrap_or_else(|_| "http://localhost:3000".to_string());
Self { allow_registration, base_url }
} }
} }

View File

@@ -253,6 +253,10 @@ mod tests {
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
panic!("unexpected") panic!("unexpected")
} }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
} }
#[async_trait] #[async_trait]
@@ -300,6 +304,10 @@ mod tests {
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
panic!("unexpected") panic!("unexpected")
} }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
} }
#[async_trait] #[async_trait]
@@ -347,6 +355,10 @@ mod tests {
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
panic!("unexpected") panic!("unexpected")
} }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
} }
struct MetaReturnsMovie(Movie); struct MetaReturnsMovie(Movie);

View File

@@ -1,11 +1,20 @@
use uuid::Uuid; use uuid::Uuid;
use domain::models::{DiaryEntry, collections::Paginated}; use domain::models::{DiaryEntry, FeedEntry, MonthActivity, UserStats, UserSummary, UserTrends, collections::Paginated};
pub struct HtmlPageContext { pub struct HtmlPageContext {
pub user_email: Option<String>, pub user_email: Option<String>,
pub user_id: Option<Uuid>, pub user_id: Option<Uuid>,
pub register_enabled: bool, pub register_enabled: bool,
pub rss_url: String,
pub page_title: String,
pub canonical_url: String,
}
impl HtmlPageContext {
pub fn is_current_user(&self, id: Uuid) -> bool {
self.user_id == Some(id)
}
} }
pub struct LoginPageData<'a> { pub struct LoginPageData<'a> {
@@ -23,13 +32,43 @@ pub struct NewReviewPageData<'a> {
pub error: Option<&'a str>, pub error: Option<&'a str>,
} }
/// Everything the HTML renderer needs for the activity feed page.
pub struct ActivityFeedPageData {
    pub ctx: HtmlPageContext,
    pub entries: Paginated<FeedEntry>,
    // Paging state for the prev/next links.
    pub current_offset: u32,
    pub has_more: bool,
    pub limit: u32,
}
/// Everything the HTML renderer needs for the members list page.
pub struct UsersPageData {
    pub ctx: HtmlPageContext,
    pub users: Vec<UserSummary>,
}
/// Everything the HTML renderer needs for a profile page.
/// `view` selects which optional section (entries / history / trends)
/// is populated; the rest stay None.
pub struct ProfilePageData {
    pub ctx: HtmlPageContext,
    pub profile_user_id: Uuid,
    pub profile_user_email: String,
    pub stats: UserStats,
    pub view: String,
    pub entries: Option<Paginated<DiaryEntry>>,
    pub current_offset: u32,
    pub has_more: bool,
    pub limit: u32,
    pub history: Option<Vec<MonthActivity>>,
    pub trends: Option<UserTrends>,
}
pub trait HtmlRenderer: Send + Sync { pub trait HtmlRenderer: Send + Sync {
fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String>; fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String>;
fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String>; fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String>;
fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String>; fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String>;
fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String>; fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String>;
fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String>;
fn render_users_page(&self, data: UsersPageData) -> Result<String, String>;
fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String>;
} }
pub trait RssFeedRenderer: Send + Sync { pub trait RssFeedRenderer: Send + Sync {
fn render_feed(&self, entries: &[DiaryEntry]) -> Result<String, String>; fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String>;
} }

View File

@@ -6,8 +6,23 @@ pub struct GetDiaryQuery {
pub offset: Option<u32>, pub offset: Option<u32>,
pub sort_by: Option<SortDirection>, pub sort_by: Option<SortDirection>,
pub movie_id: Option<Uuid>, pub movie_id: Option<Uuid>,
pub user_id: Option<Uuid>,
} }
pub struct GetReviewHistoryQuery { pub struct GetReviewHistoryQuery {
pub movie_id: Uuid, pub movie_id: Uuid,
} }
/// Paging parameters for the global activity feed; None falls back to
/// the PageParams defaults.
pub struct GetActivityFeedQuery {
    pub limit: Option<u32>,
    pub offset: Option<u32>,
}
/// Marker query for the members list (no parameters yet).
pub struct GetUsersQuery;
/// Parameters for a profile lookup. `view` picks the profile tab
/// ("recent", "ratings", "history" or "trends"); limit/offset only
/// apply to the paged views.
pub struct GetUserProfileQuery {
    pub user_id: Uuid,
    pub view: String,
    pub limit: Option<u32>,
    pub offset: Option<u32>,
}

View File

@@ -0,0 +1,13 @@
use domain::{
errors::DomainError,
models::{FeedEntry, collections::{PageParams, Paginated}},
};
use crate::{context::AppContext, queries::GetActivityFeedQuery};
/// Use case: fetch one page of the global activity feed.
/// Paging is validated/normalized by `PageParams::new` before touching the repo,
/// so invalid limits fail fast as a domain error.
pub async fn execute(
    ctx: &AppContext,
    query: GetActivityFeedQuery,
) -> Result<Paginated<FeedEntry>, DomainError> {
    let page = PageParams::new(query.limit, query.offset)?;
    ctx.repository.query_activity_feed(&page).await
}

View File

@@ -4,7 +4,7 @@ use domain::{
DiaryEntry, DiaryFilter, SortDirection, DiaryEntry, DiaryFilter, SortDirection,
collections::{PageParams, Paginated}, collections::{PageParams, Paginated},
}, },
value_objects::MovieId, value_objects::{MovieId, UserId},
}; };
use crate::{context::AppContext, queries::GetDiaryQuery}; use crate::{context::AppContext, queries::GetDiaryQuery};
@@ -14,16 +14,15 @@ pub async fn execute(
query: GetDiaryQuery, query: GetDiaryQuery,
) -> Result<Paginated<DiaryEntry>, DomainError> { ) -> Result<Paginated<DiaryEntry>, DomainError> {
let page = PageParams::new(query.limit, query.offset)?; let page = PageParams::new(query.limit, query.offset)?;
let movie_id = query.movie_id.map(MovieId::from_uuid); let movie_id = query.movie_id.map(MovieId::from_uuid);
let user_id = query.user_id.map(UserId::from_uuid);
let filter = DiaryFilter { let filter = DiaryFilter {
sort_by: query.sort_by.unwrap_or(SortDirection::Descending), sort_by: query.sort_by.unwrap_or(SortDirection::Descending),
page, page,
movie_id, movie_id,
user_id,
}; };
let paginated_results = ctx.repository.query_diary(&filter).await?; ctx.repository.query_diary(&filter).await
Ok(paginated_results)
} }

View File

@@ -0,0 +1,93 @@
use domain::{
errors::DomainError,
models::{
DiaryEntry, DiaryFilter, MonthActivity, SortDirection, UserStats, UserTrends,
collections::{PageParams, Paginated},
},
value_objects::UserId,
};
use crate::{context::AppContext, queries::GetUserProfileQuery};
/// Result of the profile use case: `stats` is always present; exactly one
/// of the optional sections is filled according to the requested view.
pub struct UserProfileData {
    pub stats: UserStats,
    pub entries: Option<Paginated<DiaryEntry>>,
    pub history: Option<Vec<MonthActivity>>,
    pub trends: Option<UserTrends>,
}
/// Loads profile page data for one user, shaped by the requested `view`:
/// - "recent"  – paginated diary entries, newest first
/// - "ratings" – paginated diary entries, highest rating first
/// - "history" – every entry, grouped per calendar month
/// - "trends"  – monthly rating averages and top directors
///
/// Any other view string yields a ValidationError.
pub async fn execute(
    ctx: &AppContext,
    query: GetUserProfileQuery,
) -> Result<UserProfileData, DomainError> {
    let user_id = UserId::from_uuid(query.user_id);
    // The stats header is shown on every view, so fetch it unconditionally.
    let stats = ctx.repository.get_user_stats(&user_id).await?;
    match query.view.as_str() {
        "history" => {
            // V1: loads all entries into memory. Personal diaries are bounded in size;
            // spec calls for showing every movie grouped by month, so full load is intentional.
            let all_entries = ctx.repository.get_user_history(&user_id).await?;
            let history = group_by_month(all_entries);
            Ok(UserProfileData { stats, entries: None, history: Some(history), trends: None })
        }
        "trends" => {
            let trends = ctx.repository.get_user_trends(&user_id).await?;
            Ok(UserProfileData { stats, entries: None, history: None, trends: Some(trends) })
        }
        // "recent" and "ratings" differ only in sort order — share the paging path
        // instead of duplicating the whole arm.
        view @ ("recent" | "ratings") => {
            let sort_by = if view == "ratings" {
                SortDirection::ByRatingDesc
            } else {
                SortDirection::Descending
            };
            let page = PageParams::new(query.limit, query.offset)?;
            let filter = DiaryFilter {
                sort_by,
                page,
                movie_id: None,
                user_id: Some(user_id),
            };
            let entries = ctx.repository.query_diary(&filter).await?;
            Ok(UserProfileData { stats, entries: Some(entries), history: None, trends: None })
        }
        other => Err(DomainError::ValidationError(format!("unknown view: {}", other))),
    }
}
/// Groups diary entries into per-month buckets, newest month first.
fn group_by_month(entries: Vec<DiaryEntry>) -> Vec<MonthActivity> {
    use std::collections::BTreeMap;
    // Bucket each entry under its "YYYY-MM" watched month.
    let mut buckets: BTreeMap<String, Vec<DiaryEntry>> = BTreeMap::new();
    for entry in entries {
        let key = entry.review().watched_at().format("%Y-%m").to_string();
        buckets.entry(key).or_default().push(entry);
    }
    // BTreeMap iterates oldest→newest; walk it backwards so the most
    // recent month comes first without a post-hoc reverse.
    buckets
        .into_iter()
        .rev()
        .map(|(ym, month_entries)| MonthActivity {
            month_label: format_year_month_long(&ym),
            count: month_entries.len() as i64,
            entries: month_entries,
            year_month: ym,
        })
        .collect()
}
/// Expands a "YYYY-MM" key into a long label, e.g. "2024-03" -> "March 2024".
/// Inputs without a '-' are returned unchanged; an unrecognized month part
/// is kept verbatim ("2024-13" -> "13 2024").
fn format_year_month_long(ym: &str) -> String {
    // `split_once` replaces the splitn/Vec round-trip: no allocation,
    // and the two halves get real names.
    let Some((year, month_num)) = ym.split_once('-') else {
        return ym.to_string();
    };
    let month = match month_num {
        "01" => "January", "02" => "February", "03" => "March", "04" => "April",
        "05" => "May", "06" => "June", "07" => "July", "08" => "August",
        "09" => "September", "10" => "October", "11" => "November", "12" => "December",
        _ => month_num,
    };
    format!("{} {}", month, year)
}

View File

@@ -0,0 +1,9 @@
use domain::{errors::DomainError, models::UserSummary};
use crate::{context::AppContext, queries::GetUsersQuery};
/// Use case: list all members with their aggregate stats for the users page.
/// Pure pass-through to the repository; the query carries no parameters yet.
pub async fn execute(
    ctx: &AppContext,
    _query: GetUsersQuery,
) -> Result<Vec<UserSummary>, DomainError> {
    ctx.user_repository.list_with_stats().await
}

View File

@@ -1,6 +1,9 @@
pub mod delete_review; pub mod delete_review;
pub mod get_activity_feed;
pub mod get_diary; pub mod get_diary;
pub mod get_review_history; pub mod get_review_history;
pub mod get_user_profile;
pub mod get_users;
pub mod log_review; pub mod log_review;
pub mod login; pub mod login;
pub mod register; pub mod register;

View File

@@ -2,15 +2,25 @@ use domain::{errors::DomainError, models::User, value_objects::Email};
use crate::{commands::RegisterCommand, context::AppContext}; use crate::{commands::RegisterCommand, context::AppContext};
const MIN_PASSWORD_LENGTH: usize = 8;
pub async fn execute(ctx: &AppContext, cmd: RegisterCommand) -> Result<(), DomainError> { pub async fn execute(ctx: &AppContext, cmd: RegisterCommand) -> Result<(), DomainError> {
if !ctx.config.allow_registration { if !ctx.config.allow_registration {
return Err(DomainError::Unauthorized("Registration is disabled".into())); return Err(DomainError::Unauthorized("Registration is disabled".into()));
} }
if cmd.password.len() < MIN_PASSWORD_LENGTH {
return Err(DomainError::ValidationError(
"Password must be at least 8 characters".into(),
));
}
let email = Email::new(cmd.email)?; let email = Email::new(cmd.email)?;
if ctx.user_repository.find_by_email(&email).await?.is_some() { if ctx.user_repository.find_by_email(&email).await?.is_some() {
return Err(DomainError::ValidationError("Email already registered".into())); return Err(DomainError::ValidationError(
"Email already registered".into(),
));
} }
let hash = ctx.password_hasher.hash(&cmd.password).await?; let hash = ctx.password_hasher.hash(&cmd.password).await?;

View File

@@ -16,7 +16,7 @@ pub struct PageParams {
impl PageParams { impl PageParams {
const MAX_LIMIT: u32 = 100; const MAX_LIMIT: u32 = 100;
const DEFAULT_LIMIT: u32 = 20; const DEFAULT_LIMIT: u32 = 5;
pub fn new(limit: Option<u32>, offset: Option<u32>) -> Result<Self, DomainError> { pub fn new(limit: Option<u32>, offset: Option<u32>) -> Result<Self, DomainError> {
let l = limit.unwrap_or(Self::DEFAULT_LIMIT); let l = limit.unwrap_or(Self::DEFAULT_LIMIT);

View File

@@ -15,6 +15,7 @@ pub enum SortDirection {
#[default] #[default]
Descending, Descending,
Ascending, Ascending,
ByRatingDesc,
} }
#[derive(Clone, Debug, Default)] #[derive(Clone, Debug, Default)]
@@ -22,6 +23,7 @@ pub struct DiaryFilter {
pub sort_by: SortDirection, pub sort_by: SortDirection,
pub page: PageParams, pub page: PageParams,
pub movie_id: Option<MovieId>, pub movie_id: Option<MovieId>,
pub user_id: Option<UserId>,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -131,14 +133,6 @@ impl Review {
comment: Option<Comment>, comment: Option<Comment>,
watched_at: NaiveDateTime, watched_at: NaiveDateTime,
) -> Result<Self, DomainError> { ) -> Result<Self, DomainError> {
let now = Utc::now().naive_utc();
if watched_at > now {
return Err(DomainError::ValidationError(
"watched_at cannot be in the future".into(),
));
}
Ok(Self { Ok(Self {
id: ReviewId::generate(), id: ReviewId::generate(),
movie_id, movie_id,
@@ -146,7 +140,7 @@ impl Review {
rating, rating,
comment, comment,
watched_at, watched_at,
created_at: now, created_at: Utc::now().naive_utc(),
}) })
} }
@@ -191,6 +185,11 @@ impl Review {
pub fn created_at(&self) -> &NaiveDateTime { pub fn created_at(&self) -> &NaiveDateTime {
&self.created_at &self.created_at
} }
/// Returns [star1_filled, star2_filled, ..., star5_filled] for rendering
/// a 5-star row in templates; star i is filled when the rating is >= i.
pub fn stars(&self) -> [bool; 5] {
    let r = self.rating.value();
    [r >= 1, r >= 2, r >= 3, r >= 4, r >= 5]
}
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -270,3 +269,90 @@ impl User {
&self.password_hash &self.password_hash
} }
} }
/// A diary entry enriched with its author's email, for the public activity feed.
#[derive(Clone, Debug)]
pub struct FeedEntry {
    entry: DiaryEntry,
    user_email: String,
}

impl FeedEntry {
    pub fn new(entry: DiaryEntry, user_email: String) -> Self {
        Self { entry, user_email }
    }
    // Delegating accessors into the wrapped diary entry.
    pub fn movie(&self) -> &Movie { self.entry.movie() }
    pub fn review(&self) -> &Review { self.entry.review() }
    pub fn user_email(&self) -> &str { &self.user_email }
    /// Email local part (before '@'); the whole address if there is no '@'.
    pub fn user_display_name(&self) -> &str {
        self.user_email.split('@').next().unwrap_or(&self.user_email)
    }
}
/// Aggregated per-user numbers backing one row of the members list.
#[derive(Clone, Debug)]
pub struct UserSummary {
    pub user_id: UserId,
    pub email: String,
    pub total_movies: i64,
    pub avg_rating: Option<f64>,
}

impl UserSummary {
    /// Email local part (before '@'); the whole address when there is no '@'.
    pub fn display_name(&self) -> &str {
        match self.email.split_once('@') {
            Some((local_part, _domain)) => local_part,
            None => &self.email,
        }
    }

    /// Average rating to one decimal place, or "" when the user has no ratings.
    pub fn avg_rating_display(&self) -> String {
        match self.avg_rating {
            Some(avg) => format!("{:.1}", avg),
            None => String::new(),
        }
    }

    /// Uppercased first character of the display name; '?' for an empty name.
    pub fn initial(&self) -> char {
        let mut chars = self.display_name().chars();
        match chars.next() {
            Some(first) => first.to_ascii_uppercase(),
            None => '?',
        }
    }
}
/// Headline statistics shown in a user's profile header.
#[derive(Clone, Debug)]
pub struct UserStats {
    pub total_movies: i64,
    pub avg_rating: Option<f64>,
    pub favorite_director: Option<String>,
    pub most_active_month: Option<String>,
}

impl UserStats {
    /// Average rating to one decimal place, or "" when there are no ratings.
    pub fn avg_rating_display(&self) -> String {
        match self.avg_rating {
            Some(avg) => format!("{:.1}", avg),
            None => String::new(),
        }
    }

    /// Favorite director's name, or "" when unknown.
    pub fn favorite_director_display(&self) -> &str {
        match &self.favorite_director {
            Some(name) => name.as_str(),
            None => "",
        }
    }

    /// Most active month label, or "" when unknown.
    pub fn most_active_month_display(&self) -> &str {
        match &self.most_active_month {
            Some(label) => label.as_str(),
            None => "",
        }
    }
}
/// All diary entries that fall in one calendar month (profile history view).
#[derive(Clone, Debug)]
pub struct MonthActivity {
    // Sortable bucket key, "YYYY-MM".
    pub year_month: String,
    // Human-readable label, e.g. "March 2024".
    pub month_label: String,
    pub count: i64,
    pub entries: Vec<DiaryEntry>,
}
/// Average rating for one month (profile trends chart).
#[derive(Clone, Debug)]
pub struct MonthlyRating {
    // Sortable bucket key, "YYYY-MM".
    pub year_month: String,
    pub month_label: String,
    pub avg_rating: f64,
    pub count: i64,
}
/// Number of reviewed movies attributed to one director.
#[derive(Clone, Debug)]
pub struct DirectorStat {
    pub director: String,
    pub count: i64,
}
/// Trend data backing the profile "trends" view.
#[derive(Clone, Debug)]
pub struct UserTrends {
    pub monthly_ratings: Vec<MonthlyRating>,
    pub top_directors: Vec<DirectorStat>,
    // Largest count among top_directors; the template scales bar widths by it
    // and guards against the zero case itself.
    pub max_director_count: i64,
}

View File

@@ -4,7 +4,11 @@ use chrono::{DateTime, Utc};
use crate::{ use crate::{
errors::DomainError, errors::DomainError,
events::DomainEvent, events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated}, models::{
DiaryEntry, DiaryFilter, FeedEntry, Movie, Review, ReviewHistory, User, UserStats,
UserTrends, UserSummary,
collections::{PageParams, Paginated},
},
value_objects::{ value_objects::{
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl, Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
ReleaseYear, ReviewId, UserId, ReleaseYear, ReviewId, UserId,
@@ -38,6 +42,17 @@ pub trait MovieRepository: Send + Sync {
async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError>; async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError>;
async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError>; async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError>;
async fn query_activity_feed(
&self,
page: &PageParams,
) -> Result<Paginated<FeedEntry>, DomainError>;
async fn get_user_stats(&self, user_id: &UserId) -> Result<UserStats, DomainError>;
async fn get_user_history(&self, user_id: &UserId) -> Result<Vec<DiaryEntry>, DomainError>;
async fn get_user_trends(&self, user_id: &UserId) -> Result<UserTrends, DomainError>;
} }
pub enum MetadataSearchCriteria { pub enum MetadataSearchCriteria {
@@ -89,6 +104,8 @@ pub trait UserRepository: Send + Sync {
async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError>; async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError>;
async fn save(&self, user: &User) -> Result<(), DomainError>; async fn save(&self, user: &User) -> Result<(), DomainError>;
async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError>; async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError>;
async fn list_with_stats(&self) -> Result<Vec<UserSummary>, DomainError>;
} }
#[async_trait] #[async_trait]

View File

@@ -219,10 +219,18 @@ impl From<DiaryQueryParams> for GetDiaryQuery {
} }
}), }),
movie_id: p.movie_id, movie_id: p.movie_id,
user_id: None,
} }
} }
} }
#[derive(serde::Deserialize, Default)]
pub struct ProfileQueryParams {
pub view: Option<String>,
pub limit: Option<u32>,
pub offset: Option<u32>,
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View File

@@ -102,6 +102,10 @@ mod tests {
async fn get_review_by_id(&self, _: &ReviewId) -> Result<Option<Review>, DomainError> { panic!("unexpected") } async fn get_review_by_id(&self, _: &ReviewId) -> Result<Option<Review>, DomainError> { panic!("unexpected") }
async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> { panic!("unexpected") } async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> { panic!("unexpected") }
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { panic!("unexpected") } async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { panic!("unexpected") }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
async fn get_user_stats(&self, _: &UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
async fn get_user_history(&self, _: &UserId) -> Result<Vec<DiaryEntry>, DomainError> { panic!("unexpected") }
async fn get_user_trends(&self, _: &UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
} }
#[async_trait] #[async_trait]
@@ -138,6 +142,7 @@ mod tests {
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") } async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") }
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") } async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") }
async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<User>, DomainError> { panic!("unexpected") } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<User>, DomainError> { panic!("unexpected") }
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> { panic!("unexpected") }
} }
#[async_trait] #[async_trait]
@@ -155,7 +160,7 @@ mod tests {
auth_service: Arc::new(PanicAuth), auth_service: Arc::new(PanicAuth),
password_hasher: Arc::new(PanicHasher), password_hasher: Arc::new(PanicHasher),
user_repository: Arc::new(PanicUserRepo), user_repository: Arc::new(PanicUserRepo),
config: AppConfig { allow_registration: false }, config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
} }
} }

View File

@@ -134,6 +134,10 @@ mod tests {
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() } async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() } async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() } async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
} }
struct PanicRenderer; struct PanicRenderer;
@@ -142,11 +146,14 @@ mod tests {
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() } fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() } fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() } fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
} }
struct PanicRssRenderer; struct PanicRssRenderer;
impl crate::ports::RssFeedRenderer for PanicRssRenderer { impl crate::ports::RssFeedRenderer for PanicRssRenderer {
fn render_feed(&self, _: &[domain::models::DiaryEntry]) -> Result<String, String> { panic!() } fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
} }
struct PanicMeta; struct PanicFetcher; struct PanicStorage; struct PanicEvent; struct PanicHasher; struct PanicAuth; struct PanicUserRepo; struct PanicMeta; struct PanicFetcher; struct PanicStorage; struct PanicEvent; struct PanicHasher; struct PanicAuth; struct PanicUserRepo;
@@ -156,7 +163,7 @@ mod tests {
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
let state = crate::state::AppState { let state = crate::state::AppState {
app_ctx: AppContext { app_ctx: AppContext {
@@ -168,7 +175,7 @@ mod tests {
auth_service: Arc::new(PanicAuth), auth_service: Arc::new(PanicAuth),
password_hasher: Arc::new(PanicHasher), password_hasher: Arc::new(PanicHasher),
user_repository: Arc::new(PanicUserRepo), user_repository: Arc::new(PanicUserRepo),
config: application::config::AppConfig { allow_registration: false }, config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
}, },
html_renderer: Arc::new(PanicRenderer), html_renderer: Arc::new(PanicRenderer),
rss_renderer: Arc::new(PanicRssRenderer), rss_renderer: Arc::new(PanicRssRenderer),
@@ -237,6 +244,10 @@ mod tests {
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() } async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() } async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() } async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
} }
struct PanicMeta2; struct PanicFetcher2; struct PanicStorage2; struct PanicEvent2; struct PanicHasher2; struct PanicUserRepo2; struct PanicMeta2; struct PanicFetcher2; struct PanicStorage2; struct PanicEvent2; struct PanicHasher2; struct PanicUserRepo2;
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta2 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta2 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
@@ -245,17 +256,20 @@ mod tests {
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent2 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent2 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher2 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher2 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth2 { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth2 { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo2 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo2 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
struct PanicRenderer2; struct PanicRenderer2;
impl crate::ports::HtmlRenderer for PanicRenderer2 { impl crate::ports::HtmlRenderer for PanicRenderer2 {
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() } fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() } fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() } fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() } fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
} }
struct PanicRssRenderer2; struct PanicRssRenderer2;
impl crate::ports::RssFeedRenderer for PanicRssRenderer2 { impl crate::ports::RssFeedRenderer for PanicRssRenderer2 {
fn render_feed(&self, _: &[domain::models::DiaryEntry]) -> Result<String, String> { panic!() } fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
} }
struct PanicAuth2; struct PanicAuth2;
crate::state::AppState { crate::state::AppState {
@@ -268,7 +282,7 @@ mod tests {
auth_service: Arc::new(PanicAuth2), auth_service: Arc::new(PanicAuth2),
password_hasher: Arc::new(PanicHasher2), password_hasher: Arc::new(PanicHasher2),
user_repository: Arc::new(PanicUserRepo2), user_repository: Arc::new(PanicUserRepo2),
config: application::config::AppConfig { allow_registration: false }, config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
}, },
html_renderer: Arc::new(PanicRenderer2), html_renderer: Arc::new(PanicRenderer2),
rss_renderer: Arc::new(PanicRssRenderer2), rss_renderer: Arc::new(PanicRssRenderer2),
@@ -291,6 +305,10 @@ mod tests {
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() } async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() } async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() } async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
} }
struct PanicMeta3; struct PanicFetcher3; struct PanicStorage3; struct PanicEvent3; struct PanicHasher3; struct PanicUserRepo3; struct PanicMeta3; struct PanicFetcher3; struct PanicStorage3; struct PanicEvent3; struct PanicHasher3; struct PanicUserRepo3;
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta3 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta3 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
@@ -298,17 +316,20 @@ mod tests {
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage3 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage3 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent3 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent3 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher3 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher3 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo3 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo3 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
struct PanicRenderer3; struct PanicRenderer3;
impl crate::ports::HtmlRenderer for PanicRenderer3 { impl crate::ports::HtmlRenderer for PanicRenderer3 {
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() } fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() } fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() } fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() } fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
} }
struct PanicRssRenderer3; struct PanicRssRenderer3;
impl crate::ports::RssFeedRenderer for PanicRssRenderer3 { impl crate::ports::RssFeedRenderer for PanicRssRenderer3 {
fn render_feed(&self, _: &[domain::models::DiaryEntry]) -> Result<String, String> { panic!() } fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
} }
crate::state::AppState { crate::state::AppState {
app_ctx: AppContext { app_ctx: AppContext {
@@ -320,7 +341,7 @@ mod tests {
auth_service: Arc::new(RejectingAuth), auth_service: Arc::new(RejectingAuth),
password_hasher: Arc::new(PanicHasher3), password_hasher: Arc::new(PanicHasher3),
user_repository: Arc::new(PanicUserRepo3), user_repository: Arc::new(PanicUserRepo3),
config: application::config::AppConfig { allow_registration: false }, config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
}, },
html_renderer: Arc::new(PanicRenderer3), html_renderer: Arc::new(PanicRenderer3),
rss_renderer: Arc::new(PanicRssRenderer3), rss_renderer: Arc::new(PanicRssRenderer3),

View File

@@ -1,3 +1,6 @@
const DEFAULT_PAGE_LIMIT: u32 = 5;
const RSS_FEED_LIMIT: u32 = 50;
pub mod html { pub mod html {
use axum::{ use axum::{
extract::{Path, Query, State}, extract::{Path, Query, State},
@@ -11,13 +14,12 @@ pub mod html {
use application::{ use application::{
commands::{DeleteReviewCommand, LoginCommand, RegisterCommand}, commands::{DeleteReviewCommand, LoginCommand, RegisterCommand},
ports::{HtmlPageContext, LoginPageData, NewReviewPageData, RegisterPageData}, ports::{HtmlPageContext, LoginPageData, NewReviewPageData, RegisterPageData},
use_cases::{delete_review, get_diary, log_review, login as login_uc, register as register_uc}, use_cases::{delete_review, log_review, login as login_uc, register as register_uc},
}; };
use domain::{errors::DomainError, value_objects::UserId}; use domain::{errors::DomainError, value_objects::UserId};
use crate::{ use crate::{
dtos::{DiaryQueryParams, ErrorQuery, LoginForm, LogReviewData, LogReviewForm, RegisterForm}, dtos::{DiaryQueryParams, ErrorQuery, LoginForm, LogReviewData, LogReviewForm, RegisterForm},
errors::ApiError,
extractors::{OptionalCookieUser, RequiredCookieUser}, extractors::{OptionalCookieUser, RequiredCookieUser},
state::AppState, state::AppState,
}; };
@@ -40,6 +42,9 @@ pub mod html {
user_email, user_email,
user_id: uuid, user_id: uuid,
register_enabled: state.app_ctx.config.allow_registration, register_enabled: state.app_ctx.config.allow_registration,
rss_url: "/feed.rss".to_string(),
page_title: "Movies Diary".to_string(),
canonical_url: state.app_ctx.config.base_url.clone(),
} }
} }
@@ -50,27 +55,16 @@ pub mod html {
.replace('"', "%22") .replace('"', "%22")
} }
fn set_cookie_header(token: &str, max_age: i64) -> (axum::http::HeaderName, HeaderValue) { fn secure_flag() -> &'static str {
let val = format!( if std::env::var("SECURE_COOKIES").as_deref() == Ok("true") { "; Secure" } else { "" }
"token={}; HttpOnly; Path=/; SameSite=Lax; Max-Age={}",
token, max_age
);
(SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"))
} }
pub async fn get_index( fn set_cookie_header(token: &str, max_age: i64) -> (axum::http::HeaderName, HeaderValue) {
OptionalCookieUser(user_id): OptionalCookieUser, let val = format!(
State(state): State<AppState>, "token={}; HttpOnly; Path=/; SameSite=Strict; Max-Age={}{}",
Query(params): Query<DiaryQueryParams>, token, max_age, secure_flag()
) -> Result<impl IntoResponse, ApiError> { );
let query = params.into(); (SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"))
let ctx = build_page_context(&state, user_id).await;
let page = get_diary::execute(&state.app_ctx, query).await?;
let html = state
.html_renderer
.render_diary_page(&page, ctx)
.map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
Ok(Html(html))
} }
pub async fn get_login_page( pub async fn get_login_page(
@@ -81,6 +75,9 @@ pub mod html {
user_email: None, user_email: None,
user_id: None, user_id: None,
register_enabled: state.app_ctx.config.allow_registration, register_enabled: state.app_ctx.config.allow_registration,
rss_url: "/feed.rss".to_string(),
page_title: "Login — Movies Diary".to_string(),
canonical_url: format!("{}/login", state.app_ctx.config.base_url),
}; };
let html = state let html = state
.html_renderer .html_renderer
@@ -115,10 +112,8 @@ pub mod html {
} }
pub async fn get_logout() -> impl IntoResponse { pub async fn get_logout() -> impl IntoResponse {
let cookie = ( let val = format!("token=; HttpOnly; Path=/; SameSite=Strict; Max-Age=0{}", secure_flag());
SET_COOKIE, let cookie = (SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"));
HeaderValue::from_static("token=; HttpOnly; Path=/; SameSite=Lax; Max-Age=0"),
);
([cookie], Redirect::to("/")).into_response() ([cookie], Redirect::to("/")).into_response()
} }
@@ -133,6 +128,9 @@ pub mod html {
user_email: None, user_email: None,
user_id: None, user_id: None,
register_enabled: true, register_enabled: true,
rss_url: "/feed.rss".to_string(),
page_title: "Register — Movies Diary".to_string(),
canonical_url: format!("{}/register", state.app_ctx.config.base_url),
}; };
let html = state let html = state
.html_renderer .html_renderer
@@ -172,9 +170,8 @@ pub mod html {
Err(_) => Redirect::to("/login").into_response(), Err(_) => Redirect::to("/login").into_response(),
} }
} }
Err(e) => { Err(_) => {
let msg = encode_error(&e.to_string()); Redirect::to("/register?error=Registration+failed.+Please+try+again.").into_response()
Redirect::to(&format!("/register?error={}", msg)).into_response()
} }
} }
} }
@@ -184,7 +181,9 @@ pub mod html {
State(state): State<AppState>, State(state): State<AppState>,
Query(params): Query<ErrorQuery>, Query(params): Query<ErrorQuery>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let ctx = build_page_context(&state, Some(user_id)).await; let mut ctx = build_page_context(&state, Some(user_id)).await;
ctx.page_title = "Log a Review — Movies Diary".to_string();
ctx.canonical_url = format!("{}/reviews/new", state.app_ctx.config.base_url);
let html = state let html = state
.html_renderer .html_renderer
.render_new_review_page(NewReviewPageData { .render_new_review_page(NewReviewPageData {
@@ -235,6 +234,117 @@ pub mod html {
} }
} }
} }
/// GET / — render the site-wide activity feed (recent reviews across all users).
/// The page is public; the optional cookie only affects nav state for the viewer.
pub async fn get_activity_feed(
OptionalCookieUser(user_id): OptionalCookieUser,
State(state): State<AppState>,
Query(params): Query<DiaryQueryParams>,
) -> impl IntoResponse {
let ctx = build_page_context(&state, user_id).await;
// Pagination comes straight from the query string; defaults are applied downstream.
let query = application::queries::GetActivityFeedQuery {
limit: params.limit,
offset: params.offset,
};
match application::use_cases::get_activity_feed::execute(&state.app_ctx, query).await {
Ok(entries) => {
// Copy limit/offset out before `entries` is moved into the page data below.
let limit = entries.limit;
let offset = entries.offset;
// Widen to u64 with saturating_add so the "next page exists" check cannot overflow.
let has_more = (offset as u64).saturating_add(limit as u64) < entries.total_count;
let data = application::ports::ActivityFeedPageData {
ctx,
current_offset: offset,
has_more,
limit,
entries,
};
match state.html_renderer.render_activity_feed_page(data) {
Ok(html) => Html(html).into_response(),
// Template rendering failures surface as plain-text 500s.
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e).into_response(),
}
}
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
}
}
/// GET /users — render the members list page.
///
/// Public page; the optional cookie only drives the nav state. Any failure in
/// the use case or the template renderer becomes a plain-text 500.
pub async fn get_users_list(
    OptionalCookieUser(user_id): OptionalCookieUser,
    State(state): State<AppState>,
) -> impl IntoResponse {
    let mut ctx = build_page_context(&state, user_id).await;
    ctx.page_title = "Members — Movies Diary".to_string();
    ctx.canonical_url = format!("{}/users", state.app_ctx.config.base_url);

    // Fetch the member summaries; bail out early on repository errors.
    let outcome =
        application::use_cases::get_users::execute(&state.app_ctx, application::queries::GetUsersQuery)
            .await;
    let users = match outcome {
        Ok(users) => users,
        Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
    };

    let rendered = state
        .html_renderer
        .render_users_page(application::ports::UsersPageData { ctx, users });
    match rendered {
        Ok(html) => Html(html).into_response(),
        Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e).into_response(),
    }
}
/// GET /users/{id} — render a user's profile/diary page.
///
/// `user_id` is the (optional) logged-in viewer; `profile_user_uuid` is the
/// profile owner. `view` selects the tab and defaults to "recent".
pub async fn get_user_profile(
OptionalCookieUser(user_id): OptionalCookieUser,
State(state): State<AppState>,
Path(profile_user_uuid): Path<Uuid>,
Query(params): Query<crate::dtos::ProfileQueryParams>,
) -> impl IntoResponse {
let mut ctx = build_page_context(&state, user_id).await;
let view = params.view.unwrap_or_else(|| "recent".to_string());
// 404 before running any profile queries when the user does not exist.
let profile_user = match state.app_ctx.user_repository
.find_by_id(&domain::value_objects::UserId::from_uuid(profile_user_uuid))
.await
{
Ok(Some(u)) => u,
Ok(None) => return (StatusCode::NOT_FOUND, "User not found").into_response(),
Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
};
// Display name is the local part of the email address ("alice" from "alice@x.y").
let display_name = profile_user.email().value()
.split('@').next().unwrap_or("User");
ctx.page_title = format!("{}'s Diary — Movies Diary", display_name);
ctx.canonical_url = format!("{}/users/{}", state.app_ctx.config.base_url, profile_user_uuid);
let query = application::queries::GetUserProfileQuery {
user_id: profile_user_uuid,
view: view.clone(),
limit: params.limit,
offset: params.offset,
};
match application::use_cases::get_user_profile::execute(&state.app_ctx, query).await {
Ok(profile) => {
// Pagination only exists for views that return entries; other views
// (e.g. stats-only) fall back to (0, false, DEFAULT_PAGE_LIMIT).
let (offset, has_more, limit) = profile.entries.as_ref()
.map(|e| {
let has_more = (e.offset as u64).saturating_add(e.limit as u64) < e.total_count;
(e.offset, has_more, e.limit)
})
.unwrap_or((0, false, super::DEFAULT_PAGE_LIMIT));
// Point the nav RSS link at this user's personal feed instead of the global one.
ctx.rss_url = format!("/users/{}/feed.rss", profile_user_uuid);
let data = application::ports::ProfilePageData {
ctx,
profile_user_id: profile_user_uuid,
profile_user_email: profile_user.email().value().to_string(),
stats: profile.stats,
view,
entries: profile.entries,
current_offset: offset,
has_more,
limit,
history: profile.history,
trends: profile.trends,
};
match state.html_renderer.render_profile_page(data) {
Ok(html) => Html(html).into_response(),
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e).into_response(),
}
}
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
}
}
} }
pub mod posters { pub mod posters {
@@ -270,30 +380,64 @@ pub mod posters {
pub mod rss { pub mod rss {
use axum::{ use axum::{
extract::State, extract::{Path, State},
http::header, http::header,
response::IntoResponse, response::IntoResponse,
}; };
use uuid::Uuid;
use application::{queries::GetDiaryQuery, use_cases::get_diary}; use application::{queries::GetDiaryQuery, use_cases::get_diary};
use domain::{errors::DomainError, models::SortDirection}; use domain::{errors::DomainError, models::SortDirection, value_objects::UserId};
use crate::{errors::ApiError, state::AppState}; use crate::{errors::ApiError, state::AppState};
pub async fn get_feed(State(state): State<AppState>) -> Result<impl IntoResponse, ApiError> { pub async fn get_feed(State(state): State<AppState>) -> Result<impl IntoResponse, ApiError> {
let query = GetDiaryQuery { let query = GetDiaryQuery {
limit: Some(50), limit: Some(super::RSS_FEED_LIMIT),
offset: Some(0), offset: Some(0),
sort_by: Some(SortDirection::Descending), sort_by: Some(SortDirection::Descending),
movie_id: None, movie_id: None,
user_id: None,
}; };
let page = get_diary::execute(&state.app_ctx, query).await?; let page = get_diary::execute(&state.app_ctx, query).await?;
let xml = state let xml = state
.rss_renderer .rss_renderer
.render_feed(&page.items) .render_feed(&page.items, "Movie Diary")
.map_err(|e| ApiError(DomainError::InfrastructureError(e)))?; .map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
Ok(([(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], xml)) Ok(([(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], xml))
} }
/// GET /users/{id}/feed.rss — per-user RSS feed of their latest diary entries.
///
/// Responds 404 (NotFound) when the user does not exist rather than serving
/// an empty feed for an arbitrary UUID.
pub async fn get_user_feed(
    State(state): State<AppState>,
    Path(user_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
    let user = state
        .app_ctx
        .user_repository
        .find_by_id(&UserId::from_uuid(user_id))
        .await
        .map_err(ApiError)?
        .ok_or_else(|| ApiError(DomainError::NotFound(format!("User {user_id}"))))?;

    // Newest-first page of this user's entries, capped at the shared feed limit.
    let page = get_diary::execute(
        &state.app_ctx,
        GetDiaryQuery {
            limit: Some(super::RSS_FEED_LIMIT),
            offset: Some(0),
            sort_by: Some(SortDirection::Descending),
            movie_id: None,
            user_id: Some(user_id),
        },
    )
    .await?;

    // Feed title uses the local part of the user's email as a display name.
    let display_name = user.email().value().split('@').next().unwrap_or("User");
    let title = format!("{}'s Movie Diary", display_name);
    let xml = state
        .rss_renderer
        .render_feed(&page.items, &title)
        .map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
    Ok(([(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], xml))
}
} }
pub mod api { pub mod api {

View File

@@ -32,8 +32,11 @@ async fn main() -> anyhow::Result<()> {
let app = routes::build_router(state); let app = routes::build_router(state);
let listener = TcpListener::bind("0.0.0.0:3000").await?; let host = std::env::var("HOST").unwrap_or_else(|_| "0.0.0.0".to_string());
tracing::info!("Listening on 0.0.0.0:3000"); let port = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string());
let addr = format!("{}:{}", host, port);
let listener = TcpListener::bind(&addr).await?;
tracing::info!("Listening on {}", addr);
axum::serve(listener, app).await?; axum::serve(listener, app).await?;
Ok(()) Ok(())
@@ -48,7 +51,9 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
let database_url = std::env::var("DATABASE_URL").context("DATABASE_URL must be set")?; let database_url = std::env::var("DATABASE_URL").context("DATABASE_URL must be set")?;
let opts = SqliteConnectOptions::from_str(&database_url) let opts = SqliteConnectOptions::from_str(&database_url)
.context("Invalid DATABASE_URL")? .context("Invalid DATABASE_URL")?
.create_if_missing(true); .create_if_missing(true)
.journal_mode(sqlx::sqlite::SqliteJournalMode::Wal)
.busy_timeout(std::time::Duration::from_secs(5));
let pool = SqlitePool::connect_with(opts) let pool = SqlitePool::connect_with(opts)
.await .await
.context("Failed to connect to SQLite database")?; .context("Failed to connect to SQLite database")?;
@@ -109,8 +114,7 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
app_ctx, app_ctx,
html_renderer: Arc::new(AskamaHtmlRenderer::new()), html_renderer: Arc::new(AskamaHtmlRenderer::new()),
rss_renderer: Arc::new(RssAdapter::new( rss_renderer: Arc::new(RssAdapter::new(
"Movie Diary".into(), std::env::var("BASE_URL").unwrap_or_else(|_| "http://localhost:3000".into()),
"http://localhost:3000".into(),
)), )),
}) })
} }

View File

@@ -1,8 +1,52 @@
use axum::{Router, routing}; use std::sync::{
Arc,
atomic::{AtomicU64, Ordering},
};
use std::time::{SystemTime, UNIX_EPOCH};
use axum::{Router, http::StatusCode, middleware, response::IntoResponse, routing};
use tower_http::{services::ServeDir, trace::TraceLayer}; use tower_http::{services::ServeDir, trace::TraceLayer};
use crate::{handlers, state::AppState}; use crate::{handlers, state::AppState};
const API_RATE_LIMIT: u64 = 20; // 20 requests per minute globally for API routes

/// Simple global rate limiter: tracks request count per 60-second window.
/// Not per-IP — suitable for a low-traffic personal app.
#[derive(Clone)]
struct RateLimiter {
    // Current window id = unix_seconds / 60.
    window: Arc<AtomicU64>,
    // Requests counted in the current window.
    count: Arc<AtomicU64>,
    limit: u64,
}

impl RateLimiter {
    fn new(limit: u64) -> Self {
        Self {
            window: Arc::new(AtomicU64::new(0)),
            count: Arc::new(AtomicU64::new(0)),
            limit,
        }
    }

    /// Returns true when this request fits within the per-minute budget.
    ///
    /// Fix vs. previous version: the thread winning the window-reset CAS used
    /// to `store(1)` and return true unconditionally, which (a) admitted a
    /// request even when `limit == 0`, and (b) could clobber increments from
    /// threads racing into the new window. The winner now only zeroes the
    /// counter, and every thread — winner included — counts through the shared
    /// fetch_add below, so the limit check applies uniformly.
    fn check(&self) -> bool {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs()
            / 60;
        let prev = self.window.load(Ordering::Acquire);
        if now != prev
            && self
                .window
                .compare_exchange(prev, now, Ordering::AcqRel, Ordering::Relaxed)
                .is_ok()
        {
            // Only the CAS winner resets; losers fall through and count below.
            self.count.store(0, Ordering::Release);
        }
        // fetch_add returns the previous value; +1 is this request's count.
        self.count.fetch_add(1, Ordering::Relaxed) + 1 <= self.limit
    }
}
pub fn build_router(state: AppState) -> Router { pub fn build_router(state: AppState) -> Router {
Router::new() Router::new()
.merge(html_routes()) .merge(html_routes())
@@ -13,27 +57,73 @@ pub fn build_router(state: AppState) -> Router {
} }
fn html_routes() -> Router<AppState> { fn html_routes() -> Router<AppState> {
Router::new() // Auth routes: 20 requests per minute globally.
.route("/", routing::get(handlers::html::get_index)) let limiter = RateLimiter::new(API_RATE_LIMIT);
let auth = Router::new()
.route( .route(
"/login", "/login",
routing::get(handlers::html::get_login_page) routing::get(handlers::html::get_login_page).post(handlers::html::post_login),
.post(handlers::html::post_login),
) )
.route("/logout", routing::get(handlers::html::get_logout)) .route("/logout", routing::get(handlers::html::get_logout))
.route( .route(
"/register", "/register",
routing::get(handlers::html::get_register_page) routing::get(handlers::html::get_register_page).post(handlers::html::post_register),
.post(handlers::html::post_register), )
.route_layer(middleware::from_fn(
move |req: axum::extract::Request, next: middleware::Next| {
let limiter = limiter.clone();
async move {
if limiter.check() {
next.run(req).await
} else {
StatusCode::TOO_MANY_REQUESTS.into_response()
}
}
},
));
Router::new()
.route("/", routing::get(handlers::html::get_activity_feed))
.route("/users", routing::get(handlers::html::get_users_list))
.route(
"/users/{id}",
routing::get(handlers::html::get_user_profile),
)
.merge(auth)
.route(
"/reviews/new",
routing::get(handlers::html::get_new_review_page),
) )
.route("/reviews/new", routing::get(handlers::html::get_new_review_page))
.route("/reviews", routing::post(handlers::html::post_review)) .route("/reviews", routing::post(handlers::html::post_review))
.route("/reviews/{id}/delete", routing::post(handlers::html::post_delete_review)) .route(
.route("/posters/{path}", routing::get(handlers::posters::get_poster)) "/reviews/{id}/delete",
routing::post(handlers::html::post_delete_review),
)
.route(
"/posters/{path}",
routing::get(handlers::posters::get_poster),
)
.route("/feed.rss", routing::get(handlers::rss::get_feed)) .route("/feed.rss", routing::get(handlers::rss::get_feed))
.route(
"/users/{id}/feed.rss",
routing::get(handlers::rss::get_user_feed),
)
} }
fn api_routes() -> Router<AppState> { fn api_routes() -> Router<AppState> {
let limiter = RateLimiter::new(API_RATE_LIMIT);
let auth_rate_limit =
middleware::from_fn(move |req: axum::extract::Request, next: middleware::Next| {
let limiter = limiter.clone();
async move {
if limiter.check() {
next.run(req).await
} else {
StatusCode::TOO_MANY_REQUESTS.into_response()
}
}
});
Router::new().nest( Router::new().nest(
"/api", "/api",
Router::new() Router::new()
@@ -43,12 +133,16 @@ fn api_routes() -> Router<AppState> {
routing::get(handlers::api::get_review_history), routing::get(handlers::api::get_review_history),
) )
.route("/reviews", routing::post(handlers::api::post_review)) .route("/reviews", routing::post(handlers::api::post_review))
.route("/reviews/{id}", routing::delete(handlers::api::delete_review)) .route(
"/reviews/{id}",
routing::delete(handlers::api::delete_review),
)
.route( .route(
"/movies/{id}/sync-poster", "/movies/{id}/sync-poster",
routing::post(handlers::api::sync_poster), routing::post(handlers::api::sync_poster),
) )
.route("/auth/login", routing::post(handlers::api::login)) .route("/auth/login", routing::post(handlers::api::login))
.route("/auth/register", routing::post(handlers::api::register)), .route("/auth/register", routing::post(handlers::api::register))
.route_layer(auth_rate_limit),
) )
} }

View File

@@ -85,6 +85,7 @@ impl UserRepository for NobodyUserRepo {
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { Ok(None) } async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { Ok(None) }
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!() } async fn save(&self, _: &User) -> Result<(), DomainError> { panic!() }
async fn find_by_id(&self, _: &UserId) -> Result<Option<User>, DomainError> { panic!() } async fn find_by_id(&self, _: &UserId) -> Result<Option<User>, DomainError> { panic!() }
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> { panic!() }
} }
async fn test_app() -> Router { async fn test_app() -> Router {
@@ -104,10 +105,10 @@ async fn test_app() -> Router {
auth_service: Arc::new(PanicAuth), auth_service: Arc::new(PanicAuth),
password_hasher: Arc::new(PanicHasher), password_hasher: Arc::new(PanicHasher),
user_repository: Arc::new(NobodyUserRepo), user_repository: Arc::new(NobodyUserRepo),
config: AppConfig { allow_registration: false }, config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
}, },
html_renderer: Arc::new(AskamaHtmlRenderer::new()), html_renderer: Arc::new(AskamaHtmlRenderer::new()),
rss_renderer: Arc::new(RssAdapter::new("Movie Diary".into(), "http://localhost:3000".into())), rss_renderer: Arc::new(RssAdapter::new("http://localhost:3000".into())),
}; };
routes::build_router(state) routes::build_router(state)
@@ -128,7 +129,7 @@ async fn get_api_diary_returns_empty_list() {
assert_eq!(json["total_count"], 0); assert_eq!(json["total_count"], 0);
assert_eq!(json["items"], serde_json::json!([])); assert_eq!(json["items"], serde_json::json!([]));
assert_eq!(json["limit"], 20); assert_eq!(json["limit"], 5);
assert_eq!(json["offset"], 0); assert_eq!(json["offset"], 0);
} }

View File

@@ -1,620 +0,0 @@
# Event-Driven Poster Sync Implementation Plan
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
**Goal:** Add an `EventHandler` trait to the event-publisher adapter and implement `PosterSyncHandler` so that a `MovieDiscovered` event automatically triggers the existing `sync_poster` use case with exponential-backoff retry.
**Architecture:** `EventWorker` gains a `Vec<Box<dyn EventHandler>>` and fans out each received event to all registered handlers sequentially. `PosterSyncHandler` lives in the `presentation` crate (composition root), holds `AppContext`, and calls `sync_poster::execute` on `MovieDiscovered` events — ignoring all others. Retry is up to 3 retries (4 total attempts) with delays 1s → 2s → 4s.
**Tech Stack:** Rust, tokio::sync::mpsc, async-trait, existing `sync_poster` use case
---
## File Map
| File | Status | Responsibility |
|---|---|---|
| `crates/adapters/event-publisher/src/lib.rs` | Modify | Add `EventHandler` trait; extend `EventWorker` and `create_event_channel` |
| `crates/application/src/commands.rs` | Modify | Add `#[derive(Clone)]` to `SyncPosterCommand` |
| `crates/presentation/src/lib.rs` | Modify | Expose `pub mod event_handlers` |
| `crates/presentation/src/event_handlers.rs` | Create | `PosterSyncHandler` implementation |
| `crates/presentation/src/main.rs` | Modify | Wire `PosterSyncHandler` into `create_event_channel` |
---
## Task 1: Add `EventHandler` trait and update `EventWorker`
**Files:**
- Modify: `crates/adapters/event-publisher/src/lib.rs`
- [ ] **Step 1: Write the failing test**
Add to the bottom of `crates/adapters/event-publisher/src/lib.rs`:
```rust
#[cfg(test)]
mod tests {
use super::*;
use std::sync::{Arc, Mutex};
use async_trait::async_trait;
use domain::{
errors::DomainError,
events::DomainEvent,
value_objects::{ExternalMetadataId, MovieId},
};
struct RecordingHandler {
calls: Arc<Mutex<Vec<String>>>,
}
#[async_trait]
impl EventHandler for RecordingHandler {
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
let label = match event {
DomainEvent::MovieDiscovered { .. } => "movie_discovered",
DomainEvent::ReviewLogged { .. } => "review_logged",
};
self.calls.lock().unwrap().push(label.to_string());
Ok(())
}
}
#[tokio::test]
async fn single_handler_receives_event() {
let calls = Arc::new(Mutex::new(vec![]));
let handler = RecordingHandler { calls: Arc::clone(&calls) };
let config = EventPublisherConfig { channel_buffer: 8 };
let (publisher, worker) = create_event_channel(config, vec![Box::new(handler)]);
tokio::spawn(worker.run());
let event = DomainEvent::MovieDiscovered {
movie_id: MovieId::generate(),
external_metadata_id: ExternalMetadataId::new("tt1234567".into()).unwrap(),
};
publisher.publish(&event).await.unwrap();
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
assert_eq!(*calls.lock().unwrap(), vec!["movie_discovered"]);
}
#[tokio::test]
async fn multiple_handlers_all_receive_event() {
let calls1 = Arc::new(Mutex::new(vec![]));
let calls2 = Arc::new(Mutex::new(vec![]));
let handler1 = RecordingHandler { calls: Arc::clone(&calls1) };
let handler2 = RecordingHandler { calls: Arc::clone(&calls2) };
let config = EventPublisherConfig { channel_buffer: 8 };
let (publisher, worker) = create_event_channel(
config,
vec![Box::new(handler1), Box::new(handler2)],
);
tokio::spawn(worker.run());
let event = DomainEvent::MovieDiscovered {
movie_id: MovieId::generate(),
external_metadata_id: ExternalMetadataId::new("tt9999999".into()).unwrap(),
};
publisher.publish(&event).await.unwrap();
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
assert_eq!(calls1.lock().unwrap().len(), 1);
assert_eq!(calls2.lock().unwrap().len(), 1);
}
#[tokio::test]
async fn handler_error_does_not_stop_worker() {
struct FailingHandler;
#[async_trait]
impl EventHandler for FailingHandler {
async fn handle(&self, _: &DomainEvent) -> Result<(), DomainError> {
Err(DomainError::InfrastructureError("boom".into()))
}
}
let calls = Arc::new(Mutex::new(vec![]));
let good = RecordingHandler { calls: Arc::clone(&calls) };
let config = EventPublisherConfig { channel_buffer: 8 };
let (publisher, worker) = create_event_channel(
config,
vec![Box::new(FailingHandler), Box::new(good)],
);
tokio::spawn(worker.run());
let event = DomainEvent::MovieDiscovered {
movie_id: MovieId::generate(),
external_metadata_id: ExternalMetadataId::new("tt0000001".into()).unwrap(),
};
publisher.publish(&event).await.unwrap();
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
// good handler still ran despite failing handler before it
assert_eq!(calls.lock().unwrap().len(), 1);
}
}
```
- [ ] **Step 2: Run tests to verify they fail**
```bash
cargo test -p event-publisher 2>&1 | tail -20
```
Expected: compile errors — `EventHandler` not defined, `create_event_channel` wrong arity.
- [ ] **Step 3: Replace `lib.rs` with updated implementation**
Replace the full content of `crates/adapters/event-publisher/src/lib.rs` with:
```rust
use async_trait::async_trait;
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
use tokio::sync::mpsc;
pub struct EventPublisherConfig {
pub channel_buffer: usize,
}
impl EventPublisherConfig {
pub fn from_env() -> Self {
let channel_buffer = std::env::var("EVENT_CHANNEL_BUFFER")
.ok()
.and_then(|v| v.parse().ok())
.unwrap_or(128);
Self { channel_buffer }
}
}
#[async_trait]
pub trait EventHandler: Send + Sync {
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
}
pub struct ChannelEventPublisher {
sender: mpsc::Sender<DomainEvent>,
}
#[async_trait]
impl EventPublisher for ChannelEventPublisher {
async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError> {
self.sender
.send(event.clone())
.await
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
}
}
pub struct EventWorker {
receiver: mpsc::Receiver<DomainEvent>,
handlers: Vec<Box<dyn EventHandler>>,
}
impl EventWorker {
pub async fn run(mut self) {
while let Some(event) = self.receiver.recv().await {
match &event {
DomainEvent::ReviewLogged {
review_id,
movie_id,
user_id,
rating,
watched_at,
} => {
tracing::info!(
review_id = %review_id.value(),
movie_id = %movie_id.value(),
user_id = %user_id.value(),
rating = rating.value(),
watched_at = %watched_at,
"event: review_logged"
);
}
DomainEvent::MovieDiscovered {
movie_id,
external_metadata_id,
} => {
tracing::info!(
movie_id = %movie_id.value(),
external_id = external_metadata_id.value(),
"event: movie_discovered"
);
}
}
for handler in &self.handlers {
if let Err(e) = handler.handle(&event).await {
tracing::error!("event handler error: {e}");
}
}
}
tracing::info!("event worker shut down");
}
}
pub fn create_event_channel(
config: EventPublisherConfig,
handlers: Vec<Box<dyn EventHandler>>,
) -> (ChannelEventPublisher, EventWorker) {
let (tx, rx) = mpsc::channel(config.channel_buffer);
(
ChannelEventPublisher { sender: tx },
EventWorker {
receiver: rx,
handlers,
},
)
}
#[cfg(test)]
mod tests {
// paste the test module from Step 1 here
}
```
- [ ] **Step 4: Run tests to verify they pass**
```bash
cargo test -p event-publisher 2>&1 | tail -20
```
Expected: `test result: ok. 3 passed`
- [ ] **Step 5: Commit**
```bash
git add crates/adapters/event-publisher/src/lib.rs
git commit -m "feat(event-publisher): add EventHandler trait and fan-out in EventWorker"
```
---
## Task 2: Derive `Clone` on `SyncPosterCommand`
**Files:**
- Modify: `crates/application/src/commands.rs`
The `PosterSyncHandler` retry loop reconstructs the command on each attempt, which requires `Clone` on `String` (already impl'd) and `Uuid` (Copy) — but it's cleaner to `#[derive(Clone)]` directly.
- [ ] **Step 1: Add `#[derive(Clone)]` to `SyncPosterCommand`**
In `crates/application/src/commands.rs`, find the `SyncPosterCommand` struct (line ~17) and add the derive:
```rust
#[derive(Clone)]
pub struct SyncPosterCommand {
pub movie_id: Uuid,
pub external_metadata_id: String,
}
```
- [ ] **Step 2: Verify it compiles**
```bash
cargo build -p application 2>&1 | tail -10
```
Expected: clean build.
- [ ] **Step 3: Commit**
```bash
git add crates/application/src/commands.rs
git commit -m "feat(application): derive Clone on SyncPosterCommand"
```
---
## Task 3: Implement `PosterSyncHandler`
**Files:**
- Create: `crates/presentation/src/event_handlers.rs`
- Modify: `crates/presentation/src/lib.rs`
- [ ] **Step 1: Write the failing test first — create `event_handlers.rs` with tests only**
Create `crates/presentation/src/event_handlers.rs`:
```rust
use std::time::Duration;
use application::{commands::SyncPosterCommand, context::AppContext, use_cases::sync_poster};
use async_trait::async_trait;
use domain::{errors::DomainError, events::DomainEvent};
use event_publisher::EventHandler;
pub struct PosterSyncHandler {
ctx: AppContext,
max_retries: u32,
}
impl PosterSyncHandler {
pub fn new(ctx: AppContext, max_retries: u32) -> Self {
Self { ctx, max_retries }
}
}
#[async_trait]
impl EventHandler for PosterSyncHandler {
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
todo!()
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::sync::Arc;
use async_trait::async_trait;
use application::config::AppConfig;
use domain::{
errors::DomainError,
events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
ports::{
AuthService, EventPublisher, GeneratedToken, MetadataClient, MetadataSearchCriteria,
MovieRepository, PasswordHasher, PosterFetcherClient, PosterStorage, UserRepository,
},
value_objects::{
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
Rating, ReleaseYear, ReviewId, UserId,
},
};
// Panic stubs — never called in the "ignored event" test path
struct PanicRepo;
struct PanicMetadata;
struct PanicFetcher;
struct PanicStorage;
struct PanicAuth;
struct PanicHasher;
struct PanicUserRepo;
struct NoopPublisher;
#[async_trait]
impl MovieRepository for PanicRepo {
async fn get_movie_by_external_id(&self, _: &ExternalMetadataId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
async fn get_movies_by_title_and_year(&self, _: &MovieTitle, _: &ReleaseYear) -> Result<Vec<Movie>, DomainError> { panic!("unexpected") }
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl MetadataClient for PanicMetadata {
async fn fetch_movie_metadata(&self, _: &MetadataSearchCriteria) -> Result<Movie, DomainError> { panic!("unexpected") }
async fn get_poster_url(&self, _: &ExternalMetadataId) -> Result<Option<PosterUrl>, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl PosterFetcherClient for PanicFetcher {
async fn fetch_poster_bytes(&self, _: &PosterUrl) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl PosterStorage for PanicStorage {
async fn store_poster(&self, _: &MovieId, _: &[u8]) -> Result<PosterPath, DomainError> { panic!("unexpected") }
async fn get_poster(&self, _: &PosterPath) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl AuthService for PanicAuth {
async fn generate_token(&self, _: &UserId) -> Result<GeneratedToken, DomainError> { panic!("unexpected") }
async fn validate_token(&self, _: &str) -> Result<UserId, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl PasswordHasher for PanicHasher {
async fn hash(&self, _: &str) -> Result<PasswordHash, DomainError> { panic!("unexpected") }
async fn verify(&self, _: &str, _: &PasswordHash) -> Result<bool, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl UserRepository for PanicUserRepo {
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") }
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") }
}
#[async_trait]
impl EventPublisher for NoopPublisher {
async fn publish(&self, _: &DomainEvent) -> Result<(), DomainError> { Ok(()) }
}
fn panic_ctx() -> AppContext {
AppContext {
repository: Arc::new(PanicRepo),
metadata_client: Arc::new(PanicMetadata),
poster_fetcher: Arc::new(PanicFetcher),
poster_storage: Arc::new(PanicStorage),
event_publisher: Arc::new(NoopPublisher),
auth_service: Arc::new(PanicAuth),
password_hasher: Arc::new(PanicHasher),
user_repository: Arc::new(PanicUserRepo),
config: AppConfig { allow_registration: false },
}
}
#[tokio::test]
async fn review_logged_is_ignored() {
let handler = PosterSyncHandler::new(panic_ctx(), 3);
let event = DomainEvent::ReviewLogged {
review_id: ReviewId::generate(),
movie_id: MovieId::generate(),
user_id: UserId::generate(),
rating: Rating::new(4).unwrap(),
watched_at: chrono::NaiveDateTime::from_timestamp_opt(0, 0).unwrap(),
};
// returns Ok without touching any panic stubs
assert!(handler.handle(&event).await.is_ok());
}
}
```
- [ ] **Step 2: Expose the module in `lib.rs`**
Add to `crates/presentation/src/lib.rs`:
```rust
pub mod event_handlers;
```
- [ ] **Step 3: Run the test to verify it fails**
```bash
cargo test -p presentation event_handlers 2>&1 | tail -20
```
Expected: compile error or test failure because `handle` is `todo!()`.
- [ ] **Step 4: Implement `handle` in `PosterSyncHandler`**
Replace the `todo!()` body in `crates/presentation/src/event_handlers.rs`:
```rust
#[async_trait]
impl EventHandler for PosterSyncHandler {
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
let (movie_id, external_metadata_id) = match event {
DomainEvent::MovieDiscovered {
movie_id,
external_metadata_id,
} => (movie_id.value(), external_metadata_id.value().to_owned()),
_ => return Ok(()),
};
let mut last_err: Option<DomainError> = None;
for attempt in 0..=self.max_retries {
let cmd = SyncPosterCommand {
movie_id,
external_metadata_id: external_metadata_id.clone(),
};
match sync_poster::execute(&self.ctx, cmd).await {
Ok(()) => return Ok(()),
Err(e) => {
if attempt < self.max_retries {
let delay = Duration::from_secs(2u64.pow(attempt));
tracing::warn!(
attempt = attempt + 1,
max_attempts = self.max_retries + 1,
delay_secs = delay.as_secs(),
"poster sync failed, retrying: {e}"
);
tokio::time::sleep(delay).await;
}
last_err = Some(e);
}
}
}
let err = last_err.unwrap();
tracing::error!(
attempts = self.max_retries + 1,
"poster sync failed after all attempts: {err}"
);
Err(err)
}
}
```
- [ ] **Step 5: Run the test to verify it passes**
```bash
cargo test -p presentation event_handlers 2>&1 | tail -20
```
Expected: `test result: ok. 1 passed`
- [ ] **Step 6: Commit**
```bash
git add crates/presentation/src/event_handlers.rs crates/presentation/src/lib.rs
git commit -m "feat(presentation): implement PosterSyncHandler with retry"
```
---
## Task 4: Wire `PosterSyncHandler` in `main.rs`
**Files:**
- Modify: `crates/presentation/src/main.rs`
- [ ] **Step 1: Add the import**
In `crates/presentation/src/main.rs`, update the import block. The existing line is:
```rust
use event_publisher::{EventPublisherConfig, create_event_channel};
```
Add below it:
```rust
use presentation::event_handlers::PosterSyncHandler;
```
- [ ] **Step 2: Wire the handler**
In `wire_dependencies`, find the two existing lines:
```rust
let (event_publisher, event_worker) = create_event_channel(EventPublisherConfig::from_env());
tokio::spawn(event_worker.run());
```
Replace with:
```rust
let poster_handler = PosterSyncHandler::new(app_ctx.clone(), 3); // 3 retries = 4 total attempts
let (event_publisher, event_worker) = create_event_channel(
EventPublisherConfig::from_env(),
vec![Box::new(poster_handler)],
);
tokio::spawn(event_worker.run());
```
Note: `app_ctx.clone()` is cheap — all fields are `Arc<dyn Trait>`.
- [ ] **Step 3: Build the full workspace**
```bash
cargo build 2>&1 | tail -20
```
Expected: clean build with no errors.
- [ ] **Step 4: Run all tests**
```bash
cargo test 2>&1 | tail -20
```
Expected: all tests pass.
- [ ] **Step 5: Commit**
```bash
git add crates/presentation/src/main.rs
git commit -m "feat(presentation): wire PosterSyncHandler into event worker"
```
---
## Verification
After all tasks complete, smoke-test end-to-end:
```bash
# Start the server
RUST_LOG=info cargo run -p presentation
# In another terminal: log a review for a movie not yet in the DB
# (requires valid JWT — use the existing login endpoint first)
# Watch the server logs for:
# event: movie_discovered movie_id=<uuid> external_id=tt...
# poster sync attempt logs (or success with no retries needed)
```
To confirm the poster was stored, check the configured object store bucket/directory for a file named with the movie's UUID.

View File

@@ -1,115 +0,0 @@
# Event-Driven Poster Sync
**Date:** 2026-05-04
**Status:** Approved
## Problem
The `EventPublisher` infrastructure exists but only logs events via tracing. When a new movie is discovered (`MovieDiscovered` event), its poster should be automatically downloaded and stored — currently this requires a manual `POST /api/movies/{id}/sync-poster` call.
## Scope
- Introduce an `EventHandler` trait for composable event side-effects
- Implement `PosterSyncHandler` that reacts to `MovieDiscovered` by running the existing `sync_poster` use case with retry
- RSS feed is already generated fresh on every request — no event work needed there
## Design
### `EventHandler` trait (in `event-publisher` crate)
```rust
#[async_trait]
pub trait EventHandler: Send + Sync {
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
}
```
Lives in `event-publisher` crate alongside `ChannelEventPublisher`. Depends only on `domain` — no new crate dependencies required.
### `EventWorker` update
`EventWorker` gains a `handlers: Vec<Box<dyn EventHandler>>` field. On each received event:
1. Log the event via tracing (existing behavior, kept as default baseline)
2. Fan out to all handlers concurrently (or sequentially — see note below)
3. Handler errors are logged at ERROR level but do not stop the worker or other handlers
`create_event_channel` signature gains a `handlers` parameter:
```rust
pub fn create_event_channel(
config: EventPublisherConfig,
handlers: Vec<Box<dyn EventHandler>>,
) -> (ChannelEventPublisher, EventWorker)
```
**Fan-out strategy:** sequential for now (simpler, avoids concurrent mutation of shared state). If handler latency becomes a concern, switch to `futures::future::join_all` (note: there is no `tokio::join_all`; `tokio::join!` only works for a fixed number of futures).
### `PosterSyncHandler` (in `presentation` crate)
New file: `crates/presentation/src/event_handlers.rs`
```rust
pub struct PosterSyncHandler {
ctx: AppContext,
max_retries: u32,
}
```
Behavior per event:
- `MovieDiscovered { movie_id, external_metadata_id }` → build `SyncPosterCommand`, call `sync_poster::execute()` with exponential backoff
- All other events → `Ok(())` immediately
**Retry logic:** up to `max_retries` retries after the initial attempt (default: 3, so 4 total attempts), with exponential backoff delays 1s → 2s → 4s before each retry. After exhausting all attempts, logs at ERROR level and returns `Err`.
### Wiring (`main.rs`)
```rust
let poster_handler = PosterSyncHandler::new(app_ctx.clone(), 3); // 3 retries = 4 total attempts
let (event_publisher, event_worker) = create_event_channel(
EventPublisherConfig::from_env(),
vec![Box::new(poster_handler)],
);
tokio::spawn(event_worker.run());
```
`AppContext` is `Clone` (all fields are `Arc<dyn Trait>`), so cloning for the handler is cheap.
## Data Flow
```
POST /api/diary
→ log_review::execute()
→ movie not in DB → fetch metadata → MovieDiscovered published
→ review saved → ReviewLogged published
mpsc channel
EventWorker::run()
→ tracing log
→ PosterSyncHandler::handle(MovieDiscovered)
→ sync_poster::execute() [attempt 1]
    → on failure: sleep 1s → attempt 2
    → on failure: sleep 2s → attempt 3
    → on failure: sleep 4s → attempt 4
    → on failure: log ERROR, done
```
## Files Changed
| File | Change |
|---|---|
| `crates/adapters/event-publisher/src/lib.rs` | Add `EventHandler` trait; update `EventWorker` and `create_event_channel` |
| `crates/presentation/src/event_handlers.rs` | New — `PosterSyncHandler` |
| `crates/presentation/src/main.rs` | Wire `PosterSyncHandler` into `create_event_channel` |
No new crate dependencies. No changes to domain or application layers.
## Verification
```bash
cargo build # full workspace clean build
cargo test # existing tests still pass
# Manual: log a review for a new movie
# → check logs for "event: movie_discovered"
# → check logs for poster sync attempt
# → check object store / storage for saved poster file
```

View File

@@ -1,30 +0,0 @@
# Frontend HTML/CSS Design
**Date:** 2026-05-04
## Summary
Server-rendered HTML frontend using Rust/Axum + Askama templates + HTTP-only cookie JWT auth. No JavaScript.
## Pages
| Route | Access | Description |
|---|---|---|
| GET / | public | Diary index |
| GET /login | public | Login form |
| POST /login | public | Set cookie → redirect / |
| GET /logout | — | Clear cookie → redirect / |
| GET /register | public | Only if ALLOW_REGISTRATION |
| POST /register | public | Set cookie → redirect / |
| GET /reviews/new | auth | New review form |
| POST /reviews | auth | Log review → redirect / |
## Design Decisions
- **Auth:** Cookie-based JWT (HttpOnly, SameSite=Lax). Existing Bearer auth untouched.
- **Template inheritance:** base.html owns header. Child templates use {% extends %}/{% block %}.
- **Entry layout:** Poster thumbnail (60px) + text block. Fallback to text-only when no poster.
- **Header (logged out):** [Login] [Register?]
- **Header (logged in):** [Add Review] email@example.com [Logout]
- **Form errors:** PRG → redirect back with ?error=<msg>
- **Diary visibility:** Public (anyone can read, auth required to add)

View File

@@ -4,32 +4,36 @@ This project is a self-hosted, server-side rendered movie logging system designe
## Core Principles ## Core Principles
* **Zero-JS & Bloat-Free:** The web interface relies strictly on standard HTML form submissions and server-side rendering (via Askama). There is absolutely no JavaScript, no Single Page Application (SPA) overhead, and no client-side state to manage. - **Zero-JS & Bloat-Free:** The web interface relies strictly on standard HTML form submissions and server-side rendering (via Askama). There is absolutely no JavaScript, no Single Page Application (SPA) overhead, and no client-side state to manage.
* **Personal & Embeddable:** It is designed for a single actor. Rather than being a commercial product or a bloated social network, it functions as a highly personal, iframe-ready widget for a personal site. - **Personal & Embeddable:** It is designed for a single actor. Rather than being a commercial product or a bloated social network, it functions as a highly personal, iframe-ready widget for a personal site.
* **Append-Only Ledger:** Reviews are not rows to be updated; they are immutable events. The system tracks a chronological history of viewings for the same movie, separating domain time (when it was watched) from system time (when it was logged), allowing the user to track how their cinematic taste evolves over time. - **Append-Only Ledger:** Reviews are not rows to be updated; they are immutable events. The system tracks a chronological history of viewings for the same movie, separating domain time (when it was watched) from system time (when it was logged), allowing the user to track how their cinematic taste evolves over time.
* **Pristine Architecture:** It strictly adheres to Domain-Driven Design (DDD) and Hexagonal Architecture (Ports and Adapters). The core domain consists of strong value objects and pure business logic, entirely decoupled from external infrastructure like the SQLite database, TMDB API, and Axum HTTP router. - **Pristine Architecture:** It strictly adheres to Domain-Driven Design (DDD) and Hexagonal Architecture (Ports and Adapters). The core domain consists of strong value objects and pure business logic, entirely decoupled from external infrastructure like the SQLite database, TMDB API, and Axum HTTP router.
* **Frictionless "Lazy" Logging:** While the backend is robust, the user experience is minimal. The system automatically fetches rich metadata and poster art in the background via external APIs, requiring only a TMDB ID and a 0-5 rating. It supports both classic HTML forms and a secure REST API for quick terminal or iOS shortcut entries. - **Frictionless "Lazy" Logging:** While the backend is robust, the user experience is minimal. The system automatically fetches rich metadata and poster art in the background via external APIs, requiring only a TMDB ID and a 0-5 rating. It supports both classic HTML forms and a secure REST API for quick terminal or iOS shortcut entries.
* **Old-School Syndication:** Instead of jumping into complex federalized moderation, the project embraces classic, open web standards by generating a native RSS/Atom feed, allowing others to subscribe to the movie diary without needing an account. - **Old-School Syndication:** Instead of jumping into complex federalized moderation, the project embraces classic, open web standards by generating a native RSS/Atom feed, allowing others to subscribe to the movie diary without needing an account.
# common
A tiny utility crate. This holds custom application error types (e.g., `AppError`) built with the `thiserror` crate. Every other crate can use this so the project has unified error handling from the database right up to the HTTP response.
# domain # domain
The absolute center. It has zero dependencies on other workspace crates. It holds pure data structures like `Movie`, `Review`, and `User`. This is also where the project defines the interfaces (Traits) like `MovieRepository`, `MetadataClient`, and `TokenValidator`. It does not know about infrastructure implementations like `SQLite` or JWTs. The absolute center. It has zero dependencies on other workspace crates. It holds pure data structures like `Movie`, `Review`, and `User`. This is also where the project defines the interfaces (Traits) like `MovieRepository`, `MetadataClient`, and `TokenValidator`. It does not know about infrastructure implementations like `SQLite` or JWTs.
# application # application
It sits between web endpoints and domain. It holds "Use Cases" (e.g., `LogNewMovie`, `GetRecentWatches`). When a request comes in, this crate coordinates the workflow: it asks the adapter-meta for the TMDB data, validates the 0-5 rating using domain rules, and tells database adapter to save it. It sits between web endpoints and domain. It holds "Use Cases" (e.g., `LogNewMovie`, `GetRecentWatches`). When a request comes in, this crate coordinates the workflow: it asks the adapter-meta for the TMDB data, validates the 0-5 rating using domain rules, and tells database adapter to save it.
# sqlite # sqlite
`SQLite` and `sqlx` implementation. It implements the `MovieRepository` trait defined in the domain. `SQLite` and `sqlx` implementation. It implements the `MovieRepository` trait defined in the domain.
# metadata # metadata
HTTP client (likely `reqwest`) that talks to `TMDB` or `OMDb`. It implements the `MetadataClient` trait. HTTP client (likely `reqwest`) that talks to `TMDB` or `OMDb`. It implements the `MetadataClient` trait.
# auth # auth
This handles the JWT logic using a crate like `jsonwebtoken`. It issues the tokens when you log in and implements a `TokenValidator` trait to verify claims (like your admin ID) when a request is made. This handles the JWT logic using a crate like `jsonwebtoken`. It issues the tokens when you log in and implements a `TokenValidator` trait to verify claims (like your admin ID) when a request is made.
# presentation # presentation
It wires all the traits and adapters together into Axum's application state. Inside this crate, you can split your routing into two clean modules: It wires all the traits and adapters together into Axum's application state. Inside this crate, you can split your routing into two clean modules:
* `html_routes`: Uses Askama templates, handles standard form submissions, and checks for the JWT in cookies.
* `rest_routes`: Speaks purely in JSON, handles your background API calls, and checks for the JWT in the Bearer header. - `html_routes`: Uses Askama templates, handles standard form submissions, and checks for the JWT in cookies.
- `rest_routes`: Speaks purely in JSON, handles your background API calls, and checks for the JWT in the Bearer header.

View File

@@ -34,6 +34,21 @@ body {
background: url("/static/background.avif") center / cover no-repeat fixed; background: url("/static/background.avif") center / cover no-repeat fixed;
min-height: 100%; min-height: 100%;
line-height: 1.5; line-height: 1.5;
position: relative;
}
body::before {
content: "";
position: fixed;
inset: 0;
background: rgba(0, 0, 0, 0.2);
z-index: 0;
pointer-events: none;
}
body > * {
position: relative;
z-index: 1;
} }
a { a {
@@ -88,11 +103,6 @@ nav a:hover {
box-shadow: 0 0 12px var(--primary-glow); box-shadow: 0 0 12px var(--primary-glow);
} }
.user-email {
color: var(--text-muted);
font-size: 0.85em;
}
/* Diary entries */ /* Diary entries */
.diary { .diary {
display: flex; display: flex;
@@ -386,3 +396,124 @@ form button[type="submit"]:hover {
margin-bottom: 16px; margin-bottom: 16px;
font-size: 0.9em; font-size: 0.9em;
} }
/* ---- Activity feed ---- */
/* Secondary line under each feed entry: user link + timestamp. */
.feed-meta {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  margin-top: 0.25rem;
  font-size: 0.8rem;
  opacity: 0.7;
}
/* Link to the entry's author, accented with the theme primary. */
.feed-user {
  font-weight: 600;
  color: var(--primary);
  text-decoration: none;
}
.feed-user:hover {
  text-decoration: underline;
}
/* Timestamp, further de-emphasised relative to .feed-meta. */
.feed-time {
  opacity: 0.6;
}
/* ---- Users list ---- */
/* Vertical stack of user row cards. */
.users-list { display: flex; flex-direction: column; gap: 0.75rem; }
.page-title { font-size: 1.2rem; font-weight: 700; margin-bottom: 1rem; opacity: 0.9; }
/* One card per user: avatar | name/meta | trailing action link. */
.user-row {
display: flex;
align-items: center;
gap: 1rem;
background: rgba(255,255,255,0.07);
border-radius: 12px;
padding: 0.75rem 1rem;
}
/* Circular badge; flex-shrink: 0 keeps it 40px wide even when the row is tight.
   NOTE(review): background is a hardcoded blue tint (74,158,255) — consider
   deriving it from var(--primary) like the other accents; confirm intent. */
.user-avatar {
width: 40px; height: 40px;
border-radius: 50%;
background: rgba(74,158,255,0.2);
display: flex; align-items: center; justify-content: center;
font-size: 1.1rem; font-weight: 700;
flex-shrink: 0;
}
/* flex: 1 makes the text block absorb the remaining row width. */
.user-info { flex: 1; }
.user-name { font-weight: 600; font-size: 0.95rem; }
.user-meta { font-size: 0.8rem; opacity: 0.6; margin-top: 0.1rem; }
/* Text-style action link; nowrap keeps the label on one line. */
.btn-secondary {
color: var(--primary);
font-size: 0.85rem;
text-decoration: none;
white-space: nowrap;
}
.btn-secondary:hover { text-decoration: underline; }
/* ---- Profile stats header ---- */
/* Page-level column layout for the profile view. */
.profile { display: flex; flex-direction: column; gap: 1rem; }
/* Card containing the profile name and the stats grid. */
.stats-header {
background: rgba(255,255,255,0.06);
border-radius: 14px;
padding: 1rem 1.25rem;
}
.profile-name { font-size: 1.1rem; font-weight: 700; margin-bottom: 0.75rem; }
/* Four equal-width stat tiles in a single row. */
.stats-grid { display: grid; grid-template-columns: repeat(4, 1fr); gap: 0.5rem; }
.stat-tile {
background: rgba(255,255,255,0.06);
border-radius: 10px;
padding: 0.6rem 0.5rem;
text-align: center;
}
/* Big number on top, small faded label below. */
.stat-value { font-size: 1.1rem; font-weight: 700; color: var(--primary); }
.stat-label { font-size: 0.7rem; opacity: 0.5; margin-top: 0.1rem; }
/* ---- View tabs ---- */
/* Pill-shaped view switcher; tabs wrap onto extra rows on narrow screens. */
.view-tabs {
  display: flex;
  flex-wrap: wrap;
  gap: 0.4rem;
}
.view-tab {
  background: rgba(255,255,255,0.08);
  color: rgba(255,255,255,0.7);
  font-size: 0.85rem;
  padding: 0.3rem 0.75rem;
  border-radius: 8px;
  text-decoration: none;
  transition: background 0.15s;
}
.view-tab:hover {
  background: rgba(255,255,255,0.14);
}
/* Active tab: solid primary fill with a bolder label. */
.view-tab.active {
  background: var(--primary);
  color: #fff;
  font-weight: 600;
}
/* ---- History heatmap ---- */
.heatmap-section { background: rgba(255,255,255,0.06); border-radius: 12px; padding: 1rem; }
.heatmap-label { font-size: 0.8rem; opacity: 0.5; margin-bottom: 0.6rem; }
/* 12 columns — one cell per month. */
.heatmap { display: grid; grid-template-columns: repeat(12, 1fr); gap: 4px; }
.heatmap-cell {
border-radius: 6px;
padding: 0.4rem 0.2rem;
text-align: center;
min-height: 48px;
display: flex; flex-direction: column; align-items: center; justify-content: center;
/* Derive the cell tint from the theme primary via CSS relative color syntax
   instead of a hardcoded oklch value, so the heatmap tracks theme changes.
   Per-cell intensity comes from --alpha (presumably set inline per cell —
   confirm against the template); it defaults to a faint 0.05. */
background: oklch(from var(--primary) l c h / var(--alpha, 0.05));
}
.heatmap-count { font-size: 0.85rem; font-weight: 700; }
.heatmap-month { font-size: 0.65rem; opacity: 0.6; margin-top: 2px; }
/* ---- History month sections ---- */
/* One section per month in the history view. */
.history-month {
  margin-top: 1rem;
}
.month-heading {
  font-size: 0.95rem;
  font-weight: 600;
  margin-bottom: 0.5rem;
  opacity: 0.8;
}
/* Entry count shown inside the heading, visually de-emphasised. */
.month-count {
  font-size: 0.8rem;
  font-weight: 400;
  opacity: 0.5;
}
/* ---- Trends charts ---- */
/* Stacked chart cards. */
.trends-section { display: flex; flex-direction: column; gap: 1.25rem; }
.chart-block { background: rgba(255,255,255,0.06); border-radius: 12px; padding: 1rem; }
.chart-label { font-size: 0.8rem; opacity: 0.5; margin-bottom: 0.75rem; }
/* Vertical bar chart: bars bottom-aligned via flex-end. */
.bar-chart {
display: flex;
align-items: flex-end;
gap: 4px;
}
/* One column = value label / bar / month label, top to bottom. */
.bar-col { flex: 1; display: flex; flex-direction: column; align-items: center; gap: 2px; }
.bar-value { font-size: 0.6rem; color: var(--primary); opacity: 0.9; line-height: 1; }
/* Bar height is presumably set inline per bar (pixel heights per commit
   history — confirm against the template); min-height keeps zero bars visible. */
.bar-fill { width: 100%; background: var(--primary); border-radius: 3px 3px 0 0; min-height: 3px; opacity: 0.8; }
.bar-month { font-size: 0.65rem; opacity: 0.5; }
/* Horizontal bar chart of directors: name | track+fill | count. */
.director-chart { display: flex; flex-direction: column; gap: 6px; }
.director-row { display: flex; align-items: center; gap: 0.6rem; }
/* Fixed-width name column; long names are ellipsised. */
.director-name { font-size: 0.85rem; width: 140px; flex-shrink: 0; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
.director-bar { flex: 1; background: rgba(255,255,255,0.08); border-radius: 4px; height: 10px; overflow: hidden; }
/* Fill width is presumably set inline as a percentage — confirm in template. */
.director-bar-fill { height: 100%; background: var(--primary); border-radius: 4px; opacity: 0.8; }
.director-count { font-size: 0.8rem; opacity: 0.5; width: 20px; text-align: right; }