Compare commits

..

94 Commits

Author SHA1 Message Date
7a66661932 css 2026-05-04 23:24:04 +02:00
b30a6a102b feat: per-page titles, OG/SEO tags, HOST/PORT env vars, BASE_URL in config 2026-05-04 22:38:58 +02:00
38a3aa6bbf fix: update .gitignore to include db-shm and db-wal files
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 22:23:08 +02:00
3135a15cb3 fix: WAL mode + busy_timeout for SQLite, fix rate limiter TOCTOU race 2026-05-04 22:10:19 +02:00
d083f8ae3d refactor: use constant for minimum password length and API rate limit
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 21:41:07 +02:00
874c406d4a fix: security hardening — SameSite=Strict, Secure cookie flag, password min length, generic registration error, auth rate limiting 2026-05-04 21:38:23 +02:00
78e1f4ef72 clean up 2026-05-04 21:24:44 +02:00
cf74b06b4a fix: use pixel bar heights and show avg rating values in trends chart 2026-05-04 21:22:47 +02:00
317898d51b fix: count distinct movies per user in users list, not total reviews 2026-05-04 21:10:32 +02:00
790bb6fbb5 fix: read BASE_URL from env for RSS channel link 2026-05-04 21:06:51 +02:00
658df38788 fix: move rss_url after user lookup, extract RSS_FEED_LIMIT constant 2026-05-04 21:05:08 +02:00
cff0f854fa feat: point RSS nav link to user feed when on profile page 2026-05-04 21:00:31 +02:00
66ade70273 feat: add GET /users/{id}/feed.rss per-user RSS feed handler 2026-05-04 20:58:20 +02:00
cbd2ac5b3e feat: add rss_url to HtmlPageContext, use it in nav 2026-05-04 20:55:31 +02:00
0433cd4d9b fix: remove unused feed_title from RssAdapter 2026-05-04 20:54:32 +02:00
b5a8ea2395 feat: add title param to render_feed, use dynamic title in RSS adapter 2026-05-04 20:52:07 +02:00
49b79799c1 feat: add user_id filter to GetDiaryQuery and get_diary use case 2026-05-04 20:49:31 +02:00
f4aba551a2 fix: derive heatmap color from primary instead of hardcoded blue 2026-05-04 20:38:13 +02:00
91df35dbd3 fix: count distinct movies in user stats, not total reviews 2026-05-04 20:35:48 +02:00
623f90e43f fix: remove timezone-broken future-date check from Review::new 2026-05-04 20:29:11 +02:00
e28f628c80 fix: remove redundant 'common' section from documentation 2026-05-04 20:28:06 +02:00
60c25d4c24 fix: update test assertion for new default page limit 2026-05-04 20:18:04 +02:00
22aafe99be fix: set domain DEFAULT_LIMIT to 5 for pagination 2026-05-04 20:16:18 +02:00
0ff22cca5f fix: remove email from top bar nav 2026-05-04 20:13:14 +02:00
ccc39e27e4 fix: lower default page limit to 5 2026-05-04 20:11:55 +02:00
76319756f4 feat: add chrono dependency to Cargo.lock 2026-05-04 19:48:40 +02:00
7703227970 fix: add missing trait stubs to test mock impls 2026-05-04 19:23:56 +02:00
b9933bb48d feat: add profile/feed/chart CSS styles 2026-05-04 19:17:39 +02:00
0c48708ce6 fix: has_more overflow, magic constant, remove dead get_index handler 2026-05-04 19:15:42 +02:00
a2a889bced feat: wire activity feed, users list, and profile page handlers 2026-05-04 19:12:06 +02:00
a4846f3bea fix: pagination underflow, remove |safe, move bar_height_pct to adapter 2026-05-04 19:09:28 +02:00
27be840faa fix: adjust domain accessors and template adapter for Askama compatibility 2026-05-04 19:03:48 +02:00
965fc0eda8 feat: add activity feed, users, and profile HTML templates 2026-05-04 19:03:44 +02:00
d700b85337 fix: correct relative_time future guard, heatmap exact match, max clarity 2026-05-04 18:57:17 +02:00
ffbab75910 feat: add Askama template structs for feed/users/profile 2026-05-04 18:55:18 +02:00
dda7c40f7f fix: validate view param, document V1 history load 2026-05-04 18:53:26 +02:00
1b827b1bdd feat: add activity feed/users/profile use cases and port methods 2026-05-04 18:48:16 +02:00
1ee6873a60 fix: address code review issues in SQLite adapter 2026-05-04 18:46:31 +02:00
7352b533ff feat: implement feed/stats/history/trends SQLite queries 2026-05-04 18:42:45 +02:00
85e254fee2 feat: impl UserRepository::list_with_stats 2026-05-04 18:40:58 +02:00
fa8221322d feat: add feed/stats SQLite row types 2026-05-04 18:32:59 +02:00
38da37de55 feat: add feed/profile/stats port methods to repositories 2026-05-04 18:30:01 +02:00
f3dedbad8a fix: use UserId newtype in UserSummary instead of raw Uuid 2026-05-04 18:29:10 +02:00
d468ce131f feat: add feed/profile domain models, extend DiaryFilter 2026-05-04 18:26:55 +02:00
d034af9e9c feat: update color scheme to use primary variables for consistency in styling 2026-05-04 17:47:00 +02:00
59d308f41b feat: enhance movie rating display with star icons and improved styling 2026-05-04 15:43:02 +02:00
bbb2ee00d6 feat: enhance styling and layout; add background image and improve UI elements 2026-05-04 15:39:15 +02:00
5dd9aac68d feat: add Dockerfile, .dockerignore, and README; remove common crate
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 15:19:29 +02:00
6dcc4c8317 Refactor movie review logging and resolution strategies
- Introduced `MovieResolver` and associated strategies for resolving movie data based on external metadata ID, manual title, or manual entry.
- Updated `log_review` use case to utilize the new `MovieResolver` for fetching movie details.
- Simplified the `LogReviewData` structure and its conversion to `LogReviewCommand`.
- Enhanced error handling for date parsing in review forms and requests.
- Updated dependencies in `Cargo.toml` and `Cargo.lock` to include necessary crates for async operations.
- Added tests for new functionality in `movie_resolver.rs` to ensure correct behavior of resolution strategies.
2026-05-04 15:08:04 +02:00
e31d99a240 fix(tests): add missing trait methods to PanicRepo mocks 2026-05-04 14:37:48 +02:00
41fec1efa5 fix(presentation): restore user_id in get_new_review_page for nav bar 2026-05-04 14:34:46 +02:00
160c08d1c4 fix(presentation): pass None user_id for non-diary page contexts 2026-05-04 14:32:30 +02:00
7aa6d7bf4d feat(template): add user_id to HtmlPageContext and delete button to diary 2026-05-04 14:31:12 +02:00
144f2f8e0c fix(presentation): use {id} syntax in delete review route 2026-05-04 14:29:24 +02:00
cff64f7a6b feat(presentation): add POST /reviews/:id/delete handler and route 2026-05-04 14:27:43 +02:00
5baff54cb9 feat(presentation): add DELETE /api/reviews/:id handler and route 2026-05-04 14:24:48 +02:00
f94d2db8b1 feat(sqlite): implement get_review_by_id, delete_review, delete_movie 2026-05-04 14:21:25 +02:00
48875a6e86 feat(application): add DeleteReviewCommand and delete_review use case 2026-05-04 14:17:13 +02:00
9387ae705b feat(domain): add get_review_by_id to MovieRepository 2026-05-04 14:15:04 +02:00
9871e21bc0 feat(gitignore): add .superpowers and docs/ to .gitignore 2026-05-04 14:00:56 +02:00
fa8efbaa23 feat(database): remove unused SQL queries and update Cargo dependencies 2026-05-04 14:00:33 +02:00
d769a5b55c feat(css): add monospace minimal stylesheet 2026-05-04 13:38:57 +02:00
8e1fb1a974 feat(routes): replace /diary with /, add login/logout/register/reviews/new routes 2026-05-04 13:38:19 +02:00
6145b873f5 feat(handlers): add HTML handlers for login, logout, register, new review, diary index 2026-05-04 13:38:16 +02:00
cc668ae44d feat(dtos): add LoginForm, RegisterForm, ErrorQuery 2026-05-04 13:38:14 +02:00
e5097c22dd feat(extractors): add OptionalCookieUser and RequiredCookieUser 2026-05-04 13:34:31 +02:00
450468ef3d feat(templates): add base layout, login, register, new_review templates; update diary 2026-05-04 13:30:33 +02:00
6e7c6467a7 feat(domain): add find_by_id to UserRepository + SQLite impl 2026-05-04 13:28:20 +02:00
7f815f8207 feat(ports): extend HtmlRenderer with page context and new render methods 2026-05-04 13:20:30 +02:00
5df89200d4 docs: add frontend HTML design spec 2026-05-04 13:19:21 +02:00
eb273dc277 fix(database): update database connection to use DATABASE_URL with SqliteConnectOptions 2026-05-04 12:54:28 +02:00
5689db0ad7 feat(wiring): wire PosterSyncHandler into event channel in main.rs 2026-05-04 12:44:53 +02:00
5c70b8b8be fix(event-handlers): expect over unwrap, panic-stub comment, fix deprecated chrono call 2026-05-04 12:42:03 +02:00
4c547df04e feat(presentation): implement PosterSyncHandler with retry 2026-05-04 12:37:57 +02:00
602df8df22 feat(application): derive Clone on SyncPosterCommand 2026-05-04 12:35:10 +02:00
5b69a3a7c0 test(event-publisher): fix flaky sleep synchronization in EventWorker tests 2026-05-04 12:34:29 +02:00
a38f78d261 feat(event-publisher): add EventHandler trait and fan-out in EventWorker 2026-05-04 12:32:44 +02:00
17f90726e8 feat(event-publisher): add event publisher adapter with configuration and integration 2026-05-04 12:30:42 +02:00
563f33212e docs: event-driven poster sync implementation plan 2026-05-04 12:30:20 +02:00
8e5ac9f433 docs: event-driven poster sync design spec 2026-05-04 12:24:52 +02:00
f790fa2a0f feat(rss): implement RSS feed adapter and integrate with application state 2026-05-04 12:03:17 +02:00
edcf3c1170 feat(poster-fetcher): add poster fetcher adapter with configuration and integration 2026-05-04 11:51:20 +02:00
1985d2c57f feat(poster-storage): implement S3/Minio storage adapter and configuration
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 11:44:44 +02:00
f0b3d8ad90 feat(log_review): add manual title resolution for movie lookup 2026-05-04 11:24:18 +02:00
da72ab1446 feat(metadata): Implement OMDB metadata provider and refactor metadata client
- Added `OmdbProvider` to fetch movie metadata from the OMDB API.
- Refactored `MetadataClient` to use `MetadataSearchCriteria` for fetching movie metadata.
- Updated `MetadataClientImpl` to support fetching metadata using OMDB.
- Modified `log_review` use case to utilize the new metadata fetching mechanism.
- Updated tests and presentation layer to accommodate changes in metadata handling.
- Added dependencies for `reqwest` and `async-trait` in relevant `Cargo.toml` files.
2026-05-04 11:19:51 +02:00
93c65cd155 feat(auth): implement JWT authentication and user registration
- Added JWT authentication with token generation and validation.
- Introduced user registration functionality with email and password.
- Integrated Argon2 for password hashing.
- Created SQLite user repository for user data persistence.
- Updated application context to include user repository and configuration settings.
- Added environment variable support for JWT secret and registration allowance.
- Enhanced error handling for unauthorized access and validation errors.
- Updated presentation layer to handle login and registration requests.
2026-05-04 10:43:07 +02:00
ba42d3d445 refactor(tests): remove unused models from api_test 2026-05-04 09:35:54 +02:00
819332522a cargo lock 2026-05-04 09:30:58 +02:00
79a06e6844 presentation wiring
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-05-04 09:30:20 +02:00
97a496553a chore: ignore .worktrees/ directory 2026-05-04 02:43:32 +02:00
5a58625265 feat(presentation): add initial structure with dtos, errors, extractors, handlers, and routes modules 2026-05-04 02:11:33 +02:00
6d9ac07dfc refactor(template-askama): clean up comments and improve code readability 2026-05-04 02:05:13 +02:00
b6a7cf9417 feat(template-askama): add Askama template adapter for diary entries 2026-05-04 02:04:52 +02:00
c4b39c9410 feat(sqlite): implement movie and review management with migrations
- Added SQL migrations for movies and reviews tables.
- Implemented SqliteMovieRepository with methods for upserting movies, saving reviews, and querying diary entries.
- Introduced models for database rows and conversion to domain models.
- Integrated async migration handling in the repository.
- Updated Cargo.toml files to include necessary dependencies for async operations and HTTP handling.
2026-05-04 01:59:52 +02:00
125 changed files with 10103 additions and 170 deletions

2
.cargo/config.toml Normal file
View File

@@ -0,0 +1,2 @@
[env]
SQLX_OFFLINE = "true"

10
.dockerignore Normal file
View File

@@ -0,0 +1,10 @@
target/
.git/
.env
*.db
*.db-shm
*.db-wal
.cargo/
.sqlx/
docs/
dev.db

View File

@@ -0,0 +1,13 @@
DATABASE_URL=sqlite:./dev.db
BASE_URL=http://localhost:3000
PORT=3000
SECURE_COOKIES=false
JWT_SECRET=
JWT_TTL_SECONDS=
ALLOW_REGISTRATION=true
OMDB_API_KEY=
POSTER_FETCH_TIMEOUT_SECONDS=30
MINIO_ENDPOINT=
MINIO_ACCESS_KEY_ID=
MINIO_SECRET_ACCESS_KEY=
MINIO_BUCKET=

8
.gitignore vendored
View File

@@ -6,3 +6,11 @@
.env
.env.prod
*.db
*db-shm
*db-wal
.worktrees/
.superpowers/
docs/

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "01a08873b7fa815ad98a56a0902b60414cfcdc2c7a8570351320c4bc425347c6"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "026e2afeb573707cb360fcdab8f6137aabfaf603b5ed57b98ac2888b4a0389ff"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false
]
},
"hash": "0963b9661182e139cd760bbabb0d6ea3a301a2a3adbdfdda4a88f333a1144c77"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews WHERE user_id = ?",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "0cd1a7b7255a0ee753deffab7cbb48027d22900a570b98a636c780cb3e2efd23"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT id, email, password_hash FROM users WHERE email = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "password_hash",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "password_hash",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
}

View File

@@ -0,0 +1,98 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,\n u.email AS user_email\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n INNER JOIN users u ON u.id = r.user_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
},
{
"name": "user_email",
"ordinal": 13,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false,
false
]
},
"hash": "217854179b4f77897178e6cfae51fb743e5be49ffc59826509be37a7cc81b6ee"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE title = ? AND release_year = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "3047579c6ed13ce87aad9b9ce6300c02f0df3516979518976e13f9d9abc6a403"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "33d0dae7d16b0635c1c7eb5afd10824bb55af7cc7a854f590d326622863759d1"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "47f7cf95ce3450635b643ab710cadba96f40319140834d510bc5207b2552e055"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews WHERE movie_id = ?",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "4b3074b532342c6356ee0e8e4d8c4a830f016234bb690e1f6240f02824d6d84f"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT strftime('%Y-%m', watched_at) AS month\n FROM reviews\n WHERE user_id = ?\n GROUP BY month\n ORDER BY COUNT(*) DESC\n LIMIT 1",
"describe": {
"columns": [
{
"name": "month",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true
]
},
"hash": "4d85f0ff9732576bba77dc84d3885a0002c2b600c34ba4d99f1e1c5e99f35e75"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "5a861b5a934c9831ff17d896fa48feb95e6dab051c5ac55a66f9793482522199"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO reviews (id, movie_id, user_id, rating, comment, watched_at, created_at)\n VALUES (?, ?, ?, ?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 7
},
"nullable": []
},
"hash": "630e092fcd33bc312befef352a98225e6e18e6079644b949258a39bf4b0fe3e5"
}

View File

@@ -0,0 +1,56 @@
{
"db_name": "SQLite",
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE external_metadata_id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "7bc4aebcb94547976d3d7e063e4e908fc22b977b3cbf063ee93ffe4648c42011"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO movies (id, external_metadata_id, title, release_year, director, poster_path)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(id) DO UPDATE SET\n external_metadata_id = excluded.external_metadata_id,\n title = excluded.title,\n release_year = excluded.release_year,\n director = excluded.director,\n poster_path = excluded.poster_path",
"describe": {
"columns": [],
"parameters": {
"Right": 6
},
"nullable": []
},
"hash": "7d7e23355ee0e442f2aa27e898dcfa40bdc4b09391afe04325f076157d9d84aa"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "8d144859b397a842118c2dc4ab30e74015a814ed8185b6f86fbe39e641ab804e"
}

View File

@@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(DISTINCT movie_id) AS \"total!: i64\",\n AVG(CAST(rating AS REAL)) AS avg_rating\n FROM reviews WHERE user_id = ?",
"describe": {
"columns": [
{
"name": "total!: i64",
"ordinal": 0,
"type_info": "Integer"
},
{
"name": "avg_rating",
"ordinal": 1,
"type_info": "Float"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true
]
},
"hash": "a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.rating DESC, r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "a3f4385bac7f78a9959648fb325d37096c87859ded1762137ce745955f46830c"
}

View File

@@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT m.director AS \"director!\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 5",
"describe": {
"columns": [
{
"name": "director!",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "count!: i64",
"ordinal": 1,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true,
false
]
},
"hash": "aca9e7aaa32c23b4de3f5048d60340e978d31a36be9121da3c59378f2fc1ed8e"
}

View File

@@ -0,0 +1,56 @@
{
"db_name": "SQLite",
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE movie_id = ? ORDER BY watched_at ASC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "af883f8b78f185077e2d3dcfaa0a6e62fbdfbf00c97c9b33b699dc631476181d"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "affe1eb261283c09d4b1ce6e684681755f079a044ffec8ff2bd79cfd8efe16b8"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT m.director\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 1",
"describe": {
"columns": [
{
"name": "director",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true
]
},
"hash": "d5d2a81306488a8cee5654cea7e14d76d76ecc7d2190ffb73d12bec2874111d2"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "DELETE FROM movies WHERE id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883"
}

View File

@@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "SELECT u.id,\n u.email,\n COUNT(DISTINCT r.movie_id) AS \"total_movies!: i64\",\n AVG(CAST(r.rating AS REAL)) AS avg_rating\n FROM users u\n LEFT JOIN reviews r ON r.user_id = u.id\n GROUP BY u.id, u.email\n ORDER BY u.email ASC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "total_movies!: i64",
"ordinal": 2,
"type_info": "Integer"
},
{
"name": "avg_rating",
"ordinal": 3,
"type_info": "Float"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
true
]
},
"hash": "f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "DELETE FROM reviews WHERE id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT strftime('%Y-%m', watched_at) AS \"month!\",\n AVG(CAST(rating AS REAL)) AS \"avg_rating!: f64\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews\n WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')\n GROUP BY \"month!\"\n ORDER BY \"month!\" ASC",
"describe": {
"columns": [
{
"name": "month!",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "avg_rating!: f64",
"ordinal": 1,
"type_info": "Float"
},
{
"name": "count!: i64",
"ordinal": 2,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true,
false,
false
]
},
"hash": "fdd5b522f26b5e0ce62f76c774fbb606fd9ee9884f4457831f693a0df3609317"
}

1636
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,11 +1,14 @@
[workspace]
members = [
"crates/adapters/auth",
"crates/adapters/event-publisher",
"crates/adapters/metadata",
"crates/adapters/poster-fetcher",
"crates/adapters/poster-storage",
"crates/adapters/rss",
"crates/adapters/sqlite",
"crates/adapters/template-askama",
"crates/application",
"crates/common",
"crates/domain",
"crates/presentation",
]
@@ -13,6 +16,7 @@ resolver = "2"
[workspace.dependencies]
tokio = { version = "1.0", features = ["full"] }
dotenvy = "0.15"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
anyhow = "1.0"
@@ -22,12 +26,23 @@ tracing-subscriber = { version = "0.3.23", features = ["env-filter"] }
async-trait = "0.1"
uuid = { version = "1.23.0", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
sqlx = { version = "0.8.6", features = [
"runtime-tokio-rustls",
"sqlite",
"uuid",
"macros",
] }
reqwest = { version = "0.13", features = ["json", "query"] }
object_store = { version = "0.11", features = ["aws"] }
domain = { path = "crates/domain" }
common = { path = "crates/common" }
application = { path = "crates/application" }
presentation = { path = "crates/presentation" }
auth = { path = "crates/adapters/auth" }
metadata = { path = "crates/adapters/metadata" }
poster-fetcher = { path = "crates/adapters/poster-fetcher" }
poster-storage = { path = "crates/adapters/poster-storage" }
event-publisher = { path = "crates/adapters/event-publisher" }
rss = { path = "crates/adapters/rss" }
sqlite = { path = "crates/adapters/sqlite" }
template-askama = { path = "crates/adapters/template-askama" }

57
Dockerfile Normal file
View File

@@ -0,0 +1,57 @@
# ----- build -----
FROM rust:slim-bookworm AS builder
# sqlite3 CLI is needed below to materialize a DB for sqlx compile-time checks.
RUN apt-get update && apt-get install -y --no-install-recommends sqlite3 && rm -rf /var/lib/apt/lists/*
WORKDIR /build
# Cache dependency compilation separately from source
COPY Cargo.toml Cargo.lock ./
COPY crates/adapters/auth/Cargo.toml crates/adapters/auth/Cargo.toml
COPY crates/adapters/event-publisher/Cargo.toml crates/adapters/event-publisher/Cargo.toml
COPY crates/adapters/metadata/Cargo.toml crates/adapters/metadata/Cargo.toml
COPY crates/adapters/poster-fetcher/Cargo.toml crates/adapters/poster-fetcher/Cargo.toml
COPY crates/adapters/poster-storage/Cargo.toml crates/adapters/poster-storage/Cargo.toml
COPY crates/adapters/rss/Cargo.toml crates/adapters/rss/Cargo.toml
COPY crates/adapters/sqlite/Cargo.toml crates/adapters/sqlite/Cargo.toml
COPY crates/adapters/template-askama/Cargo.toml crates/adapters/template-askama/Cargo.toml
COPY crates/application/Cargo.toml crates/application/Cargo.toml
COPY crates/domain/Cargo.toml crates/domain/Cargo.toml
COPY crates/presentation/Cargo.toml crates/presentation/Cargo.toml
# Stub every crate so cargo can resolve and fetch deps
RUN find crates -name "Cargo.toml" | sed 's|/Cargo.toml||' | \
xargs -I{} sh -c 'mkdir -p {}/src && echo "fn main(){}" > {}/src/main.rs && echo "" > {}/src/lib.rs'
RUN cargo fetch
# Now copy real sources (invalidates cache only on source changes)
COPY crates ./crates
# sqlx macros verify queries at compile time; create a real DB from migrations
RUN sqlite3 /build/dev.db \
< crates/adapters/sqlite/migrations/0001_initial.sql && \
sqlite3 /build/dev.db \
< crates/adapters/sqlite/migrations/0002_users.sql
# Point sqlx's compile-time query checks at the freshly built DB.
ENV DATABASE_URL=sqlite:///build/dev.db
RUN cargo build --release -p presentation
# ----- runtime -----
# Slim runtime image: compiled binary, static assets, and CA certs only.
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY --from=builder /build/target/release/presentation ./presentation
# Static assets are copied from the build context (not the builder stage).
COPY static ./static
EXPOSE 3000
ENV RUST_LOG=presentation=info,tower_http=info
CMD ["./presentation"]

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2026 Gabriel Kaszewski
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

85
README.md Normal file
View File

@@ -0,0 +1,85 @@
# Movies Diary
A self-hosted, server-side rendered movie logging system. Built in Rust — no JavaScript, no SPA, just HTML forms and an RSS feed. Designed to run as a lightweight widget embedded on a personal site.
## Features
- Log movies with a TMDB/OMDb ID and a 0–5 rating
- Immutable append-only viewing ledger (tracks re-watches)
- Background poster fetching and storage (S3-compatible)
- RSS/Atom feed for public subscription
- JWT authentication via cookie (HTML) or Bearer token (REST API)
- Zero JavaScript
## Architecture
Hexagonal (Ports & Adapters) with Domain-Driven Design:
```
domain — pure types and trait definitions, no external deps
application — use cases / business logic orchestration
presentation — Axum HTTP router, wires all adapters together
adapters/
auth — JWT issuance and validation (Argon2 passwords)
sqlite — SQLite repository via sqlx
metadata — OMDb HTTP client
poster-fetcher — downloads poster images
poster-storage — uploads posters to S3-compatible storage
template-askama — Askama HTML rendering
rss — RSS/Atom feed generation
event-publisher — async event channel for background poster sync
```
## Prerequisites
- Rust (stable, 2024 edition)
- SQLite
- An S3-compatible object store (e.g. MinIO) for poster storage
- An [OMDb API key](https://www.omdbapi.com/apikey.aspx)
## Environment Variables
Copy and fill in the following (e.g. in a `.env` file):
```env
# Database
DATABASE_URL=sqlite://movies.db
# Authentication
JWT_SECRET=change-me
JWT_TTL_SECONDS=86400
# OMDb metadata
OMDB_API_KEY=your-key
# Poster storage (S3-compatible)
MINIO_ENDPOINT=http://localhost:9000
MINIO_BUCKET=posters
MINIO_REGION=us-east-1
MINIO_ACCESS_KEY_ID=minioadmin
MINIO_SECRET_ACCESS_KEY=minioadmin
# Optional
ALLOW_REGISTRATION=false
POSTER_FETCH_TIMEOUT_SECONDS=10
EVENT_CHANNEL_BUFFER=32
RUST_LOG=presentation=debug,tower_http=debug
```
## Run
```bash
cargo run -p presentation
```
Server listens on `0.0.0.0:3000`.
## Test
```bash
cargo test
```
## License
MIT License. See [LICENSE](LICENSE).

View File

@@ -4,3 +4,12 @@ version = "0.1.0"
edition = "2024"
[dependencies]
async-trait = { workspace = true }
domain = { workspace = true }
anyhow = { workspace = true }
chrono = { workspace = true }
uuid = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
jsonwebtoken = "9"
argon2 = { version = "0.5", features = ["std"] }
rand_core = { version = "0.6", features = ["getrandom"] }

View File

@@ -1,14 +1,104 @@
pub fn add(left: u64, right: u64) -> u64 {
left + right
use async_trait::async_trait;
use argon2::{
Argon2,
password_hash::{PasswordHasher as _, PasswordVerifier, SaltString},
};
use chrono::{Duration, Utc};
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation, decode, encode};
use rand_core::OsRng;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use domain::{
errors::DomainError,
ports::{AuthService, GeneratedToken, PasswordHasher},
value_objects::{PasswordHash, UserId},
};
/// JWT signing configuration loaded from the environment.
pub struct AuthConfig {
    // Shared HMAC secret used to both sign and validate tokens.
    secret: String,
    // Token lifetime in seconds (defaults to 86400 = 24h in `from_env`).
    ttl_seconds: u64,
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn it_works() {
let result = add(2, 2);
assert_eq!(result, 4);
impl AuthConfig {
    /// Build the auth configuration from environment variables.
    ///
    /// `JWT_SECRET` is mandatory and must be non-empty; `JWT_TTL_SECONDS`
    /// is optional and falls back to 86400 (24 hours) when it is absent
    /// or not a valid `u64`.
    pub fn from_env() -> anyhow::Result<Self> {
        let secret = match std::env::var("JWT_SECRET") {
            Ok(s) if !s.is_empty() => s,
            Ok(_) => anyhow::bail!("JWT_SECRET must not be empty"),
            Err(_) => anyhow::bail!("JWT_SECRET env var is required"),
        };
        let ttl_seconds = std::env::var("JWT_TTL_SECONDS")
            .ok()
            .and_then(|raw| raw.parse::<u64>().ok())
            .unwrap_or(86400);
        Ok(Self { secret, ttl_seconds })
    }
}
/// JWT claim set: `sub` carries the user id (UUID string), `exp` a unix timestamp.
#[derive(Serialize, Deserialize)]
struct Claims {
    sub: String,
    exp: u64,
}

/// `AuthService` implementation issuing and validating HMAC-signed JWTs.
pub struct JwtAuthService {
    config: AuthConfig,
}

impl JwtAuthService {
    /// Create a service from an already-loaded `AuthConfig`.
    pub fn new(config: AuthConfig) -> Self {
        Self { config }
    }
}
#[async_trait]
impl AuthService for JwtAuthService {
    /// Issue a signed JWT for `user_id`, expiring `ttl_seconds` from now.
    async fn generate_token(&self, user_id: &UserId) -> Result<GeneratedToken, DomainError> {
        let expires_at = Utc::now() + Duration::seconds(self.config.ttl_seconds as i64);
        let claims = Claims {
            sub: user_id.value().to_string(),
            exp: expires_at.timestamp() as u64,
        };
        // Default header (HS256) signed with the shared secret.
        let token = encode(
            &Header::default(),
            &claims,
            &EncodingKey::from_secret(self.config.secret.as_bytes()),
        )
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(GeneratedToken { token, expires_at })
    }
    /// Validate signature and expiry, then parse `sub` back into a `UserId`.
    /// Any failure (bad signature, expired, malformed subject) maps to
    /// `DomainError::Unauthorized` without leaking the underlying cause.
    async fn validate_token(&self, token: &str) -> Result<UserId, DomainError> {
        let data = decode::<Claims>(
            token,
            &DecodingKey::from_secret(self.config.secret.as_bytes()),
            &Validation::default(),
        )
        .map_err(|_| DomainError::Unauthorized("Invalid or expired token".into()))?;
        let uuid = Uuid::parse_str(&data.claims.sub)
            .map_err(|_| DomainError::Unauthorized("Invalid token subject".into()))?;
        Ok(UserId::from_uuid(uuid))
    }
}
/// Password hasher backed by Argon2 default parameters with a per-hash random salt.
pub struct Argon2PasswordHasher;

#[async_trait]
impl PasswordHasher for Argon2PasswordHasher {
    /// Hash `plain_password` with a fresh OS-random salt and return the
    /// PHC-format string wrapped in the domain `PasswordHash` type.
    async fn hash(&self, plain_password: &str) -> Result<PasswordHash, DomainError> {
        let salt = SaltString::generate(&mut OsRng);
        let hash = Argon2::default()
            .hash_password(plain_password.as_bytes(), &salt)
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?
            .to_string();
        PasswordHash::new(hash).map_err(|e| DomainError::InfrastructureError(e.to_string()))
    }
    /// Verify a candidate password against a stored PHC hash string.
    /// A malformed stored hash is an infrastructure error; a password
    /// mismatch is simply `Ok(false)`.
    async fn verify(&self, plain_password: &str, hash: &PasswordHash) -> Result<bool, DomainError> {
        let parsed = argon2::password_hash::PasswordHash::new(hash.value())
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(Argon2::default()
            .verify_password(plain_password.as_bytes(), &parsed)
            .is_ok())
    }
}

View File

@@ -0,0 +1,10 @@
[package]
name = "event-publisher"
version = "0.1.0"
edition = "2024"
[dependencies]
domain = { workspace = true }
async-trait = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }

View File

@@ -0,0 +1,209 @@
use async_trait::async_trait;
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
use tokio::sync::mpsc;
/// Configuration for the in-process event channel.
pub struct EventPublisherConfig {
    /// Capacity of the bounded mpsc channel between publisher and worker.
    pub channel_buffer: usize,
}

impl EventPublisherConfig {
    /// Read `EVENT_CHANNEL_BUFFER` from the environment, defaulting to 128
    /// when the variable is missing or not a valid `usize`.
    pub fn from_env() -> Self {
        let channel_buffer = match std::env::var("EVENT_CHANNEL_BUFFER") {
            Ok(raw) => raw.parse().unwrap_or(128),
            Err(_) => 128,
        };
        Self { channel_buffer }
    }
}
/// Consumer-side hook: implementors react to each published domain event.
#[async_trait]
pub trait EventHandler: Send + Sync {
    async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
}

/// `EventPublisher` that pushes events onto a bounded tokio mpsc channel.
pub struct ChannelEventPublisher {
    sender: mpsc::Sender<DomainEvent>,
}

#[async_trait]
impl EventPublisher for ChannelEventPublisher {
    /// Send a clone of the event to the worker. Awaits while the channel is
    /// full; errors only if the receiving worker has been dropped.
    async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError> {
        self.sender
            .send(event.clone())
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))
    }
}

/// Background task state: the receiving half of the channel plus the
/// registered handlers to fan each event out to.
pub struct EventWorker {
    receiver: mpsc::Receiver<DomainEvent>,
    handlers: Vec<Box<dyn EventHandler>>,
}
impl EventWorker {
    /// Drain the channel until every sender is dropped, logging each event
    /// and fanning it out to all registered handlers in order.
    pub async fn run(mut self) {
        while let Some(event) = self.receiver.recv().await {
            // One structured log line per event variant.
            match &event {
                DomainEvent::ReviewLogged {
                    review_id,
                    movie_id,
                    user_id,
                    rating,
                    watched_at,
                } => {
                    tracing::info!(
                        review_id = %review_id.value(),
                        movie_id = %movie_id.value(),
                        user_id = %user_id.value(),
                        rating = rating.value(),
                        watched_at = %watched_at,
                        "event: review_logged"
                    );
                }
                DomainEvent::MovieDiscovered {
                    movie_id,
                    external_metadata_id,
                } => {
                    tracing::info!(
                        movie_id = %movie_id.value(),
                        external_id = external_metadata_id.value(),
                        "event: movie_discovered"
                    );
                }
            }
            // A failing handler is logged but never stops the worker or
            // prevents the remaining handlers from seeing the event.
            for handler in &self.handlers {
                if let Err(e) = handler.handle(&event).await {
                    tracing::error!("event handler error: {e}");
                }
            }
        }
        tracing::info!("event worker shut down");
    }
}
/// Publisher that silently discards every event.
pub struct NoopEventPublisher;

#[async_trait]
impl EventPublisher for NoopEventPublisher {
    /// Drops the event and always succeeds.
    async fn publish(&self, _event: &DomainEvent) -> Result<(), DomainError> {
        Ok(())
    }
}
/// Wire up a bounded event channel, returning the publisher half and the
/// worker that drains it through the given handlers.
pub fn create_event_channel(
    config: EventPublisherConfig,
    handlers: Vec<Box<dyn EventHandler>>,
) -> (ChannelEventPublisher, EventWorker) {
    let (sender, receiver) = mpsc::channel(config.channel_buffer);
    let publisher = ChannelEventPublisher { sender };
    let worker = EventWorker { receiver, handlers };
    (publisher, worker)
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::{Arc, Mutex};
    use domain::{
        errors::DomainError,
        events::DomainEvent,
        value_objects::{ExternalMetadataId, MovieId},
    };
    // Test handler that records a label for each event variant it receives.
    struct RecordingHandler {
        calls: Arc<Mutex<Vec<String>>>,
    }
    #[async_trait]
    impl EventHandler for RecordingHandler {
        async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
            let label = match event {
                DomainEvent::MovieDiscovered { .. } => "movie_discovered",
                DomainEvent::ReviewLogged { .. } => "review_logged",
            };
            self.calls.lock().unwrap().push(label.to_string());
            Ok(())
        }
    }
    #[tokio::test]
    async fn single_handler_receives_event() {
        let calls = Arc::new(Mutex::new(vec![]));
        let handler = RecordingHandler { calls: Arc::clone(&calls) };
        let config = EventPublisherConfig { channel_buffer: 8 };
        let (publisher, worker) = create_event_channel(config, vec![Box::new(handler)]);
        let handle = tokio::spawn(worker.run());
        let event = DomainEvent::MovieDiscovered {
            movie_id: MovieId::generate(),
            external_metadata_id: ExternalMetadataId::new("tt1234567".into()).unwrap(),
        };
        publisher.publish(&event).await.unwrap();
        // Dropping the publisher closes the channel so the worker exits.
        drop(publisher);
        handle.await.unwrap();
        assert_eq!(*calls.lock().unwrap(), vec!["movie_discovered"]);
    }
    #[tokio::test]
    async fn multiple_handlers_all_receive_event() {
        let calls1 = Arc::new(Mutex::new(vec![]));
        let calls2 = Arc::new(Mutex::new(vec![]));
        let handler1 = RecordingHandler { calls: Arc::clone(&calls1) };
        let handler2 = RecordingHandler { calls: Arc::clone(&calls2) };
        let config = EventPublisherConfig { channel_buffer: 8 };
        let (publisher, worker) = create_event_channel(
            config,
            vec![Box::new(handler1), Box::new(handler2)],
        );
        let handle = tokio::spawn(worker.run());
        let event = DomainEvent::MovieDiscovered {
            movie_id: MovieId::generate(),
            external_metadata_id: ExternalMetadataId::new("tt9999999".into()).unwrap(),
        };
        publisher.publish(&event).await.unwrap();
        drop(publisher);
        handle.await.unwrap();
        assert_eq!(calls1.lock().unwrap().len(), 1);
        assert_eq!(calls2.lock().unwrap().len(), 1);
    }
    // Verifies the fan-out loop continues past a handler that errors.
    #[tokio::test]
    async fn handler_error_does_not_stop_worker() {
        struct FailingHandler;
        #[async_trait]
        impl EventHandler for FailingHandler {
            async fn handle(&self, _: &DomainEvent) -> Result<(), DomainError> {
                Err(DomainError::InfrastructureError("boom".into()))
            }
        }
        let calls = Arc::new(Mutex::new(vec![]));
        let good = RecordingHandler { calls: Arc::clone(&calls) };
        let config = EventPublisherConfig { channel_buffer: 8 };
        let (publisher, worker) = create_event_channel(
            config,
            vec![Box::new(FailingHandler), Box::new(good)],
        );
        let handle = tokio::spawn(worker.run());
        let event = DomainEvent::MovieDiscovered {
            movie_id: MovieId::generate(),
            external_metadata_id: ExternalMetadataId::new("tt0000001".into()).unwrap(),
        };
        publisher.publish(&event).await.unwrap();
        drop(publisher);
        handle.await.unwrap();
        assert_eq!(calls.lock().unwrap().len(), 1);
    }
}

View File

@@ -4,3 +4,7 @@ version = "0.1.0"
edition = "2024"
[dependencies]
async-trait = { workspace = true }
reqwest = { workspace = true }
serde = { workspace = true }
domain = { workspace = true }

View File

@@ -1,14 +1,54 @@
pub fn add(left: u64, right: u64) -> u64 {
left + right
use async_trait::async_trait;
use domain::{
errors::DomainError,
models::Movie,
ports::{MetadataClient, MetadataSearchCriteria},
value_objects::{ExternalMetadataId, MovieTitle, PosterUrl, ReleaseYear},
};
mod omdb;
/// Normalized movie data returned by a metadata provider, already converted
/// into domain value objects.
pub(crate) struct ProviderMovie {
    pub imdb_id: ExternalMetadataId,
    pub title: MovieTitle,
    pub release_year: ReleaseYear,
    // `None` when the provider reports no director (OMDb uses "N/A").
    pub director: Option<String>,
    // `None` when the provider reports no poster or the URL is invalid.
    pub poster_url: Option<PosterUrl>,
}
#[cfg(test)]
mod tests {
use super::*;
/// Abstraction over a concrete metadata backend (OMDb is the only visible impl).
#[async_trait]
pub(crate) trait MetadataProvider: Send + Sync {
    /// Look up a single movie matching `criteria`.
    async fn fetch(&self, criteria: &MetadataSearchCriteria) -> Result<ProviderMovie, DomainError>;
}
#[test]
fn it_works() {
let result = add(2, 2);
assert_eq!(result, 4);
/// `MetadataClient` implementation delegating to a pluggable provider.
pub struct MetadataClientImpl {
    provider: Box<dyn MetadataProvider>,
}

impl MetadataClientImpl {
    /// Construct a client backed by the OMDb HTTP provider.
    pub fn new_omdb(api_key: String) -> Self {
        Self {
            provider: Box::new(omdb::OmdbProvider::new(api_key)),
        }
    }
}

#[async_trait]
impl MetadataClient for MetadataClientImpl {
    /// Fetch full movie metadata; the poster path is left unset (`None`) —
    /// posters are resolved separately via `get_poster_url`.
    async fn fetch_movie_metadata(
        &self,
        criteria: &MetadataSearchCriteria,
    ) -> Result<Movie, DomainError> {
        let pm = self.provider.fetch(criteria).await?;
        Ok(Movie::new(Some(pm.imdb_id), pm.title, pm.release_year, pm.director, None))
    }
    /// Resolve only the poster URL for a known external id.
    // NOTE(review): this re-runs a full provider fetch just to obtain the
    // poster URL — confirm this is acceptable for the provider's rate limits.
    async fn get_poster_url(
        &self,
        external_metadata_id: &ExternalMetadataId,
    ) -> Result<Option<PosterUrl>, DomainError> {
        let criteria = MetadataSearchCriteria::ImdbId(external_metadata_id.clone());
        let pm = self.provider.fetch(&criteria).await?;
        Ok(pm.poster_url)
    }
}

View File

@@ -0,0 +1,119 @@
use async_trait::async_trait;
use domain::{
errors::DomainError,
ports::MetadataSearchCriteria,
value_objects::{ExternalMetadataId, MovieTitle, PosterUrl, ReleaseYear},
};
use serde::Deserialize;
use crate::{MetadataProvider, ProviderMovie};
/// HTTP client for the OMDb API.
pub(crate) struct OmdbProvider {
    client: reqwest::Client,
    api_key: String,
    // Kept as a field (rather than a constant) so tests can point the
    // provider at a mock server via `with_base_url`.
    base_url: String,
}

impl OmdbProvider {
    /// Provider against the public OMDb endpoint.
    pub(crate) fn new(api_key: String) -> Self {
        Self::with_base_url(api_key, "http://www.omdbapi.com/".to_string())
    }

    /// Provider against an arbitrary base URL — primarily for testing
    /// against a local mock server. Behavior is otherwise identical to `new`.
    pub(crate) fn with_base_url(api_key: String, base_url: String) -> Self {
        Self {
            client: reqwest::Client::new(),
            api_key,
            base_url,
        }
    }
}
/// Wire format of an OMDb lookup response (subset of fields we use).
#[derive(Deserialize)]
struct OmdbResponse {
    #[serde(rename = "Title")]
    title: String,
    #[serde(rename = "Year")]
    year: String,
    #[serde(rename = "Director")]
    director: String,
    #[serde(rename = "Poster")]
    poster: String,
    #[serde(rename = "imdbID")]
    imdb_id: String,
    // OMDb signals success/failure in-band: "True" or "False".
    #[serde(rename = "Response")]
    response: String,
    // Human-readable error message, present when Response == "False".
    #[serde(rename = "Error")]
    error: Option<String>,
}

#[async_trait]
impl MetadataProvider for OmdbProvider {
    /// Query OMDb by IMDb id or by title(+year) and map the response into
    /// a `ProviderMovie`, translating OMDb's in-band errors into domain errors.
    async fn fetch(&self, criteria: &MetadataSearchCriteria) -> Result<ProviderMovie, DomainError> {
        let mut url = reqwest::Url::parse(&self.base_url)
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        // Inner scope so the mutable query-pairs borrow ends before `url` is used.
        {
            let mut params = url.query_pairs_mut();
            params.append_pair("apikey", &self.api_key);
            match criteria {
                MetadataSearchCriteria::ImdbId(id) => {
                    params.append_pair("i", id.value());
                }
                MetadataSearchCriteria::Title { title, year } => {
                    params.append_pair("t", title);
                    if let Some(y) = year {
                        params.append_pair("y", &y.to_string());
                    }
                }
            }
        }
        let http_resp = self
            .client
            .get(url)
            .send()
            .await
            .map_err(|e: reqwest::Error| DomainError::InfrastructureError(e.to_string()))?
            .error_for_status()
            .map_err(|e: reqwest::Error| DomainError::InfrastructureError(e.to_string()))?;
        let resp: OmdbResponse = http_resp
            .json()
            .await
            .map_err(|e: reqwest::Error| DomainError::InfrastructureError(e.to_string()))?;
        // OMDb can return HTTP 200 with Response == "False"; distinguish
        // "not found" from other provider-side failures by message content.
        if resp.response != "True" {
            let msg = resp.error.unwrap_or_default();
            return if msg.to_lowercase().contains("not found") {
                Err(DomainError::NotFound(msg))
            } else {
                Err(DomainError::InfrastructureError(msg))
            };
        }
        // Use only the leading four characters so year values that are not a
        // plain number (e.g. a series range) still parse.
        let year: u16 = resp
            .year
            .chars()
            .take(4)
            .collect::<String>()
            .parse()
            .map_err(|_| {
                DomainError::InfrastructureError(format!("Unparseable year: {}", resp.year))
            })?;
        let imdb_id = ExternalMetadataId::new(resp.imdb_id)
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        let title = MovieTitle::new(resp.title)
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        let release_year = ReleaseYear::new(year)
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        // OMDb uses the literal string "N/A" for absent optional fields.
        let director = match resp.director.as_str() {
            "N/A" | "" => None,
            d => Some(d.to_string()),
        };
        // An invalid poster URL is treated as "no poster" rather than an error.
        let poster_url = match resp.poster.as_str() {
            "N/A" | "" => None,
            url => PosterUrl::new(url.to_string()).ok(),
        };
        Ok(ProviderMovie { imdb_id, title, release_year, director, poster_url })
    }
}

View File

@@ -0,0 +1,10 @@
[package]
name = "poster-fetcher"
version = "0.1.0"
edition = "2021"
[dependencies]
domain = { workspace = true }
async-trait = { workspace = true }
reqwest = { workspace = true }
anyhow = { workspace = true }

View File

@@ -0,0 +1,13 @@
/// Settings for the HTTP poster fetcher.
pub struct PosterFetcherConfig {
    /// Request timeout in seconds for poster downloads.
    pub timeout_seconds: u64,
}

impl PosterFetcherConfig {
    /// Read `POSTER_FETCH_TIMEOUT_SECONDS` from the environment, falling
    /// back to 30 seconds when the variable is unset or unparseable.
    pub fn from_env() -> Self {
        let timeout_seconds = match std::env::var("POSTER_FETCH_TIMEOUT_SECONDS") {
            Ok(raw) => raw.parse().unwrap_or(30),
            Err(_) => 30,
        };
        Self { timeout_seconds }
    }
}

View File

@@ -0,0 +1,38 @@
mod config;
pub use config::PosterFetcherConfig;
use std::time::Duration;
use async_trait::async_trait;
use domain::{errors::DomainError, ports::PosterFetcherClient, value_objects::PosterUrl};
/// Poster downloader backed by a reqwest client with a configured timeout.
pub struct ReqwestPosterFetcher {
    client: reqwest::Client,
}

impl ReqwestPosterFetcher {
    /// Build the HTTP client with the configured request timeout; fails
    /// only if the client builder itself fails.
    pub fn new(config: PosterFetcherConfig) -> anyhow::Result<Self> {
        let client = reqwest::Client::builder()
            .timeout(Duration::from_secs(config.timeout_seconds))
            .build()?;
        Ok(Self { client })
    }
}

#[async_trait]
impl PosterFetcherClient for ReqwestPosterFetcher {
    /// GET the poster URL and return the raw response body bytes.
    /// Non-2xx statuses become `InfrastructureError` via `error_for_status`.
    async fn fetch_poster_bytes(&self, poster_url: &PosterUrl) -> Result<Vec<u8>, DomainError> {
        let bytes = self
            .client
            .get(poster_url.value())
            .send()
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?
            .error_for_status()
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?
            .bytes()
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(bytes.to_vec())
    }
}

View File

@@ -0,0 +1,16 @@
[package]
name = "poster-storage"
version = "0.1.0"
edition = "2024"
[dependencies]
domain = { workspace = true }
anyhow = { workspace = true }
async-trait = { workspace = true }
tracing = { workspace = true }
object_store = { workspace = true }
infer = "0.19.0"
[dev-dependencies]
tokio = { workspace = true }
uuid = { workspace = true }

View File

@@ -0,0 +1,38 @@
use anyhow::Context;
use object_store::{aws::AmazonS3Builder, ObjectStore};
use std::sync::Arc;
/// S3/MinIO connection settings for poster object storage.
pub struct StorageConfig {
    endpoint: String,
    access_key_id: String,
    secret_access_key: String,
    bucket: String,
    region: String,
}

impl StorageConfig {
    /// Load the `MINIO_*` environment variables; only `MINIO_REGION` is
    /// optional (defaulting to "minio").
    pub fn from_env() -> anyhow::Result<Self> {
        Ok(Self {
            endpoint: std::env::var("MINIO_ENDPOINT").context("MINIO_ENDPOINT required")?,
            access_key_id: std::env::var("MINIO_ACCESS_KEY_ID")
                .context("MINIO_ACCESS_KEY_ID required")?,
            secret_access_key: std::env::var("MINIO_SECRET_ACCESS_KEY")
                .context("MINIO_SECRET_ACCESS_KEY required")?,
            bucket: std::env::var("MINIO_BUCKET").context("MINIO_BUCKET required")?,
            region: std::env::var("MINIO_REGION").unwrap_or_else(|_| "minio".to_string()),
        })
    }
    /// Build an S3-compatible `ObjectStore`, consuming the config.
    /// `allow_http` is enabled so plain-HTTP (local MinIO) endpoints work.
    pub fn build_store(self) -> anyhow::Result<Arc<dyn ObjectStore>> {
        let store = AmazonS3Builder::new()
            .with_endpoint(self.endpoint)
            .with_access_key_id(self.access_key_id)
            .with_secret_access_key(self.secret_access_key)
            .with_bucket_name(self.bucket)
            .with_region(self.region)
            .with_allow_http(true)
            .build()
            .context("Failed to build S3/Minio store")?;
        Ok(Arc::new(store))
    }
}

View File

@@ -0,0 +1,95 @@
mod config;
pub use config::StorageConfig;
use async_trait::async_trait;
use domain::{
errors::DomainError,
ports::PosterStorage,
value_objects::{MovieId, PosterPath},
};
use object_store::{Attribute, Attributes, PutOptions, path::Path, ObjectStore};
use std::sync::Arc;
/// Sniff the MIME type from the content's magic bytes; unknown content
/// falls back to `application/octet-stream`.
fn detect_mime(bytes: &[u8]) -> &'static str {
    infer::get(bytes)
        .map(|t| t.mime_type())
        .unwrap_or("application/octet-stream")
}

/// `PosterStorage` adapter over any `object_store` backend (S3/MinIO in
/// production, in-memory in tests).
pub struct PosterStorageAdapter {
    store: Arc<dyn ObjectStore>,
}

impl PosterStorageAdapter {
    pub fn new(store: Arc<dyn ObjectStore>) -> Self {
        Self { store }
    }
    /// Convenience constructor: build the backing store from env-derived config.
    pub fn from_config(config: StorageConfig) -> anyhow::Result<Self> {
        Ok(Self::new(config.build_store()?))
    }
}
#[async_trait]
impl PosterStorage for PosterStorageAdapter {
    /// Upload poster bytes using the movie's id as the object key, tagging
    /// the object with a sniffed Content-Type attribute.
    async fn store_poster(
        &self,
        movie_id: &MovieId,
        image_bytes: &[u8],
    ) -> Result<PosterPath, DomainError> {
        let path = Path::from(movie_id.value().to_string());
        let mime = detect_mime(image_bytes);
        let mut attributes = Attributes::new();
        attributes.insert(Attribute::ContentType, mime.into());
        let opts = PutOptions { attributes, ..Default::default() };
        self.store
            .put_opts(&path, image_bytes.to_vec().into(), opts)
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        PosterPath::new(path.to_string())
    }
    /// Download poster bytes. A missing object maps to `DomainError::NotFound`;
    /// any other storage failure to `InfrastructureError`.
    async fn get_poster(&self, poster_path: &PosterPath) -> Result<Vec<u8>, DomainError> {
        let path = Path::from(poster_path.value().to_string());
        let result = self.store.get(&path).await.map_err(|e| match e {
            object_store::Error::NotFound { .. } => DomainError::NotFound("Poster not found".into()),
            _ => DomainError::InfrastructureError(e.to_string()),
        })?;
        result
            .bytes()
            .await
            .map(|b| b.to_vec())
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use object_store::memory::InMemory;
    use uuid::Uuid;
    // Tests run against an in-memory object store — no network or MinIO needed.
    fn adapter() -> PosterStorageAdapter {
        PosterStorageAdapter::new(Arc::new(InMemory::new()))
    }
    #[tokio::test]
    async fn store_and_retrieve_round_trip() {
        let adapter = adapter();
        let movie_id = MovieId::from_uuid(Uuid::new_v4());
        let bytes = b"fake-image-bytes";
        let path = adapter.store_poster(&movie_id, bytes).await.unwrap();
        let retrieved = adapter.get_poster(&path).await.unwrap();
        assert_eq!(retrieved, bytes);
    }
    #[tokio::test]
    async fn get_missing_returns_not_found() {
        let adapter = adapter();
        let path = PosterPath::new("nonexistent".into()).unwrap();
        let result = adapter.get_poster(&path).await;
        assert!(matches!(result, Err(DomainError::NotFound(_))));
    }
}

View File

@@ -4,3 +4,7 @@ version = "0.1.0"
edition = "2024"
[dependencies]
rss-feed = { package = "rss", version = "2" }
chrono = { workspace = true }
domain = { workspace = true }
application = { workspace = true }

View File

@@ -1,5 +1,59 @@
pub fn add(left: u64, right: u64) -> u64 {
left + right
use application::ports::RssFeedRenderer;
use domain::models::DiaryEntry;
use rss_feed::{ChannelBuilder, GuidBuilder, ItemBuilder};
/// Renders diary entries as an RSS 2.0 channel.
pub struct RssAdapter {
    // Channel-level <link> value (the site/feed URL).
    feed_link: String,
}

impl RssAdapter {
    pub fn new(feed_link: String) -> Self {
        Self { feed_link }
    }
}

impl RssFeedRenderer for RssAdapter {
    /// Build RSS XML for `entries`, using `title` as both the channel title
    /// and its description.
    fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String> {
        let items = entries
            .iter()
            .map(|e| {
                // Item title: "Movie Title (Year)".
                let item_title = format!(
                    "{} ({})",
                    e.movie().title().value(),
                    e.movie().release_year().value()
                );
                // "<rating>/5", plus the comment text when one exists.
                let description = match e.review().comment() {
                    Some(c) => format!("{}/5 — {}", e.review().rating().value(), c.value()),
                    None => format!("{}/5", e.review().rating().value()),
                };
                // RFC 822 date as RSS requires; watched_at is rendered as UTC.
                let pub_date = e
                    .review()
                    .watched_at()
                    .and_utc()
                    .format("%a, %d %b %Y %H:%M:%S +0000")
                    .to_string();
                // Stable GUID from the review id; marked as a non-permalink.
                let guid = GuidBuilder::default()
                    .value(e.review().id().value().to_string())
                    .permalink(false)
                    .build();
                ItemBuilder::default()
                    .title(Some(item_title))
                    .description(Some(description))
                    .pub_date(Some(pub_date))
                    .guid(Some(guid))
                    .build()
            })
            .collect::<Vec<_>>();
        let channel = ChannelBuilder::default()
            .title(title.to_string())
            .link(self.feed_link.clone())
            .description(title.to_string())
            .items(items)
            .build();
        Ok(channel.to_string())
    }
}
#[cfg(test)]
@@ -7,8 +61,16 @@ mod tests {
use super::*;
#[test]
fn it_works() {
let result = add(2, 2);
assert_eq!(result, 4);
fn render_feed_uses_provided_title() {
let adapter = RssAdapter::new("http://example.com".into());
let xml = adapter.render_feed(&[], "Custom Title").unwrap();
assert!(xml.contains("<title>Custom Title</title>"));
}
#[test]
fn render_feed_empty_entries_produces_valid_xml() {
let adapter = RssAdapter::new("http://example.com".into());
let xml = adapter.render_feed(&[], "My Feed").unwrap();
assert!(xml.starts_with("<?xml") || xml.starts_with("<rss"));
}
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "01a08873b7fa815ad98a56a0902b60414cfcdc2c7a8570351320c4bc425347c6"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "026e2afeb573707cb360fcdab8f6137aabfaf603b5ed57b98ac2888b4a0389ff"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false
]
},
"hash": "0963b9661182e139cd760bbabb0d6ea3a301a2a3adbdfdda4a88f333a1144c77"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT id, email, password_hash FROM users WHERE email = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "password_hash",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "password_hash",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE title = ? AND release_year = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "3047579c6ed13ce87aad9b9ce6300c02f0df3516979518976e13f9d9abc6a403"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "33d0dae7d16b0635c1c7eb5afd10824bb55af7cc7a854f590d326622863759d1"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "47f7cf95ce3450635b643ab710cadba96f40319140834d510bc5207b2552e055"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews WHERE movie_id = ?",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "4b3074b532342c6356ee0e8e4d8c4a830f016234bb690e1f6240f02824d6d84f"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO reviews (id, movie_id, user_id, rating, comment, watched_at, created_at)\n VALUES (?, ?, ?, ?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 7
},
"nullable": []
},
"hash": "630e092fcd33bc312befef352a98225e6e18e6079644b949258a39bf4b0fe3e5"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE external_metadata_id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "7bc4aebcb94547976d3d7e063e4e908fc22b977b3cbf063ee93ffe4648c42011"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO movies (id, external_metadata_id, title, release_year, director, poster_path)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(id) DO UPDATE SET\n external_metadata_id = excluded.external_metadata_id,\n title = excluded.title,\n release_year = excluded.release_year,\n director = excluded.director,\n poster_path = excluded.poster_path",
"describe": {
"columns": [],
"parameters": {
"Right": 6
},
"nullable": []
},
"hash": "7d7e23355ee0e442f2aa27e898dcfa40bdc4b09391afe04325f076157d9d84aa"
}

View File

@@ -0,0 +1,56 @@
{
"db_name": "SQLite",
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE movie_id = ? ORDER BY watched_at ASC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "af883f8b78f185077e2d3dcfaa0a6e62fbdfbf00c97c9b33b699dc631476181d"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "affe1eb261283c09d4b1ce6e684681755f079a044ffec8ff2bd79cfd8efe16b8"
}

View File

@@ -16,3 +16,4 @@ uuid = { workspace = true }
chrono = { workspace = true }
tracing = { workspace = true }
async-trait = { workspace = true }
tokio = { workspace = true }

View File

@@ -0,0 +1,24 @@
-- Movie catalogue: one row per film, keyed by an application-generated UUID
-- stored as TEXT (SQLite has no native UUID type).
CREATE TABLE IF NOT EXISTS movies (
id TEXT PRIMARY KEY NOT NULL,
-- Optional id from the external metadata provider; unique when present.
external_metadata_id TEXT UNIQUE,
title TEXT NOT NULL,
release_year INTEGER NOT NULL,
director TEXT,
poster_path TEXT
);
-- Supports lookup by (title, release_year) in get_movies_by_title_and_year.
CREATE INDEX IF NOT EXISTS idx_movies_title_year
ON movies (title, release_year);
-- One row per logged viewing; datetimes are stored as TEXT
-- in "%Y-%m-%d %H:%M:%S" format (see datetime_to_str/parse_datetime).
CREATE TABLE IF NOT EXISTS reviews (
id TEXT PRIMARY KEY NOT NULL,
movie_id TEXT NOT NULL REFERENCES movies(id),
user_id TEXT NOT NULL,
rating INTEGER NOT NULL,
comment TEXT,
watched_at TEXT NOT NULL,
created_at TEXT NOT NULL
);
-- Diary/history queries filter by movie and sort by watch date.
CREATE INDEX IF NOT EXISTS idx_reviews_movie_id ON reviews (movie_id);
CREATE INDEX IF NOT EXISTS idx_reviews_watched_at ON reviews (watched_at);

View File

@@ -0,0 +1,6 @@
-- Account table; ids are TEXT-encoded UUIDs, email is the login key.
-- password_hash stores the already-hashed credential, never plaintext.
CREATE TABLE IF NOT EXISTS users (
id TEXT PRIMARY KEY NOT NULL,
email TEXT UNIQUE NOT NULL,
password_hash TEXT NOT NULL,
created_at TEXT NOT NULL
);

View File

@@ -1,12 +1,41 @@
use async_trait::async_trait;
use domain::{
errors::DomainError,
events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated},
models::{
DiaryEntry, DiaryFilter, DirectorStat, FeedEntry, Movie, MonthlyRating,
Review, ReviewHistory, SortDirection, UserStats, UserTrends,
collections::{PageParams, Paginated},
},
ports::MovieRepository,
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear},
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear, ReviewId, UserId},
};
use sqlx::SqlitePool;
mod migrations;
mod models;
mod users;
use models::{
DiaryRow, DirectorCountRow, FeedRow, MonthlyRatingRow, MovieRow, ReviewRow,
UserTotalsRow, datetime_to_str,
};
pub use users::SqliteUserRepository;
/// Renders a `"YYYY-MM"` key as a short human label, e.g. `"2024-03"` -> `"Mar '24"`.
///
/// Inputs without a `-` separator come back unchanged; an unrecognised
/// month component (anything other than zero-padded "01".."12") is kept
/// verbatim in the label.
fn format_year_month(ym: &str) -> String {
    let mut pieces = ym.splitn(2, '-');
    let (year_part, month_part) = match (pieces.next(), pieces.next()) {
        (Some(y), Some(m)) => (y, m),
        _ => return ym.to_string(),
    };
    // Shorten the year by dropping its first two characters (the century);
    // fall back to the full year when it is too short to slice.
    let short_year = year_part.get(2..).unwrap_or(year_part);
    const MONTHS: [(&str, &str); 12] = [
        ("01", "Jan"), ("02", "Feb"), ("03", "Mar"), ("04", "Apr"),
        ("05", "May"), ("06", "Jun"), ("07", "Jul"), ("08", "Aug"),
        ("09", "Sep"), ("10", "Oct"), ("11", "Nov"), ("12", "Dec"),
    ];
    let month_label = MONTHS
        .iter()
        .find(|(key, _)| *key == month_part)
        .map(|(_, label)| *label)
        .unwrap_or(month_part);
    format!("{} '{}", month_label, short_year)
}
/// SQLite-backed implementation of the domain `MovieRepository` port.
pub struct SqliteMovieRepository {
pool: SqlitePool,
}
@@ -16,23 +45,284 @@ impl SqliteMovieRepository {
Self { pool }
}
/// Applies any pending SQL migrations against this repository's pool.
pub async fn migrate(&self) -> Result<(), DomainError> {
migrations::run(&self.pool).await
}
/// Logs the low-level sqlx error and returns an opaque infrastructure
/// error so SQL/driver details never leak to callers.
fn map_err(e: sqlx::Error) -> DomainError {
tracing::error!("Database error: {:?}", e);
DomainError::InfrastructureError("Database operation failed".into())
}
/// Total number of review rows, optionally restricted to one movie id.
/// Query text is left byte-identical to the sqlx offline cache entries.
async fn count_diary_entries(&self, movie_id: Option<&str>) -> Result<i64, DomainError> {
match movie_id {
None => sqlx::query_scalar!("SELECT COUNT(*) FROM reviews")
.fetch_one(&self.pool)
.await
.map_err(Self::map_err),
Some(id) => {
sqlx::query_scalar!("SELECT COUNT(*) FROM reviews WHERE movie_id = ?", id)
.fetch_one(&self.pool)
.await
.map_err(Self::map_err)
}
}
}
/// One page of diary rows across every user and movie, joined with the
/// movie columns, ordered by watch date in the requested direction.
async fn fetch_all_diary_rows(
&self,
sort: &SortDirection,
limit: i64,
offset: i64,
) -> Result<Vec<DiaryRow>, DomainError> {
match sort {
// ByRatingDesc only applies to user-scoped queries; falls back to date sort here
SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
ORDER BY r.watched_at DESC
LIMIT ? OFFSET ?",
limit,
offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err),
SortDirection::Ascending => sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
ORDER BY r.watched_at ASC
LIMIT ? OFFSET ?",
limit,
offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err),
}
}
/// One page of diary rows for a single movie, ordered by watch date in
/// the requested direction.
async fn fetch_movie_diary_rows(
&self,
movie_id: &str,
sort: &SortDirection,
limit: i64,
offset: i64,
) -> Result<Vec<DiaryRow>, DomainError> {
match sort {
// ByRatingDesc only applies to user-scoped queries; falls back to date sort here
SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.movie_id = ?
ORDER BY r.watched_at DESC
LIMIT ? OFFSET ?",
movie_id,
limit,
offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err),
SortDirection::Ascending => sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.movie_id = ?
ORDER BY r.watched_at ASC
LIMIT ? OFFSET ?",
movie_id,
limit,
offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err),
}
}
/// Total number of reviews logged by one user.
async fn count_user_diary_entries(&self, user_id: &str) -> Result<i64, DomainError> {
sqlx::query_scalar!(
"SELECT COUNT(*) FROM reviews WHERE user_id = ?",
user_id
)
.fetch_one(&self.pool)
.await
.map_err(Self::map_err)
}
/// One page of a user's diary, most recently watched first.
async fn fetch_user_diary_rows_by_watched(
&self,
user_id: &str,
limit: i64,
offset: i64,
) -> Result<Vec<DiaryRow>, DomainError> {
sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ?
ORDER BY r.watched_at DESC
LIMIT ? OFFSET ?",
user_id, limit, offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)
}
/// One page of a user's diary sorted by rating (highest first), with
/// watch date as the tiebreaker.
async fn fetch_user_diary_rows_by_rating(
&self,
user_id: &str,
limit: i64,
offset: i64,
) -> Result<Vec<DiaryRow>, DomainError> {
sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ?
ORDER BY r.rating DESC, r.watched_at DESC
LIMIT ? OFFSET ?",
user_id, limit, offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)
}
async fn count_feed_entries(&self) -> Result<i64, DomainError> {
sqlx::query_scalar!("SELECT COUNT(*) FROM reviews")
.fetch_one(&self.pool)
.await
.map_err(Self::map_err)
}
/// One page of activity-feed rows: diary columns plus the reviewer's
/// email from the users table, newest watch first.
async fn fetch_feed_rows(
&self,
limit: i64,
offset: i64,
) -> Result<Vec<FeedRow>, DomainError> {
sqlx::query_as!(
FeedRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,
u.email AS user_email
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
INNER JOIN users u ON u.id = r.user_id
ORDER BY r.watched_at DESC
LIMIT ? OFFSET ?",
limit, offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)
}
/// Distinct-movie count and average rating for one user.
/// avg_rating is NULL (None) when the user has no reviews.
async fn fetch_user_totals(&self, user_id: &str) -> Result<UserTotalsRow, DomainError> {
sqlx::query_as!(
UserTotalsRow,
r#"SELECT COUNT(DISTINCT movie_id) AS "total!: i64",
AVG(CAST(rating AS REAL)) AS avg_rating
FROM reviews WHERE user_id = ?"#,
user_id
)
.fetch_one(&self.pool)
.await
.map_err(Self::map_err)
}
/// Director most frequently reviewed by the user, if any.
/// NOTE(review): ties are broken arbitrarily by SQLite — confirm acceptable.
async fn fetch_user_favorite_director(
&self,
user_id: &str,
) -> Result<Option<String>, DomainError> {
let row = sqlx::query_scalar!(
"SELECT m.director
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ? AND m.director IS NOT NULL
GROUP BY m.director
ORDER BY COUNT(*) DESC
LIMIT 1",
user_id
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?;
// fetch_optional of a nullable column yields Option<Option<String>>.
Ok(row.flatten())
}
/// "YYYY-MM" of the month in which the user logged the most reviews,
/// or None when the user has none.
async fn fetch_user_most_active_month(
&self,
user_id: &str,
) -> Result<Option<String>, DomainError> {
// Option<Option<_>>: outer = no rows at all, inner = nullable column.
let result: Option<Option<String>> = sqlx::query_scalar!(
"SELECT strftime('%Y-%m', watched_at) AS month
FROM reviews
WHERE user_id = ?
GROUP BY month
ORDER BY COUNT(*) DESC
LIMIT 1",
user_id
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?;
Ok(result.flatten())
}
}
#[async_trait::async_trait]
#[async_trait]
impl MovieRepository for SqliteMovieRepository {
/// Looks a movie up by its external metadata provider id.
/// Returns Ok(None) when no row matches. (Stale `todo!()` diff remnant removed.)
async fn get_movie_by_external_id(
    &self,
    external_metadata_id: &ExternalMetadataId,
) -> Result<Option<Movie>, DomainError> {
    let id = external_metadata_id.value();
    sqlx::query_as!(
        MovieRow,
        "SELECT id, external_metadata_id, title, release_year, director, poster_path
             FROM movies WHERE external_metadata_id = ?",
        id
    )
    .fetch_optional(&self.pool)
    .await
    .map_err(Self::map_err)?
    .map(MovieRow::to_domain)
    .transpose()
}
/// Looks a movie up by its internal UUID; Ok(None) when absent.
/// (Stale `todo!()` diff remnant removed.)
async fn get_movie_by_id(&self, movie_id: &MovieId) -> Result<Option<Movie>, DomainError> {
    let id = movie_id.value().to_string();
    sqlx::query_as!(
        MovieRow,
        "SELECT id, external_metadata_id, title, release_year, director, poster_path
             FROM movies WHERE id = ?",
        id
    )
    .fetch_optional(&self.pool)
    .await
    .map_err(Self::map_err)?
    .map(MovieRow::to_domain)
    .transpose()
}
async fn get_movies_by_title_and_year(
@@ -40,25 +330,312 @@ impl MovieRepository for SqliteMovieRepository {
title: &MovieTitle,
year: &ReleaseYear,
) -> Result<Vec<Movie>, DomainError> {
todo!()
let title = title.value();
let year = year.value() as i64;
sqlx::query_as!(
MovieRow,
"SELECT id, external_metadata_id, title, release_year, director, poster_path
FROM movies WHERE title = ? AND release_year = ?",
title,
year
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)?
.into_iter()
.map(MovieRow::to_domain)
.collect()
}
/// Inserts the movie, or updates every mutable column when the id already
/// exists (idempotent refresh from the metadata provider).
/// (Stale `todo!()` diff remnant removed.)
async fn upsert_movie(&self, movie: &Movie) -> Result<(), DomainError> {
    let id = movie.id().value().to_string();
    let external_metadata_id = movie.external_metadata_id().map(|e| e.value().to_string());
    let title = movie.title().value();
    let release_year = movie.release_year().value() as i64;
    let director = movie.director();
    let poster_path = movie.poster_path().map(|p| p.value().to_string());
    sqlx::query!(
        "INSERT INTO movies (id, external_metadata_id, title, release_year, director, poster_path)
             VALUES (?, ?, ?, ?, ?, ?)
             ON CONFLICT(id) DO UPDATE SET
                external_metadata_id = excluded.external_metadata_id,
                title = excluded.title,
                release_year = excluded.release_year,
                director = excluded.director,
                poster_path = excluded.poster_path",
        id,
        external_metadata_id,
        title,
        release_year,
        director,
        poster_path
    )
    .execute(&self.pool)
    .await
    .map_err(Self::map_err)?;
    Ok(())
}
/// Persists a new review row and returns the `ReviewLogged` domain event
/// describing it. Datetimes are serialized via `datetime_to_str`.
/// (Stale `todo!()` diff remnant removed.)
async fn save_review(&self, review: &Review) -> Result<DomainEvent, DomainError> {
    let id = review.id().value().to_string();
    let movie_id = review.movie_id().value().to_string();
    let user_id = review.user_id().value().to_string();
    let rating = review.rating().value() as i64;
    let comment = review.comment().map(|c| c.value().to_string());
    let watched_at = datetime_to_str(review.watched_at());
    let created_at = datetime_to_str(review.created_at());
    sqlx::query!(
        "INSERT INTO reviews (id, movie_id, user_id, rating, comment, watched_at, created_at)
         VALUES (?, ?, ?, ?, ?, ?, ?)",
        id,
        movie_id,
        user_id,
        rating,
        comment,
        watched_at,
        created_at
    )
    .execute(&self.pool)
    .await
    .map_err(Self::map_err)?;
    Ok(DomainEvent::ReviewLogged {
        review_id: review.id().clone(),
        movie_id: review.movie_id().clone(),
        user_id: review.user_id().clone(),
        rating: review.rating().clone(),
        watched_at: *review.watched_at(),
    })
}
async fn query_diary(
&self,
filter: &DiaryFilter,
) -> Result<Paginated<DiaryEntry>, DomainError> {
todo!()
async fn query_diary(&self, filter: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> {
let limit = filter.page.limit as i64;
let offset = filter.page.offset as i64;
let (total, rows) = match (&filter.movie_id, &filter.user_id) {
(None, None) => tokio::try_join!(
self.count_diary_entries(None),
self.fetch_all_diary_rows(&filter.sort_by, limit, offset)
)?,
(Some(id), None) => {
let id_str = id.value().to_string();
tokio::try_join!(
self.count_diary_entries(Some(id_str.as_str())),
self.fetch_movie_diary_rows(&id_str, &filter.sort_by, limit, offset)
)?
}
(None, Some(uid)) => {
let uid_str = uid.value().to_string();
match &filter.sort_by {
SortDirection::ByRatingDesc => tokio::try_join!(
self.count_user_diary_entries(&uid_str),
self.fetch_user_diary_rows_by_rating(&uid_str, limit, offset)
)?,
_ => tokio::try_join!(
self.count_user_diary_entries(&uid_str),
self.fetch_user_diary_rows_by_watched(&uid_str, limit, offset)
)?,
}
}
(Some(_), Some(_)) => {
return Err(DomainError::ValidationError(
"Combined movie_id + user_id filter not supported".into(),
));
}
};
let items = rows
.into_iter()
.map(DiaryRow::to_domain)
.collect::<Result<Vec<_>, _>>()?;
Ok(Paginated {
items,
total_count: total as u64,
limit: filter.page.limit,
offset: filter.page.offset,
})
}
/// Looks a single review up by id; Ok(None) when absent.
async fn get_review_by_id(&self, review_id: &ReviewId) -> Result<Option<Review>, DomainError> {
let id = review_id.value().to_string();
sqlx::query_as!(
ReviewRow,
"SELECT id, movie_id, user_id, rating, comment, watched_at, created_at
FROM reviews WHERE id = ?",
id
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?
.map(ReviewRow::to_domain)
.transpose()
}
/// Deletes one review; succeeds even when the id does not exist
/// (DELETE of zero rows is not an error).
async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError> {
let id = review_id.value().to_string();
sqlx::query!("DELETE FROM reviews WHERE id = ?", id)
.execute(&self.pool)
.await
.map_err(Self::map_err)?;
Ok(())
}
/// Deletes one movie row. NOTE(review): reviews reference movies(id) —
/// confirm FK enforcement / orphan handling for remaining reviews.
async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError> {
let id = movie_id.value().to_string();
sqlx::query!("DELETE FROM movies WHERE id = ?", id)
.execute(&self.pool)
.await
.map_err(Self::map_err)?;
Ok(())
}
/// Loads a movie plus every review of it, oldest viewing first, as a
/// `ReviewHistory`. Fails with `DomainError::NotFound` when the movie id
/// is unknown. (Stale `todo!()` diff remnant removed.)
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError> {
    let id_str = movie_id.value().to_string();
    let movie = sqlx::query_as!(
        MovieRow,
        "SELECT id, external_metadata_id, title, release_year, director, poster_path
             FROM movies WHERE id = ?",
        id_str
    )
    .fetch_optional(&self.pool)
    .await
    .map_err(Self::map_err)?
    .ok_or_else(|| DomainError::NotFound(format!("Movie {}", id_str)))?
    .to_domain()?;
    let viewings = sqlx::query_as!(
        ReviewRow,
        "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at
             FROM reviews WHERE movie_id = ? ORDER BY watched_at ASC",
        id_str
    )
    .fetch_all(&self.pool)
    .await
    .map_err(Self::map_err)?
    .into_iter()
    .map(ReviewRow::to_domain)
    .collect::<Result<Vec<_>, _>>()?;
    Ok(ReviewHistory::new(movie, viewings))
}
/// One page of the global activity feed (all users), with the count and
/// page queries running concurrently.
async fn query_activity_feed(
&self,
page: &PageParams,
) -> Result<Paginated<FeedEntry>, DomainError> {
let limit = page.limit as i64;
let offset = page.offset as i64;
let (total, rows) = tokio::try_join!(
self.count_feed_entries(),
self.fetch_feed_rows(limit, offset)
)?;
let items = rows
.into_iter()
.map(FeedRow::to_domain)
.collect::<Result<Vec<_>, _>>()?;
Ok(Paginated {
items,
total_count: total as u64,
limit: page.limit,
offset: page.offset,
})
}
/// Aggregated profile stats: distinct movies, average rating, favourite
/// director and most active month; the three lookups run concurrently.
async fn get_user_stats(&self, user_id: &UserId) -> Result<UserStats, DomainError> {
let uid = user_id.value().to_string();
let (totals, fav_director, most_active) = tokio::try_join!(
self.fetch_user_totals(&uid),
self.fetch_user_favorite_director(&uid),
self.fetch_user_most_active_month(&uid)
)?;
// Present the raw "YYYY-MM" key as a short label, e.g. "Mar '24".
let most_active_month = most_active.map(|ym| format_year_month(&ym));
Ok(UserStats {
total_movies: totals.total,
avg_rating: totals.avg_rating,
favorite_director: fav_director,
most_active_month,
})
}
/// Full (unpaginated) diary for one user, most recent watch first.
async fn get_user_history(&self, user_id: &UserId) -> Result<Vec<DiaryEntry>, DomainError> {
let uid = user_id.value().to_string();
let rows = sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ?
ORDER BY r.watched_at DESC",
uid
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)?;
rows.into_iter().map(DiaryRow::to_domain).collect()
}
/// Trend data for the profile chart: per-month average rating over the
/// last 12 months plus the user's five most-reviewed directors.
async fn get_user_trends(&self, user_id: &UserId) -> Result<UserTrends, DomainError> {
let uid = user_id.value().to_string();
let (rating_rows, director_rows) = tokio::try_join!(
sqlx::query_as!(
MonthlyRatingRow,
r#"SELECT strftime('%Y-%m', watched_at) AS "month!",
AVG(CAST(rating AS REAL)) AS "avg_rating!: f64",
COUNT(*) AS "count!: i64"
FROM reviews
WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')
GROUP BY "month!"
ORDER BY "month!" ASC"#,
uid
)
.fetch_all(&self.pool),
sqlx::query_as!(
DirectorCountRow,
r#"SELECT m.director AS "director!",
COUNT(*) AS "count!: i64"
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.user_id = ? AND m.director IS NOT NULL
GROUP BY m.director
ORDER BY COUNT(*) DESC
LIMIT 5"#,
uid
)
.fetch_all(&self.pool)
)
.map_err(Self::map_err)?;
// Default of 1 avoids divide-by-zero when scaling bar widths downstream.
let max_director_count = director_rows.iter().map(|d| d.count).max().unwrap_or(1);
let monthly_ratings = rating_rows
.into_iter()
.map(|r| MonthlyRating {
month_label: format_year_month(&r.month),
year_month: r.month,
avg_rating: r.avg_rating,
count: r.count,
})
.collect();
let top_directors = director_rows
.into_iter()
.map(|d| DirectorStat { director: d.director, count: d.count })
.collect();
Ok(UserTrends { monthly_ratings, top_directors, max_director_count })
}
}

View File

@@ -0,0 +1,9 @@
use domain::errors::DomainError;
use sqlx::SqlitePool;
/// Applies the migration scripts embedded from `./migrations` against the
/// given pool, translating any failure into a `DomainError`.
pub(crate) async fn run(pool: &SqlitePool) -> Result<(), DomainError> {
    let outcome = sqlx::migrate!("./migrations").run(pool).await;
    outcome.map_err(|e| DomainError::InfrastructureError(e.to_string()))
}

View File

@@ -0,0 +1,205 @@
use chrono::NaiveDateTime;
use domain::{
errors::DomainError,
models::{DiaryEntry, FeedEntry, Movie, Review, UserSummary},
value_objects::{
Comment, ExternalMetadataId, MovieId, MovieTitle, PosterPath, Rating, ReleaseYear,
ReviewId, UserId,
},
};
use uuid::Uuid;
/// Raw `movies` row as stored in SQLite; ids are TEXT-encoded UUIDs.
#[derive(sqlx::FromRow)]
pub(crate) struct MovieRow {
pub id: String,
pub external_metadata_id: Option<String>,
pub title: String,
pub release_year: i64,
pub director: Option<String>,
pub poster_path: Option<String>,
}
impl MovieRow {
/// Converts the raw row into a domain `Movie`, validating each value
/// object; returns a `DomainError` when stored data is malformed.
pub fn to_domain(self) -> Result<Movie, DomainError> {
let id = MovieId::from_uuid(parse_uuid(&self.id)?);
let external_metadata_id = self
.external_metadata_id
.map(ExternalMetadataId::new)
.transpose()?;
let title = MovieTitle::new(self.title)?;
let release_year = ReleaseYear::new(self.release_year as u16)?;
let poster_path = self.poster_path.map(PosterPath::new).transpose()?;
Ok(Movie::from_persistence(
id,
external_metadata_id,
title,
release_year,
self.director,
poster_path,
))
}
}
/// Raw `reviews` row; datetimes are TEXT in "%Y-%m-%d %H:%M:%S".
#[derive(sqlx::FromRow)]
pub(crate) struct ReviewRow {
pub id: String,
pub movie_id: String,
pub user_id: String,
pub rating: i64,
pub comment: Option<String>,
pub watched_at: String,
pub created_at: String,
}
impl ReviewRow {
/// Converts the raw row into a domain `Review`, validating ids, rating,
/// comment and both datetimes.
pub fn to_domain(self) -> Result<Review, DomainError> {
let id = ReviewId::from_uuid(parse_uuid(&self.id)?);
let movie_id = MovieId::from_uuid(parse_uuid(&self.movie_id)?);
let user_id = UserId::from_uuid(parse_uuid(&self.user_id)?);
let rating = Rating::new(self.rating as u8)?;
let comment = self.comment.map(Comment::new).transpose()?;
let watched_at = parse_datetime(&self.watched_at)?;
let created_at = parse_datetime(&self.created_at)?;
Ok(Review::from_persistence(
id, movie_id, user_id, rating, comment, watched_at, created_at,
))
}
}
// Used by query_diary JOIN — r.id aliased to review_id to avoid ambiguity with m.id
#[derive(sqlx::FromRow)]
pub(crate) struct DiaryRow {
pub id: String,
pub external_metadata_id: Option<String>,
pub title: String,
pub release_year: i64,
pub director: Option<String>,
pub poster_path: Option<String>,
pub review_id: String,
pub movie_id: String,
pub user_id: String,
pub rating: i64,
pub comment: Option<String>,
pub watched_at: String,
pub created_at: String,
}
impl DiaryRow {
/// Splits the joined row into its movie and review halves, reusing the
/// `MovieRow`/`ReviewRow` converters, then pairs them as a `DiaryEntry`.
pub fn to_domain(self) -> Result<DiaryEntry, DomainError> {
let movie = MovieRow {
id: self.id,
external_metadata_id: self.external_metadata_id,
title: self.title,
release_year: self.release_year,
director: self.director,
poster_path: self.poster_path,
}
.to_domain()?;
let review = ReviewRow {
id: self.review_id,
movie_id: self.movie_id,
user_id: self.user_id,
rating: self.rating,
comment: self.comment,
watched_at: self.watched_at,
created_at: self.created_at,
}
.to_domain()?;
Ok(DiaryEntry::new(movie, review))
}
}
// Like DiaryRow but includes user_email from JOIN with users table
#[derive(sqlx::FromRow)]
pub(crate) struct FeedRow {
pub id: String,
pub external_metadata_id: Option<String>,
pub title: String,
pub release_year: i64,
pub director: Option<String>,
pub poster_path: Option<String>,
pub review_id: String,
pub movie_id: String,
pub user_id: String,
pub rating: i64,
pub comment: Option<String>,
pub watched_at: String,
pub created_at: String,
pub user_email: String,
}
impl FeedRow {
/// Delegates the shared columns to `DiaryRow::to_domain` and attaches
/// the reviewer's email to build a `FeedEntry`.
pub fn to_domain(self) -> Result<FeedEntry, DomainError> {
let diary = DiaryRow {
id: self.id,
external_metadata_id: self.external_metadata_id,
title: self.title,
release_year: self.release_year,
director: self.director,
poster_path: self.poster_path,
review_id: self.review_id,
movie_id: self.movie_id,
user_id: self.user_id,
rating: self.rating,
comment: self.comment,
watched_at: self.watched_at,
created_at: self.created_at,
}
.to_domain()?;
Ok(FeedEntry::new(diary, self.user_email))
}
}
#[derive(sqlx::FromRow)]
pub(crate) struct UserSummaryRow {
pub id: String,
pub email: String,
pub total_movies: i64,
pub avg_rating: Option<f64>,
}
impl UserSummaryRow {
    /// Map the aggregated row onto the `UserSummary` domain model,
    /// parsing the stored TEXT id into a typed `UserId`.
    pub fn to_domain(self) -> Result<UserSummary, DomainError> {
        let user_id = UserId::from_uuid(parse_uuid(&self.id)?);
        Ok(UserSummary {
            user_id,
            email: self.email,
            total_movies: self.total_movies,
            avg_rating: self.avg_rating,
        })
    }
}
/// Overall totals for one user's profile header: review count and
/// average rating (None when there are no reviews).
#[derive(sqlx::FromRow)]
pub(crate) struct UserTotalsRow {
    pub total: i64,
    pub avg_rating: Option<f64>,
}
/// One (director, watch count) pair for the "most watched directors"
/// chart.
#[derive(sqlx::FromRow)]
pub(crate) struct DirectorCountRow {
    pub director: String,
    pub count: i64,
}
/// Per-month trend row: month key, average rating that month, and the
/// number of reviews it is based on.
#[derive(sqlx::FromRow)]
pub(crate) struct MonthlyRatingRow {
    pub month: String,
    pub avg_rating: f64,
    pub count: i64,
}
/// Parse a TEXT column into a `Uuid`, mapping failures to an
/// infrastructure error that names the offending value.
pub(crate) fn parse_uuid(s: &str) -> Result<Uuid, DomainError> {
    match Uuid::parse_str(s) {
        Ok(uuid) => Ok(uuid),
        Err(e) => Err(DomainError::InfrastructureError(format!(
            "Invalid UUID '{}': {}",
            s, e
        ))),
    }
}
/// Serialize a `NaiveDateTime` into the canonical storage format used
/// by the SQLite schema ("%Y-%m-%d %H:%M:%S"); the inverse of
/// `parse_datetime`.
pub(crate) fn datetime_to_str(dt: &NaiveDateTime) -> String {
    format!("{}", dt.format("%Y-%m-%d %H:%M:%S"))
}
/// Parse a datetime TEXT column written by `datetime_to_str`; any
/// malformed value becomes an infrastructure error naming the input.
pub(crate) fn parse_datetime(s: &str) -> Result<NaiveDateTime, DomainError> {
    match NaiveDateTime::parse_from_str(s, "%Y-%m-%d %H:%M:%S") {
        Ok(dt) => Ok(dt),
        Err(e) => Err(DomainError::InfrastructureError(format!(
            "Invalid datetime '{}': {}",
            s, e
        ))),
    }
}

View File

@@ -0,0 +1,172 @@
use async_trait::async_trait;
use chrono::Utc;
use sqlx::SqlitePool;
use domain::{
errors::DomainError,
models::User,
ports::UserRepository,
value_objects::{Email, PasswordHash, UserId},
};
use super::models::UserSummaryRow;
/// SQLite-backed implementation of the domain `UserRepository` port.
pub struct SqliteUserRepository {
    pool: SqlitePool,
}
impl SqliteUserRepository {
    /// Wrap an existing connection pool; the pool is cloned cheaply by
    /// callers (sqlx pools are internally reference-counted).
    pub fn new(pool: SqlitePool) -> Self {
        Self { pool }
    }
    /// Log the low-level sqlx error and return an opaque infrastructure
    /// error so database details never leak to callers / responses.
    fn map_err(e: sqlx::Error) -> DomainError {
        tracing::error!("Database error: {:?}", e);
        DomainError::InfrastructureError("Database operation failed".into())
    }
}
#[async_trait]
impl UserRepository for SqliteUserRepository {
async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError> {
let email_str = email.value();
let row = sqlx::query!(
"SELECT id, email, password_hash FROM users WHERE email = ?",
email_str
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?;
match row {
None => Ok(None),
Some(r) => {
let id = uuid::Uuid::parse_str(&r.id)
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
let email = Email::new(r.email)
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
let hash = PasswordHash::new(r.password_hash)
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
Ok(Some(User::from_persistence(UserId::from_uuid(id), email, hash)))
}
}
}
async fn save(&self, user: &User) -> Result<(), DomainError> {
let id = user.id().value().to_string();
let email = user.email().value();
let hash = user.password_hash().value();
let created_at = Utc::now().to_rfc3339();
let result = sqlx::query!(
"INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
id,
email,
hash,
created_at
)
.execute(&self.pool)
.await
.map_err(Self::map_err)?;
if result.rows_affected() == 0 {
return Err(DomainError::ValidationError("Email already registered".into()));
}
Ok(())
}
async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError> {
let id_str = id.value().to_string();
let row = sqlx::query!(
"SELECT id, email, password_hash FROM users WHERE id = ?",
id_str
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?;
match row {
None => Ok(None),
Some(r) => {
let uuid = uuid::Uuid::parse_str(&r.id)
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
let email = Email::new(r.email)
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
let hash = PasswordHash::new(r.password_hash)
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
Ok(Some(User::from_persistence(UserId::from_uuid(uuid), email, hash)))
}
}
}
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> {
sqlx::query_as!(
UserSummaryRow,
r#"SELECT u.id,
u.email,
COUNT(DISTINCT r.movie_id) AS "total_movies!: i64",
AVG(CAST(r.rating AS REAL)) AS avg_rating
FROM users u
LEFT JOIN reviews r ON r.user_id = u.id
GROUP BY u.id, u.email
ORDER BY u.email ASC"#
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)?
.into_iter()
.map(UserSummaryRow::to_domain)
.collect()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use sqlx::SqlitePool;
    /// Create an in-memory SQLite database with just the `users` table
    /// and a repository wired to it.
    async fn setup() -> (SqlitePool, SqliteUserRepository) {
        let pool = SqlitePool::connect(":memory:").await.unwrap();
        sqlx::query(
            "CREATE TABLE users (id TEXT PRIMARY KEY, email TEXT NOT NULL UNIQUE, password_hash TEXT NOT NULL, created_at TEXT NOT NULL)"
        )
        .execute(&pool)
        .await
        .unwrap();
        let repo = SqliteUserRepository::new(pool.clone());
        (pool, repo)
    }
    #[tokio::test]
    async fn find_by_id_returns_none_when_not_found() {
        let (_, repo) = setup().await;
        // Random UUID that was never inserted.
        let result = repo
            .find_by_id(&UserId::from_uuid(uuid::Uuid::new_v4()))
            .await
            .unwrap();
        assert!(result.is_none());
    }
    #[tokio::test]
    async fn find_by_id_returns_user_when_found() {
        let (pool, repo) = setup().await;
        let id = uuid::Uuid::new_v4();
        // Seed a row directly; the hash only needs to satisfy
        // PasswordHash validation, not verify against a password.
        sqlx::query(
            "INSERT INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)"
        )
        .bind(id.to_string())
        .bind("test@example.com")
        .bind("$argon2id$v=19$m=65536,t=2,p=1$fakesalt$fakehash")
        .bind("2026-01-01T00:00:00Z")
        .execute(&pool)
        .await
        .unwrap();
        let result = repo
            .find_by_id(&UserId::from_uuid(id))
            .await
            .unwrap();
        assert!(result.is_some());
        assert_eq!(result.unwrap().email().value(), "test@example.com");
    }
}

View File

@@ -0,0 +1,13 @@
[package]
name = "template-askama"
version = "0.1.0"
edition = "2024"
[dependencies]
askama = { version = "0.16.0" }
serde = { workspace = true }
chrono = { workspace = true }
domain = { workspace = true }
application = { workspace = true }

View File

@@ -0,0 +1,231 @@
use askama::Template;
use chrono::Datelike;
use application::ports::{
ActivityFeedPageData, HtmlPageContext, HtmlRenderer, LoginPageData,
NewReviewPageData, ProfilePageData, RegisterPageData, UsersPageData,
};
use domain::models::{
DiaryEntry, FeedEntry, MonthActivity, MonthlyRating, UserStats, UserSummary, UserTrends,
collections::Paginated,
};
/// Askama view models. Field names must match the identifiers used in
/// the corresponding template files exactly.
/// Paginated diary page.
#[derive(Template)]
#[template(path = "diary.html")]
struct DiaryTemplate<'a> {
    entries: &'a [DiaryEntry],
    current_offset: u32,
    limit: u32,
    has_more: bool,
    ctx: &'a HtmlPageContext,
}
/// Login form, with an optional server-side error message.
#[derive(Template)]
#[template(path = "login.html")]
struct LoginTemplate<'a> {
    error: Option<&'a str>,
    ctx: &'a HtmlPageContext,
}
/// Registration form, with an optional server-side error message.
#[derive(Template)]
#[template(path = "register.html")]
struct RegisterTemplate<'a> {
    error: Option<&'a str>,
    ctx: &'a HtmlPageContext,
}
/// "Log a review" form, with an optional server-side error message.
#[derive(Template)]
#[template(path = "new_review.html")]
struct NewReviewTemplate<'a> {
    error: Option<&'a str>,
    ctx: &'a HtmlPageContext,
}
/// Paginated all-users activity feed.
#[derive(Template)]
#[template(path = "activity_feed.html")]
struct ActivityFeedTemplate<'a> {
    entries: &'a [FeedEntry],
    current_offset: u32,
    limit: u32,
    has_more: bool,
    ctx: &'a HtmlPageContext,
}
/// Members list with per-user aggregate stats.
#[derive(Template)]
#[template(path = "users.html")]
struct UsersTemplate<'a> {
    users: &'a [UserSummary],
    ctx: &'a HtmlPageContext,
}
/// One bar of the "average rating per month" chart: the domain rating
/// plus its precomputed pixel height (templates avoid float math).
struct MonthlyRatingRow<'a> {
    rating: &'a MonthlyRating,
    bar_height_px: i64,
}
/// Profile page view model. The `view` string selects which tab is
/// rendered ("recent", "ratings", "history", "trends"); the Option
/// fields are only populated for the tab that needs them.
#[derive(Template)]
#[template(path = "profile.html")]
struct ProfileTemplate<'a> {
    ctx: &'a HtmlPageContext,
    profile_display_name: String,
    stats: &'a UserStats,
    view: &'a str,
    entries: Option<&'a Paginated<DiaryEntry>>,
    current_offset: u32,
    has_more: bool,
    limit: u32,
    history: Option<&'a Vec<MonthActivity>>,
    trends: Option<&'a UserTrends>,
    monthly_rating_rows: Vec<MonthlyRatingRow<'a>>,
    heatmap: Vec<HeatmapCell>,
}
/// One cell of the 12-month activity heatmap; `alpha` is the opacity
/// applied via a CSS custom property (see build_heatmap).
struct HeatmapCell {
    month_label: String,
    count: i64,
    alpha: f64,
}
/// Render a past timestamp as a coarse human-friendly age ("just now",
/// "5 min ago", "3 h ago", "yesterday", "12 days ago"), falling back
/// to an absolute date once it is 30 days old or more.
#[allow(dead_code)]
fn relative_time(dt: chrono::NaiveDateTime) -> String {
    let elapsed = chrono::Utc::now().naive_utc().signed_duration_since(dt);
    // Future timestamps and sub-minute ages both collapse to "just now".
    if elapsed.num_seconds() <= 0 || elapsed.num_minutes() < 1 {
        return "just now".to_string();
    }
    let minutes = elapsed.num_minutes();
    let hours = elapsed.num_hours();
    let days = elapsed.num_days();
    match () {
        _ if minutes < 60 => format!("{} min ago", minutes),
        _ if hours < 24 => format!("{} h ago", hours),
        _ if days == 1 => "yesterday".to_string(),
        _ if days < 30 => format!("{} days ago", days),
        _ => dt.format("%b %-d, %Y").to_string(),
    }
}
/// Build the 12 cells of the current-year activity heatmap.
///
/// For each calendar month, looks up the matching "YYYY-MM" entry in
/// `history` (missing months count as 0) and derives an opacity:
/// 0.05 for empty months, otherwise 0.15..0.90 scaled linearly by the
/// month's share of the busiest month.
fn build_heatmap(history: &[MonthActivity]) -> Vec<HeatmapCell> {
    const MONTHS: [(&str, &str); 12] = [
        ("01", "Jan"), ("02", "Feb"), ("03", "Mar"), ("04", "Apr"),
        ("05", "May"), ("06", "Jun"), ("07", "Jul"), ("08", "Aug"),
        ("09", "Sep"), ("10", "Oct"), ("11", "Nov"), ("12", "Dec"),
    ];
    let current_year = chrono::Utc::now().year();
    let counts: Vec<i64> = MONTHS
        .iter()
        .map(|(num, _)| {
            let key = format!("{}-{}", current_year, num);
            history
                .iter()
                .find(|a| a.year_month == key)
                .map_or(0, |a| a.count)
        })
        .collect();
    // Guard against division by zero when every month is empty.
    let max = std::cmp::max(1, counts.iter().copied().max().unwrap_or(0));
    MONTHS
        .iter()
        .zip(counts.iter())
        .map(|((_, label), &count)| {
            let alpha = if count == 0 {
                0.05
            } else {
                0.15 + 0.75 * (count as f64 / max as f64)
            };
            HeatmapCell {
                month_label: label.to_string(),
                count,
                alpha,
            }
        })
        .collect()
}
/// Scale a 0–5 average rating onto a 0–60 px bar height, truncating
/// toward zero (matches how the trends chart sizes its bars).
fn bar_height_px(avg_rating: f64) -> i64 {
    let fraction_of_max = avg_rating / 5.0;
    (fraction_of_max * 60.0) as i64
}
/// `HtmlRenderer` implementation backed by compile-time Askama
/// templates. Stateless: all data arrives per render call.
pub struct AskamaHtmlRenderer;
impl AskamaHtmlRenderer {
    pub fn new() -> Self {
        Self {}
    }
}
impl HtmlRenderer for AskamaHtmlRenderer {
    /// Render the paginated diary page. `has_more` drives the "Next"
    /// link in the template.
    fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String> {
        // NOTE(review): total_count is cast to u32 for the comparison;
        // assumes counts fit in u32 — confirm upstream bounds.
        let has_more = (data.offset + data.limit) < data.total_count as u32;
        DiaryTemplate {
            entries: &data.items,
            current_offset: data.offset,
            limit: data.limit,
            has_more,
            ctx: &ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }
    /// Render the login form, optionally with an error banner.
    fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String> {
        LoginTemplate {
            error: data.error,
            ctx: &data.ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }
    /// Render the registration form, optionally with an error banner.
    fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String> {
        RegisterTemplate {
            error: data.error,
            ctx: &data.ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }
    /// Render the "log a review" form, optionally with an error banner.
    fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String> {
        NewReviewTemplate {
            error: data.error,
            ctx: &data.ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }
    /// Render the all-users activity feed; pagination state arrives
    /// precomputed in the page data.
    fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String> {
        ActivityFeedTemplate {
            entries: &data.entries.items,
            current_offset: data.current_offset,
            limit: data.limit,
            has_more: data.has_more,
            ctx: &data.ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }
    /// Render the members list.
    fn render_users_page(&self, data: UsersPageData) -> Result<String, String> {
        UsersTemplate {
            users: &data.users,
            ctx: &data.ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }
    /// Render a user profile. Derived view state (heatmap cells,
    /// display name, bar heights) is computed here so the template
    /// stays free of float math and string parsing.
    fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String> {
        let heatmap = data.history.as_deref()
            .map(|h| build_heatmap(h))
            .unwrap_or_default();
        // Display name = local part of the email ("alice@x" -> "alice").
        let profile_display_name = data.profile_user_email
            .split('@').next().unwrap_or(&data.profile_user_email).to_string();
        let monthly_rating_rows: Vec<MonthlyRatingRow<'_>> = data.trends.as_ref()
            .map(|t| t.monthly_ratings.iter().map(|r| MonthlyRatingRow {
                bar_height_px: bar_height_px(r.avg_rating),
                rating: r,
            }).collect())
            .unwrap_or_default();
        ProfileTemplate {
            ctx: &data.ctx,
            profile_display_name,
            stats: &data.stats,
            view: &data.view,
            entries: data.entries.as_ref(),
            current_offset: data.current_offset,
            has_more: data.has_more,
            limit: data.limit,
            history: data.history.as_ref(),
            trends: data.trends.as_ref(),
            monthly_rating_rows,
            heatmap,
        }
        .render()
        .map_err(|e| e.to_string())
    }
}

View File

@@ -0,0 +1,50 @@
{% extends "base.html" %}
{# Activity feed: recent reviews from every user, each entry linking
   to the reviewing user's profile. Rendered via ActivityFeedTemplate. #}
{% block content %}
<div class="diary">
{% for entry in entries %}
<article class="entry">
{% if let Some(poster) = entry.movie().poster_path() %}
<div class="poster">
<img src="/posters/{{ poster.value() }}" alt="">
</div>
{% endif %}
<div class="entry-body">
<div class="entry-title">
{{ entry.movie().title().value() }}
<span class="year">({{ entry.movie().release_year().value() }})</span>
</div>
{% if let Some(dir) = entry.movie().director() %}
<div class="director">{{ dir }}</div>
{% endif %}
<div class="rating">
{% for filled in entry.review().stars() %}
<span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
{% endfor %}
</div>
{% if let Some(comment) = entry.review().comment() %}
<div class="comment">{{ comment.value() }}</div>
{% endif %}
<div class="feed-meta">
<a href="/users/{{ entry.review().user_id().value() }}" class="feed-user">{{ entry.user_display_name() }}</a>
<span class="feed-time">{{ entry.review().watched_at().format("%b %-d, %Y") }}</span>
</div>
{# Delete is only offered to the review's owner. #}
{% if ctx.is_current_user(entry.review().user_id().value()) %}
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
<button type="submit">Delete</button>
</form>
{% endif %}
</div>
</article>
{% else %}
<p class="empty">No movies logged yet.</p>
{% endfor %}
</div>
{# >= limit guard keeps the u32 subtraction below from underflowing. #}
<nav class="pagination">
{% if current_offset >= limit %}
<a href="/?offset={{ current_offset - limit }}">&larr; Prev</a>
{% endif %}
{% if has_more %}
<a href="/?offset={{ current_offset + limit }}">Next &rarr;</a>
{% endif %}
</nav>
{% endblock %}

View File

@@ -0,0 +1,42 @@
{# Base layout: head with SEO/OG tags from HtmlPageContext, shared nav,
   and a content block each page template fills in. #}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>{{ ctx.page_title }}</title>
<meta name="description" content="A personal movie diary — track what you watch, rate and review films.">
<meta property="og:type" content="website">
<meta property="og:site_name" content="Movies Diary">
<meta property="og:title" content="{{ ctx.page_title }}">
<meta property="og:url" content="{{ ctx.canonical_url }}">
<meta name="twitter:card" content="summary">
<meta name="twitter:title" content="{{ ctx.page_title }}">
<link rel="canonical" href="{{ ctx.canonical_url }}">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet">
<link rel="stylesheet" href="/static/style.css">
</head>
<body>
<header>
<a href="/" class="site-title">Movies Diary</a>
<nav>
<a href="/">Feed</a>
<a href="/users">Users</a>
{# rss_url is page-dependent: per-user feed on profile pages. #}
<a href="{{ ctx.rss_url }}">RSS</a>
{# Logged-in check; the bound email itself is not rendered here. #}
{% if let Some(email) = ctx.user_email %}
<a href="/reviews/new">Add Review</a>
<a href="/logout">Logout</a>
{% else %}
<a href="/login">Login</a>
{% if ctx.register_enabled %}
<a href="/register">Register</a>
{% endif %}
{% endif %}
</nav>
</header>
<main>
{% block content %}{% endblock %}
</main>
</body>
</html>

View File

@@ -0,0 +1,51 @@
{% extends "base.html" %}
{# Diary listing: logged movies with poster, rating, comment and a
   delete action for the entry's owner. #}
{% block content %}
<div class="diary">
{% for entry in entries %}
<article class="entry">
{% if let Some(poster) = entry.movie().poster_path() %}
<div class="poster">
<img src="/posters/{{ poster.value() }}" alt="">
</div>
{% endif %}
<div class="entry-body">
<div class="entry-title">
{{ entry.movie().title().value() }}
<span class="year">({{ entry.movie().release_year().value() }})</span>
</div>
{% if let Some(dir) = entry.movie().director() %}
<div class="director">{{ dir }}</div>
{% endif %}
{# Star row driven by review().stars(), matching the other templates,
   instead of five hand-written rating comparisons. #}
<div class="rating">
{% for filled in entry.review().stars() %}
<span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
{% endfor %}
</div>
{% if let Some(comment) = entry.review().comment() %}
<div class="comment">{{ comment.value() }}</div>
{% endif %}
<div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
{# Same owner-check helper as the feed and profile templates. #}
{% if ctx.is_current_user(entry.review().user_id().value()) %}
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
<button type="submit">Delete</button>
</form>
{% endif %}
</div>
</article>
{% else %}
<p class="empty">No movies logged yet.</p>
{% endfor %}
</div>
<nav class="pagination">
{# >= limit (not > 0): current_offset and limit are u32, so the
   subtraction below would underflow when 0 < offset < limit. #}
{% if current_offset >= limit %}
<a href="/?offset={{ current_offset - limit }}">&larr; Prev</a>
{% endif %}
{% if has_more %}
<a href="/?offset={{ current_offset + limit }}">Next &rarr;</a>
{% endif %}
</nav>
{% endblock %}

View File

@@ -0,0 +1,18 @@
{% extends "base.html" %}
{# Login form: posts credentials to POST /login; an optional
   server-side error message is shown above the form. #}
{% block content %}
<h1>Login</h1>
{% if let Some(err) = error %}
<p class="error">{{ err }}</p>
{% endif %}
<form method="POST" action="/login">
<label>
Email<br>
<input type="email" name="email" required autocomplete="email">
</label>
<label>
Password<br>
<input type="password" name="password" required autocomplete="current-password">
</label>
<button type="submit">Login</button>
</form>
{% endblock %}

View File

@@ -0,0 +1,40 @@
{% extends "base.html" %}
{# Review form: either an OMDB id (metadata fetched server-side) or
   manual title/year/director, plus rating and watch date. #}
{% block content %}
<h1>Log a Review</h1>
{% if let Some(err) = error %}
<p class="error">{{ err }}</p>
{% endif %}
<form method="POST" action="/reviews">
<label>
OMDB ID <span class="optional">(optional)</span><br>
<input type="text" name="external_metadata_id" placeholder="tt0166924">
</label>
<hr>
<label>
Title<br>
<input type="text" name="manual_title">
</label>
<label>
Year<br>
<input type="number" name="manual_release_year" min="1888" max="2100">
</label>
<label>
Director<br>
<input type="text" name="manual_director">
</label>
<hr>
{# Label fixed: the dash was lost to an encoding mishap ("(05)");
   the input below accepts 0 through 5. #}
<label>
Rating (0&ndash;5)<br>
<input type="number" name="rating" min="0" max="5" required>
</label>
<label>
Watched<br>
<input type="datetime-local" name="watched_at" required>
</label>
<label>
Comment<br>
<textarea name="comment"></textarea>
</label>
<button type="submit">Log Review</button>
</form>
{% endblock %}

View File

@@ -0,0 +1,165 @@
{% extends "base.html" %}
{# Profile page with four tabs selected by `view`:
   recent / ratings (paginated entry list), history (heatmap +
   per-month entries), trends (rating + director charts). #}
{% block content %}
<div class="profile">
<div class="stats-header">
<div class="profile-name">{{ profile_display_name }}</div>
<div class="stats-grid">
<div class="stat-tile">
<div class="stat-value">{{ stats.total_movies }}</div>
<div class="stat-label">movies</div>
</div>
<div class="stat-tile">
<div class="stat-value">{{ stats.avg_rating_display() }}★</div>
<div class="stat-label">avg rating</div>
</div>
<div class="stat-tile">
<div class="stat-value">{{ stats.favorite_director_display() }}</div>
<div class="stat-label">fav director</div>
</div>
<div class="stat-tile">
<div class="stat-value">{{ stats.most_active_month_display() }}</div>
<div class="stat-label">most active</div>
</div>
</div>
</div>
<div class="view-tabs">
<a href="?view=recent" class="view-tab {% if view == "recent" %}active{% endif %}">Recent</a>
<a href="?view=ratings" class="view-tab {% if view == "ratings" %}active{% endif %}">Top Rated</a>
<a href="?view=history" class="view-tab {% if view == "history" %}active{% endif %}">History</a>
<a href="?view=trends" class="view-tab {% if view == "trends" %}active{% endif %}">Trends</a>
</div>
{% if view == "history" %}
{% if let Some(hist) = history %}
{# Current-year heatmap; cell opacity comes precomputed from Rust. #}
<div class="heatmap-section">
<div class="heatmap-label">Movies watched this year</div>
<div class="heatmap">
{% for cell in heatmap %}
<div class="heatmap-cell" style="--alpha: {{ cell.alpha }}">
<div class="heatmap-count">{{ cell.count }}</div>
<div class="heatmap-month">{{ cell.month_label }}</div>
</div>
{% endfor %}
</div>
</div>
{% for month in hist %}
<div class="history-month">
<h3 class="month-heading">{{ month.month_label }} <span class="month-count">{{ month.count }}</span></h3>
<div class="diary">
{% for entry in month.entries %}
<article class="entry">
{% if let Some(poster) = entry.movie().poster_path() %}
<div class="poster"><img src="/posters/{{ poster.value() }}" alt=""></div>
{% endif %}
<div class="entry-body">
<div class="entry-title">{{ entry.movie().title().value() }} <span class="year">({{ entry.movie().release_year().value() }})</span></div>
{% if let Some(dir) = entry.movie().director() %}<div class="director">{{ dir }}</div>{% endif %}
<div class="rating">
{% for filled in entry.review().stars() %}
<span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
{% endfor %}
</div>
<div class="watched-at">{{ entry.review().watched_at().format("%b %-d") }}</div>
</div>
</article>
{% endfor %}
</div>
</div>
{% else %}
<p class="empty">No movies logged yet.</p>
{% endfor %}
{% endif %}
{% elif view == "trends" %}
{% if let Some(t) = trends %}
<div class="trends-section">
{# Bar heights are precomputed pixel values (bar_height_px). #}
{% if !monthly_rating_rows.is_empty() %}
<div class="chart-block">
<div class="chart-label">Average rating per month</div>
<div class="bar-chart">
{% for row in monthly_rating_rows %}
<div class="bar-col">
<div class="bar-value">{{ "{:.1}"|format(row.rating.avg_rating) }}</div>
<div class="bar-fill" style="height: {{ row.bar_height_px }}px"></div>
<div class="bar-month">{{ row.rating.month_label }}</div>
</div>
{% endfor %}
</div>
</div>
{% endif %}
{% if !t.top_directors.is_empty() %}
<div class="chart-block">
<div class="chart-label">Most watched directors</div>
<div class="director-chart">
{% for d in t.top_directors %}
<div class="director-row">
<div class="director-name">{{ d.director }}</div>
<div class="director-bar">
{# Bars scaled relative to the most-watched director. #}
{% if t.max_director_count > 0 %}
<div class="director-bar-fill" style="width: {{ d.count * 100 / t.max_director_count }}%"></div>
{% else %}
<div class="director-bar-fill" style="width: 0%"></div>
{% endif %}
</div>
<div class="director-count">{{ d.count }}</div>
</div>
{% endfor %}
</div>
</div>
{% endif %}
</div>
{% endif %}
{% else %}
{# "recent" / "ratings" tabs: paginated entry list. #}
{% if let Some(paged) = entries %}
<div class="diary">
{% for entry in paged.items %}
<article class="entry">
{% if let Some(poster) = entry.movie().poster_path() %}
<div class="poster">
<img src="/posters/{{ poster.value() }}" alt="">
</div>
{% endif %}
<div class="entry-body">
<div class="entry-title">
{{ entry.movie().title().value() }}
<span class="year">({{ entry.movie().release_year().value() }})</span>
</div>
{% if let Some(dir) = entry.movie().director() %}
<div class="director">{{ dir }}</div>
{% endif %}
<div class="rating">
{% for filled in entry.review().stars() %}
<span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
{% endfor %}
</div>
{% if let Some(comment) = entry.review().comment() %}
<div class="comment">{{ comment.value() }}</div>
{% endif %}
<div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
{% if ctx.is_current_user(entry.review().user_id().value()) %}
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
<button type="submit">Delete</button>
</form>
{% endif %}
</div>
</article>
{% else %}
<p class="empty">No reviews yet.</p>
{% endfor %}
</div>
<nav class="pagination">
{% if current_offset >= limit %}
<a href="?view={{ view }}&offset={{ current_offset - limit }}">&larr; Prev</a>
{% endif %}
{% if has_more %}
<a href="?view={{ view }}&offset={{ current_offset + limit }}">Next &rarr;</a>
{% endif %}
</nav>
{% endif %}
{% endif %}
</div>
{% endblock %}

View File

@@ -0,0 +1,18 @@
{% extends "base.html" %}
{# Registration form: posts to POST /register; only reachable when
   registration is enabled (see base.html nav gate). #}
{% block content %}
<h1>Register</h1>
{% if let Some(err) = error %}
<p class="error">{{ err }}</p>
{% endif %}
<form method="POST" action="/register">
<label>
Email<br>
<input type="email" name="email" required autocomplete="email">
</label>
<label>
Password<br>
<input type="password" name="password" required autocomplete="new-password">
</label>
<button type="submit">Register</button>
</form>
{% endblock %}

View File

@@ -0,0 +1,18 @@
{% extends "base.html" %}
{# Members list: one row per user with aggregate stats (distinct
   movies reviewed, average rating) and a profile link. #}
{% block content %}
<div class="users-list">
<h2 class="page-title">Members</h2>
{% for user in users %}
<div class="user-row">
<div class="user-avatar">{{ user.initial() }}</div>
<div class="user-info">
<div class="user-name">{{ user.display_name() }}</div>
<div class="user-meta">{{ user.total_movies }} movies · avg {{ user.avg_rating_display() }}★</div>
</div>
<a href="/users/{{ user.user_id.value() }}" class="btn-secondary">View profile →</a>
</div>
{% else %}
<p class="empty">No users yet.</p>
{% endfor %}
</div>
{% endblock %}

View File

@@ -4,7 +4,11 @@ version = "0.1.0"
edition = "2024"
[dependencies]
async-trait = { workspace = true }
domain = { workspace = true }
uuid = { workspace = true }
chrono = { workspace = true }
tracing = { workspace = true }
[dev-dependencies]
tokio = { workspace = true }

View File

@@ -14,7 +14,23 @@ pub struct LogReviewCommand {
pub watched_at: NaiveDateTime,
}
/// Request to (re)download a movie poster from the metadata provider.
#[derive(Clone)]
pub struct SyncPosterCommand {
    pub movie_id: Uuid,
    pub external_metadata_id: String,
}
/// Credentials submitted to the login endpoint (plaintext password;
/// hashing happens in the use case).
pub struct LoginCommand {
    pub email: String,
    pub password: String,
}
/// Credentials submitted to the registration endpoint.
pub struct RegisterCommand {
    pub email: String,
    pub password: String,
}
/// Delete a review; `requesting_user_id` lets the use case enforce
/// that only the review's owner may delete it.
pub struct DeleteReviewCommand {
    pub review_id: Uuid,
    pub requesting_user_id: Uuid,
}

View File

@@ -0,0 +1,16 @@
/// Runtime configuration read from the process environment.
#[derive(Clone)]
pub struct AppConfig {
    // Whether the /register flow is exposed (ALLOW_REGISTRATION).
    pub allow_registration: bool,
    // Public origin used for absolute links, e.g. RSS (BASE_URL).
    pub base_url: String,
}
impl AppConfig {
    /// Build the config from environment variables with safe defaults:
    /// registration disabled, base URL of the local dev server.
    ///
    /// `ALLOW_REGISTRATION` accepts "true"/"1" case-insensitively and
    /// tolerates surrounding whitespace; any other value (or unset)
    /// means disabled — still backward-compatible with "true"/"1".
    pub fn from_env() -> Self {
        let allow_registration = std::env::var("ALLOW_REGISTRATION")
            .map(|v| {
                let v = v.trim();
                v.eq_ignore_ascii_case("true") || v == "1"
            })
            .unwrap_or(false);
        let base_url = std::env::var("BASE_URL")
            .unwrap_or_else(|_| "http://localhost:3000".to_string());
        Self {
            allow_registration,
            base_url,
        }
    }
}

View File

@@ -2,9 +2,11 @@ use std::sync::Arc;
use domain::ports::{
AuthService, EventPublisher, MetadataClient, MovieRepository, PasswordHasher,
PosterFetcherClient, PosterStorage,
PosterFetcherClient, PosterStorage, UserRepository,
};
use crate::config::AppConfig;
#[derive(Clone)]
pub struct AppContext {
pub repository: Arc<dyn MovieRepository>,
@@ -14,4 +16,6 @@ pub struct AppContext {
pub event_publisher: Arc<dyn EventPublisher>,
pub auth_service: Arc<dyn AuthService>,
pub password_hasher: Arc<dyn PasswordHasher>,
pub user_repository: Arc<dyn UserRepository>,
pub config: AppConfig,
}

View File

@@ -1,4 +1,7 @@
pub mod commands;
pub mod config;
pub mod context;
pub mod movie_resolver;
pub mod ports;
pub mod queries;
pub mod use_cases;

View File

@@ -0,0 +1,595 @@
use async_trait::async_trait;
use domain::{
errors::DomainError,
models::Movie,
ports::{MetadataClient, MetadataSearchCriteria, MovieRepository},
value_objects::{ExternalMetadataId, MovieTitle, ReleaseYear},
};
use crate::commands::LogReviewCommand;
/// Borrowed collaborators a resolution strategy may use: the local
/// movie store and the external metadata provider.
pub struct MovieResolverDeps<'a> {
    pub repository: &'a dyn MovieRepository,
    pub metadata_client: &'a dyn MetadataClient,
}
/// One step of the movie-resolution pipeline.
///
/// `can_handle` is a cheap applicability check; `resolve` returns
/// `Ok(Some((movie, is_new)))` on success (`is_new` = needs
/// persisting), `Ok(None)` to fall through to the next strategy, or
/// `Err` to abort resolution entirely.
#[async_trait]
pub trait ResolutionStrategy: Send + Sync {
    fn can_handle(&self, cmd: &LogReviewCommand) -> bool;
    async fn resolve(
        &self,
        cmd: &LogReviewCommand,
        deps: &MovieResolverDeps<'_>,
    ) -> Result<Option<(Movie, bool)>, DomainError>;
}
// Stateless strategy markers; behavior lives in their trait impls below.
pub struct ExternalIdStrategy;
pub struct TitleSearchStrategy;
pub struct ManualMovieStrategy;
/// Ordered chain of strategies tried in sequence until one resolves
/// a movie (chain-of-responsibility).
pub struct MovieResolver {
    strategies: Vec<Box<dyn ResolutionStrategy>>,
}
impl MovieResolver {
pub fn default_pipeline() -> Self {
Self {
strategies: vec![
Box::new(ExternalIdStrategy),
Box::new(TitleSearchStrategy),
Box::new(ManualMovieStrategy),
],
}
}
pub async fn resolve(
&self,
cmd: &LogReviewCommand,
deps: &MovieResolverDeps<'_>,
) -> Result<(Movie, bool), DomainError> {
for strategy in &self.strategies {
if strategy.can_handle(cmd) {
if let Some(result) = strategy.resolve(cmd, deps).await? {
return Ok(result);
}
}
}
Err(DomainError::ValidationError(
"Manual title required if TMDB fetch fails or is omitted".into(),
))
}
}
#[async_trait]
impl ResolutionStrategy for ExternalIdStrategy {
    fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
        cmd.external_metadata_id.is_some()
    }
    /// Resolve via the external metadata id: prefer an already-stored
    /// movie, otherwise fetch from the metadata provider. Returns
    /// `Ok(None)` (fall through) when the fetch fails or no id is set.
    async fn resolve(
        &self,
        cmd: &LogReviewCommand,
        deps: &MovieResolverDeps<'_>,
    ) -> Result<Option<(Movie, bool)>, DomainError> {
        // Guard instead of unwrap(): resolve() is callable without a
        // prior can_handle() check, and must not panic in that case.
        let Some(ext_id_str) = cmd.external_metadata_id.as_deref() else {
            return Ok(None);
        };
        let external_id = ExternalMetadataId::new(ext_id_str.to_string())?;
        if let Some(existing) = deps.repository.get_movie_by_external_id(&external_id).await? {
            // (movie, false): already persisted, nothing new to save.
            return Ok(Some((existing, false)));
        }
        match deps
            .metadata_client
            .fetch_movie_metadata(&MetadataSearchCriteria::ImdbId(external_id))
            .await
        {
            Ok(movie) => Ok(Some((movie, true))),
            Err(e) => {
                // Best-effort: provider failure falls back to the
                // title-search / manual strategies instead of aborting.
                tracing::warn!(
                    "Failed to fetch from TMDB, falling back to manual entry: {:?}",
                    e
                );
                Ok(None)
            }
        }
    }
}
#[async_trait]
impl ResolutionStrategy for TitleSearchStrategy {
    fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
        cmd.manual_title.is_some()
    }
    /// Search the metadata provider by title (and optional year).
    /// Returns `Ok(None)` (fall through to manual entry) when the
    /// search fails or no title was supplied.
    async fn resolve(
        &self,
        cmd: &LogReviewCommand,
        deps: &MovieResolverDeps<'_>,
    ) -> Result<Option<(Movie, bool)>, DomainError> {
        // Guard instead of unwrap(): resolve() is callable without a
        // prior can_handle() check, and must not panic in that case.
        let Some(title) = cmd.manual_title.as_deref() else {
            return Ok(None);
        };
        let criteria = MetadataSearchCriteria::Title {
            title: title.to_string(),
            year: cmd.manual_release_year,
        };
        match deps.metadata_client.fetch_movie_metadata(&criteria).await {
            Ok(movie) => Ok(Some((movie, true))),
            Err(e) => {
                tracing::warn!("OMDb title search failed, falling back to manual: {:?}", e);
                Ok(None)
            }
        }
    }
}
#[async_trait]
impl ResolutionStrategy for ManualMovieStrategy {
    fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
        cmd.manual_title.is_some()
    }
    /// Last-resort strategy: build the movie from the manually entered
    /// fields. Reuses an existing stored movie when title, year and
    /// director all match; otherwise creates a fresh one (is_new=true).
    /// A missing year is a validation error at this point.
    async fn resolve(
        &self,
        cmd: &LogReviewCommand,
        deps: &MovieResolverDeps<'_>,
    ) -> Result<Option<(Movie, bool)>, DomainError> {
        let Some(title_str) = cmd.manual_title.as_ref() else {
            return Ok(None);
        };
        let Some(year_val) = cmd.manual_release_year else {
            return Err(DomainError::ValidationError(
                "Manual release year required if TMDB fetch fails or is omitted".into(),
            ));
        };
        let title = MovieTitle::new(title_str.clone())?;
        let release_year = ReleaseYear::new(year_val)?;
        let existing = deps
            .repository
            .get_movies_by_title_and_year(&title, &release_year)
            .await?
            .into_iter()
            .find(|m| m.is_manual_match(&title, &release_year, cmd.manual_director.as_deref()));
        match existing {
            Some(movie) => Ok(Some((movie, false))),
            None => {
                let created =
                    Movie::new(None, title, release_year, cmd.manual_director.clone(), None);
                Ok(Some((created, true)))
            }
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::NaiveDate;
use domain::{
errors::DomainError,
events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated},
ports::{MetadataSearchCriteria, MovieRepository},
value_objects::{
ExternalMetadataId, MovieId, MovieTitle, PosterUrl, ReleaseYear, ReviewId,
},
};
/// Test fixture: a `LogReviewCommand` with the given identification
/// fields; remaining manual fields are None, rating/watched_at fixed.
fn make_cmd(ext_id: Option<&str>, title: Option<&str>, year: Option<u16>) -> LogReviewCommand {
    LogReviewCommand {
        external_metadata_id: ext_id.map(String::from),
        manual_title: title.map(String::from),
        manual_release_year: year,
        manual_director: None,
        user_id: uuid::Uuid::new_v4(),
        rating: 4,
        comment: None,
        watched_at: NaiveDate::from_ymd_opt(2024, 1, 1)
            .unwrap()
            .and_hms_opt(0, 0, 0)
            .unwrap(),
    }
}
/// Test fixture: a minimal movie ("Inception", 2010) with no external
/// id, director or poster.
fn make_movie() -> Movie {
    Movie::new(
        None,
        MovieTitle::new("Inception".to_string()).unwrap(),
        ReleaseYear::new(2010).unwrap(),
        None,
        None,
    )
}
// MovieRepository test doubles; each panics on any method the
// scenario under test is not expected to call.
struct RepoWithExternalMovie(Movie); // external-id lookup yields the wrapped movie
struct RepoEmpty; // lookups yield nothing
struct RepoWithTitleMatch(Movie); // presumably title/year lookup yields the wrapped movie — impl not shown here
// Double for the "movie already stored under its external id" case:
// only get_movie_by_external_id succeeds; everything else panics.
#[async_trait]
impl MovieRepository for RepoWithExternalMovie {
    async fn get_movie_by_external_id(
        &self,
        _: &ExternalMetadataId,
    ) -> Result<Option<Movie>, DomainError> {
        Ok(Some(self.0.clone()))
    }
    async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
        panic!("unexpected")
    }
    async fn get_movies_by_title_and_year(
        &self,
        _: &MovieTitle,
        _: &ReleaseYear,
    ) -> Result<Vec<Movie>, DomainError> {
        panic!("unexpected")
    }
    async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
        panic!("unexpected")
    }
    async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
        panic!("unexpected")
    }
    async fn query_diary(
        &self,
        _: &DiaryFilter,
    ) -> Result<Paginated<DiaryEntry>, DomainError> {
        panic!("unexpected")
    }
    async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
        panic!("unexpected")
    }
    async fn get_review_by_id(
        &self,
        _: &ReviewId,
    ) -> Result<Option<Review>, DomainError> {
        panic!("unexpected")
    }
    async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
        panic!("unexpected")
    }
    async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
        panic!("unexpected")
    }
    async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
    async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
    async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
    async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl MovieRepository for RepoEmpty {
    /// Stub repository that knows no movies: external-id and title lookups
    /// both come back empty; anything else aborts the test.
    async fn get_movie_by_external_id(
        &self,
        _: &ExternalMetadataId,
    ) -> Result<Option<Movie>, DomainError> {
        Ok(None)
    }
    async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
    async fn get_movies_by_title_and_year(
        &self,
        _: &MovieTitle,
        _: &ReleaseYear,
    ) -> Result<Vec<Movie>, DomainError> {
        Ok(vec![])
    }
    async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
    async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
    async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
    async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
    async fn get_review_by_id(&self, _: &ReviewId) -> Result<Option<Review>, DomainError> { panic!("unexpected") }
    async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> { panic!("unexpected") }
    async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { panic!("unexpected") }
    async fn query_activity_feed(
        &self,
        _: &domain::models::collections::PageParams,
    ) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> {
        panic!("unexpected")
    }
    async fn get_user_stats(
        &self,
        _: &domain::value_objects::UserId,
    ) -> Result<domain::models::UserStats, DomainError> {
        panic!("unexpected")
    }
    async fn get_user_history(
        &self,
        _: &domain::value_objects::UserId,
    ) -> Result<Vec<domain::models::DiaryEntry>, DomainError> {
        panic!("unexpected")
    }
    async fn get_user_trends(
        &self,
        _: &domain::value_objects::UserId,
    ) -> Result<domain::models::UserTrends, DomainError> {
        panic!("unexpected")
    }
}
#[async_trait]
impl MovieRepository for RepoWithTitleMatch {
    /// Stub repository that only answers title+year searches, returning the
    /// wrapped movie as the sole candidate; anything else aborts the test.
    async fn get_movie_by_external_id(
        &self,
        _: &ExternalMetadataId,
    ) -> Result<Option<Movie>, DomainError> {
        panic!("unexpected")
    }
    async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
    async fn get_movies_by_title_and_year(
        &self,
        _: &MovieTitle,
        _: &ReleaseYear,
    ) -> Result<Vec<Movie>, DomainError> {
        Ok(vec![self.0.clone()])
    }
    async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
    async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
    async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
    async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
    async fn get_review_by_id(&self, _: &ReviewId) -> Result<Option<Review>, DomainError> { panic!("unexpected") }
    async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> { panic!("unexpected") }
    async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { panic!("unexpected") }
    async fn query_activity_feed(
        &self,
        _: &domain::models::collections::PageParams,
    ) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> {
        panic!("unexpected")
    }
    async fn get_user_stats(
        &self,
        _: &domain::value_objects::UserId,
    ) -> Result<domain::models::UserStats, DomainError> {
        panic!("unexpected")
    }
    async fn get_user_history(
        &self,
        _: &domain::value_objects::UserId,
    ) -> Result<Vec<domain::models::DiaryEntry>, DomainError> {
        panic!("unexpected")
    }
    async fn get_user_trends(
        &self,
        _: &domain::value_objects::UserId,
    ) -> Result<domain::models::UserTrends, DomainError> {
        panic!("unexpected")
    }
}
struct MetaReturnsMovie(Movie);
struct MetaErrors;
#[async_trait]
impl MetadataClient for MetaReturnsMovie {
    /// Always "finds" the wrapped movie, whatever the search criteria.
    async fn fetch_movie_metadata(
        &self,
        _: &MetadataSearchCriteria,
    ) -> Result<Movie, DomainError> {
        Ok(self.0.clone())
    }
    /// Poster lookups are out of scope for these tests.
    async fn get_poster_url(
        &self,
        _: &ExternalMetadataId,
    ) -> Result<Option<PosterUrl>, DomainError> {
        panic!("unexpected")
    }
}
#[async_trait]
impl MetadataClient for MetaErrors {
    /// Simulates an unreachable metadata provider for fallback-path tests.
    async fn fetch_movie_metadata(
        &self,
        _: &MetadataSearchCriteria,
    ) -> Result<Movie, DomainError> {
        Err(DomainError::InfrastructureError("metadata unavailable".into()))
    }
    /// Poster lookups are out of scope for these tests.
    async fn get_poster_url(
        &self,
        _: &ExternalMetadataId,
    ) -> Result<Option<PosterUrl>, DomainError> {
        panic!("unexpected")
    }
}
// --- ExternalIdStrategy ---
#[test]
fn external_id_strategy_can_handle_cmd_with_id() {
    assert!(ExternalIdStrategy.can_handle(&make_cmd(Some("tt123"), None, None)));
}
#[test]
fn external_id_strategy_cannot_handle_cmd_without_id() {
    assert!(!ExternalIdStrategy.can_handle(&make_cmd(None, Some("Inception"), Some(2010))));
}
#[tokio::test]
async fn external_id_strategy_returns_cached_movie() {
    // The repo already knows the movie, so the failing metadata client
    // must never be consulted.
    let repository = RepoWithExternalMovie(make_movie());
    let metadata = MetaErrors;
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    let resolved = ExternalIdStrategy
        .resolve(&make_cmd(Some("tt123"), None, None), &deps)
        .await
        .unwrap();
    // Second tuple element is `is_new`; a cache hit is not new.
    assert!(matches!(resolved, Some((_, false))));
}
#[tokio::test]
async fn external_id_strategy_fetches_from_metadata_when_not_cached() {
    let repository = RepoEmpty;
    let metadata = MetaReturnsMovie(make_movie());
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    let resolved = ExternalIdStrategy
        .resolve(&make_cmd(Some("tt123"), None, None), &deps)
        .await
        .unwrap();
    // Fetched fresh from the metadata client, so it is flagged as new.
    assert!(matches!(resolved, Some((_, true))));
}
#[tokio::test]
async fn external_id_strategy_falls_through_on_metadata_error() {
    let repository = RepoEmpty;
    let metadata = MetaErrors;
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    let resolved = ExternalIdStrategy
        .resolve(&make_cmd(Some("tt123"), None, None), &deps)
        .await
        .unwrap();
    // A metadata failure yields None so the next strategy can take over.
    assert!(resolved.is_none());
}
// --- TitleSearchStrategy ---
#[test]
fn title_strategy_can_handle_cmd_with_title() {
    assert!(TitleSearchStrategy.can_handle(&make_cmd(None, Some("Inception"), Some(2010))));
}
#[test]
fn title_strategy_cannot_handle_cmd_without_title() {
    assert!(!TitleSearchStrategy.can_handle(&make_cmd(Some("tt123"), None, None)));
}
#[tokio::test]
async fn title_strategy_fetches_from_metadata() {
    let repository = RepoEmpty;
    let metadata = MetaReturnsMovie(make_movie());
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    let resolved = TitleSearchStrategy
        .resolve(&make_cmd(None, Some("Inception"), Some(2010)), &deps)
        .await
        .unwrap();
    // Metadata hit counts as a new movie.
    assert!(matches!(resolved, Some((_, true))));
}
#[tokio::test]
async fn title_strategy_falls_through_on_metadata_error() {
    let repository = RepoEmpty;
    let metadata = MetaErrors;
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    let resolved = TitleSearchStrategy
        .resolve(&make_cmd(None, Some("Inception"), Some(2010)), &deps)
        .await
        .unwrap();
    // None lets the manual strategy try next.
    assert!(resolved.is_none());
}
// --- ManualMovieStrategy ---
#[test]
fn manual_strategy_can_handle_cmd_with_title() {
    assert!(ManualMovieStrategy.can_handle(&make_cmd(None, Some("Inception"), Some(2010))));
}
#[test]
fn manual_strategy_cannot_handle_cmd_without_title() {
    assert!(!ManualMovieStrategy.can_handle(&make_cmd(Some("tt123"), None, None)));
}
#[tokio::test]
async fn manual_strategy_returns_existing_movie() {
    // A title+year match in the repo must be reused, not re-created.
    let repository = RepoWithTitleMatch(make_movie());
    let metadata = MetaErrors;
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    let resolved = ManualMovieStrategy
        .resolve(&make_cmd(None, Some("Inception"), Some(2010)), &deps)
        .await
        .unwrap();
    assert!(matches!(resolved, Some((_, false))));
}
#[tokio::test]
async fn manual_strategy_creates_new_movie_when_no_match() {
    let repository = RepoEmpty;
    let metadata = MetaErrors;
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    let resolved = ManualMovieStrategy
        .resolve(&make_cmd(None, Some("Inception"), Some(2010)), &deps)
        .await
        .unwrap();
    assert!(matches!(resolved, Some((_, true))));
}
#[tokio::test]
async fn manual_strategy_errors_without_year() {
    let repository = RepoEmpty;
    let metadata = MetaErrors;
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    // A title alone cannot produce a manual movie — the year is mandatory.
    assert!(ManualMovieStrategy
        .resolve(&make_cmd(None, Some("Inception"), None), &deps)
        .await
        .is_err());
}
// --- MovieResolver pipeline ---
#[tokio::test]
async fn resolver_returns_error_when_no_strategy_matches() {
    let repository = RepoEmpty;
    let metadata = MetaErrors;
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    // No external id and no title: nothing in the pipeline can handle it.
    let outcome = MovieResolver::default_pipeline()
        .resolve(&make_cmd(None, None, None), &deps)
        .await;
    assert!(outcome.is_err());
}
#[tokio::test]
async fn resolver_uses_cached_movie_when_external_id_matches() {
    let repository = RepoWithExternalMovie(make_movie());
    let metadata = MetaErrors;
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    let (_, is_new) = MovieResolver::default_pipeline()
        .resolve(&make_cmd(Some("tt123"), None, None), &deps)
        .await
        .unwrap();
    assert!(!is_new);
}
#[tokio::test]
async fn resolver_falls_through_to_manual_when_external_and_title_both_fail() {
    let repository = RepoEmpty;
    let metadata = MetaErrors;
    let deps = MovieResolverDeps { repository: &repository, metadata_client: &metadata };
    // Both metadata-backed strategies hit the failing client, so the manual
    // strategy ends up creating a brand-new movie.
    let (_, is_new) = MovieResolver::default_pipeline()
        .resolve(&make_cmd(Some("tt123"), Some("Inception"), Some(2010)), &deps)
        .await
        .unwrap();
    assert!(is_new);
}
}

View File

@@ -0,0 +1,74 @@
use uuid::Uuid;
use domain::models::{DiaryEntry, FeedEntry, MonthActivity, UserStats, UserSummary, UserTrends, collections::Paginated};
/// Fields every rendered HTML page needs: the signed-in user (if any),
/// whether registration is open, and per-page RSS/SEO metadata.
pub struct HtmlPageContext {
    /// Email of the signed-in user; `None` for anonymous visitors.
    pub user_email: Option<String>,
    /// Id of the signed-in user; `None` for anonymous visitors.
    pub user_id: Option<Uuid>,
    /// Whether the registration page should be offered in the UI.
    pub register_enabled: bool,
    /// RSS feed URL used by the nav link (per-user feed on profile pages).
    pub rss_url: String,
    /// Per-page `<title>` text.
    pub page_title: String,
    /// Canonical URL for this page, for SEO/OG tags.
    pub canonical_url: String,
}
impl HtmlPageContext {
    /// True when `id` belongs to the currently signed-in user.
    pub fn is_current_user(&self, id: Uuid) -> bool {
        self.user_id == Some(id)
    }
}
/// View model for the login form; `error` is shown after a failed attempt.
pub struct LoginPageData<'a> {
    pub ctx: HtmlPageContext,
    pub error: Option<&'a str>,
}
/// View model for the registration form; `error` is shown after a failed attempt.
pub struct RegisterPageData<'a> {
    pub ctx: HtmlPageContext,
    pub error: Option<&'a str>,
}
/// View model for the "log a review" form; `error` is shown after a failed attempt.
pub struct NewReviewPageData<'a> {
    pub ctx: HtmlPageContext,
    pub error: Option<&'a str>,
}
/// View model for one page of the global activity feed.
pub struct ActivityFeedPageData {
    pub ctx: HtmlPageContext,
    pub entries: Paginated<FeedEntry>,
    /// Offset of the page being shown.
    pub current_offset: u32,
    /// Whether another page follows this one.
    pub has_more: bool,
    /// Page size in effect.
    pub limit: u32,
}
/// View model for the users directory.
pub struct UsersPageData {
    pub ctx: HtmlPageContext,
    pub users: Vec<UserSummary>,
}
/// View model for a user's profile page. At most one of `entries`,
/// `history`, or `trends` is populated, matching the selected `view`.
pub struct ProfilePageData {
    pub ctx: HtmlPageContext,
    pub profile_user_id: Uuid,
    pub profile_user_email: String,
    pub stats: UserStats,
    /// Selected tab, e.g. "recent", "ratings", "history", or "trends".
    pub view: String,
    pub entries: Option<Paginated<DiaryEntry>>,
    pub current_offset: u32,
    pub has_more: bool,
    pub limit: u32,
    pub history: Option<Vec<MonthActivity>>,
    pub trends: Option<UserTrends>,
}
/// Outbound port for rendering full HTML pages from view models.
/// Errors are plain strings surfaced by the adapter.
pub trait HtmlRenderer: Send + Sync {
    fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String>;
    fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String>;
    fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String>;
    fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String>;
    fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String>;
    fn render_users_page(&self, data: UsersPageData) -> Result<String, String>;
    fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String>;
}
/// Outbound port for rendering an RSS feed with the given channel `title`.
pub trait RssFeedRenderer: Send + Sync {
    fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String>;
}

View File

@@ -6,8 +6,23 @@ pub struct GetDiaryQuery {
pub offset: Option<u32>,
pub sort_by: Option<SortDirection>,
pub movie_id: Option<Uuid>,
pub user_id: Option<Uuid>,
}
/// Query for the full review history of a single movie.
pub struct GetReviewHistoryQuery {
    pub movie_id: Uuid,
}
/// Query for one page of the global activity feed.
pub struct GetActivityFeedQuery {
    pub limit: Option<u32>,
    pub offset: Option<u32>,
}
/// Query for the users directory; carries no parameters yet.
pub struct GetUsersQuery;
/// Query for one tab of a user's profile page.
pub struct GetUserProfileQuery {
    pub user_id: Uuid,
    /// Selected tab: "recent", "ratings", "history", or "trends".
    pub view: String,
    pub limit: Option<u32>,
    pub offset: Option<u32>,
}

View File

@@ -0,0 +1,27 @@
use domain::{errors::DomainError, value_objects::{ReviewId, UserId}};
use crate::{commands::DeleteReviewCommand, context::AppContext};
/// Deletes a review owned by the requesting user; when it was the movie's
/// last remaining viewing, the now-orphaned movie row is removed too.
///
/// # Errors
/// `NotFound` if the review does not exist, `Unauthorized` if the requester
/// is not its author; repository errors propagate.
pub async fn execute(ctx: &AppContext, cmd: DeleteReviewCommand) -> Result<(), DomainError> {
    let review_id = ReviewId::from_uuid(cmd.review_id);
    let requesting_user_id = UserId::from_uuid(cmd.requesting_user_id);
    // Look the review up first so a missing id yields NotFound instead of
    // silently deleting nothing.
    let review = ctx
        .repository
        .get_review_by_id(&review_id)
        .await?
        .ok_or_else(|| DomainError::NotFound(format!("review {}", cmd.review_id)))?;
    // Ownership check: only the author may delete their own review.
    if review.user_id() != &requesting_user_id {
        return Err(DomainError::Unauthorized("not your review".into()));
    }
    // Capture the movie id before the review row goes away.
    let movie_id = review.movie_id().clone();
    ctx.repository.delete_review(&review_id).await?;
    // Garbage-collect the movie when no viewings remain.
    // NOTE(review): delete + re-query + delete is not atomic; a concurrent
    // review insert between these calls could race — confirm acceptable.
    let history = ctx.repository.get_review_history(&movie_id).await?;
    if history.viewings().is_empty() {
        ctx.repository.delete_movie(&movie_id).await?;
    }
    Ok(())
}

View File

@@ -0,0 +1,13 @@
use domain::{
errors::DomainError,
models::{FeedEntry, collections::{PageParams, Paginated}},
};
use crate::{context::AppContext, queries::GetActivityFeedQuery};
/// Returns one page of the global activity feed.
///
/// # Errors
/// Propagates pagination-parameter validation and repository errors.
pub async fn execute(
    ctx: &AppContext,
    query: GetActivityFeedQuery,
) -> Result<Paginated<FeedEntry>, DomainError> {
    // Validate/normalize paging before touching the repository.
    let paging = PageParams::new(query.limit, query.offset)?;
    ctx.repository.query_activity_feed(&paging).await
}

View File

@@ -4,7 +4,7 @@ use domain::{
DiaryEntry, DiaryFilter, SortDirection,
collections::{PageParams, Paginated},
},
value_objects::MovieId,
value_objects::{MovieId, UserId},
};
use crate::{context::AppContext, queries::GetDiaryQuery};
@@ -14,16 +14,15 @@ pub async fn execute(
query: GetDiaryQuery,
) -> Result<Paginated<DiaryEntry>, DomainError> {
let page = PageParams::new(query.limit, query.offset)?;
let movie_id = query.movie_id.map(MovieId::from_uuid);
let user_id = query.user_id.map(UserId::from_uuid);
let filter = DiaryFilter {
sort_by: query.sort_by.unwrap_or(SortDirection::Descending),
page,
movie_id,
user_id,
};
let paginated_results = ctx.repository.query_diary(&filter).await?;
Ok(paginated_results)
ctx.repository.query_diary(&filter).await
}

View File

@@ -0,0 +1,93 @@
use domain::{
errors::DomainError,
models::{
DiaryEntry, DiaryFilter, MonthActivity, SortDirection, UserStats, UserTrends,
collections::{PageParams, Paginated},
},
value_objects::UserId,
};
use crate::{context::AppContext, queries::GetUserProfileQuery};
/// Everything one profile tab needs. `stats` is always populated; at most
/// one of `entries`/`history`/`trends` is `Some`, matching the requested view.
pub struct UserProfileData {
    pub stats: UserStats,
    pub entries: Option<Paginated<DiaryEntry>>,
    pub history: Option<Vec<MonthActivity>>,
    pub trends: Option<UserTrends>,
}
/// Builds the data for one tab ("view") of a user's profile page.
///
/// Every view shares the headline `stats`; exactly one of the remaining
/// fields is filled: `history` groups all diary entries by month, `trends`
/// carries aggregates, and `ratings`/`recent` return one page of diary
/// entries sorted by rating or recency respectively.
///
/// # Errors
/// `ValidationError` for an unknown view name; pagination and repository
/// errors propagate.
pub async fn execute(
    ctx: &AppContext,
    query: GetUserProfileQuery,
) -> Result<UserProfileData, DomainError> {
    let user_id = UserId::from_uuid(query.user_id);
    let stats = ctx.repository.get_user_stats(&user_id).await?;
    match query.view.as_str() {
        "history" => {
            // V1: loads all entries into memory. Personal diaries are bounded in size;
            // spec calls for showing every movie grouped by month, so full load is intentional.
            let all_entries = ctx.repository.get_user_history(&user_id).await?;
            let history = group_by_month(all_entries);
            Ok(UserProfileData { stats, entries: None, history: Some(history), trends: None })
        }
        "trends" => {
            let trends = ctx.repository.get_user_trends(&user_id).await?;
            Ok(UserProfileData { stats, entries: None, history: None, trends: Some(trends) })
        }
        // These two views differ only in sort order; share the pagination
        // plumbing instead of duplicating the whole branch body.
        "ratings" | "recent" => {
            let sort_by = if query.view == "ratings" {
                SortDirection::ByRatingDesc
            } else {
                SortDirection::Descending
            };
            let page = PageParams::new(query.limit, query.offset)?;
            let filter = DiaryFilter {
                sort_by,
                page,
                movie_id: None,
                user_id: Some(user_id),
            };
            let entries = ctx.repository.query_diary(&filter).await?;
            Ok(UserProfileData { stats, entries: Some(entries), history: None, trends: None })
        }
        other => Err(DomainError::ValidationError(format!("unknown view: {}", other))),
    }
}
/// Buckets diary entries into per-month groups, newest month first.
fn group_by_month(entries: Vec<DiaryEntry>) -> Vec<MonthActivity> {
    use std::collections::BTreeMap;
    // A BTreeMap keyed on "YYYY-MM" keeps months chronologically sorted for free.
    let mut buckets: BTreeMap<String, Vec<DiaryEntry>> = BTreeMap::new();
    for entry in entries {
        let key = entry.review().watched_at().format("%Y-%m").to_string();
        buckets.entry(key).or_default().push(entry);
    }
    // Walk the map newest-first and materialize the display rows.
    buckets
        .into_iter()
        .rev()
        .map(|(year_month, month_entries)| MonthActivity {
            month_label: format_year_month_long(&year_month),
            count: month_entries.len() as i64,
            entries: month_entries,
            year_month,
        })
        .collect()
}
/// Expands "YYYY-MM" into e.g. "March 2024". Anything that does not carry a
/// zero-padded two-digit month after the first '-' is passed through as-is.
fn format_year_month_long(ym: &str) -> String {
    const MONTH_NAMES: [(&str, &str); 12] = [
        ("01", "January"), ("02", "February"), ("03", "March"), ("04", "April"),
        ("05", "May"), ("06", "June"), ("07", "July"), ("08", "August"),
        ("09", "September"), ("10", "October"), ("11", "November"), ("12", "December"),
    ];
    match ym.split_once('-') {
        Some((year, month)) => {
            // Unknown month tokens fall back to the raw digits.
            let label = MONTH_NAMES
                .iter()
                .find(|(digits, _)| *digits == month)
                .map(|(_, name)| *name)
                .unwrap_or(month);
            format!("{} {}", label, year)
        }
        // No '-' at all: nothing to format.
        None => ym.to_string(),
    }
}

View File

@@ -0,0 +1,9 @@
use domain::{errors::DomainError, models::UserSummary};
use crate::{context::AppContext, queries::GetUsersQuery};
/// Lists every registered user together with their per-user stats.
pub async fn execute(
    ctx: &AppContext,
    _query: GetUsersQuery,
) -> Result<Vec<UserSummary>, DomainError> {
    // The query carries no parameters yet; delegate straight to the repository.
    let users = ctx.user_repository.list_with_stats().await?;
    Ok(users)
}

View File

@@ -2,17 +2,25 @@ use domain::{
errors::DomainError,
events::DomainEvent,
models::{Movie, Review},
value_objects::{Comment, ExternalMetadataId, MovieTitle, Rating, ReleaseYear, UserId},
value_objects::{Comment, Rating, UserId},
};
use crate::{commands::LogReviewCommand, context::AppContext};
use crate::{
commands::LogReviewCommand,
context::AppContext,
movie_resolver::{MovieResolver, MovieResolverDeps},
};
pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), DomainError> {
let rating = Rating::new(cmd.rating)?;
let user_id = UserId::from_uuid(cmd.user_id);
let comment = cmd.comment.clone().map(Comment::new).transpose()?;
let (movie, is_new_movie) = resolve_movie(ctx, &cmd).await?;
let deps = MovieResolverDeps {
repository: ctx.repository.as_ref(),
metadata_client: ctx.metadata_client.as_ref(),
};
let (movie, is_new_movie) = MovieResolver::default_pipeline().resolve(&cmd, &deps).await?;
ctx.repository.upsert_movie(&movie).await?;
@@ -24,76 +32,6 @@ pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), Doma
Ok(())
}
async fn resolve_movie(
ctx: &AppContext,
cmd: &LogReviewCommand,
) -> Result<(Movie, bool), DomainError> {
if let Some(ext_id_str) = &cmd.external_metadata_id {
if let Some(resolved) = resolve_external_movie(ctx, ext_id_str).await? {
return Ok(resolved);
}
}
resolve_manual_movie(ctx, cmd).await
}
async fn resolve_external_movie(
ctx: &AppContext,
ext_id_str: &str,
) -> Result<Option<(Movie, bool)>, DomainError> {
let tmdb_id = ExternalMetadataId::new(ext_id_str.to_string())?;
if let Some(m) = ctx.repository.get_movie_by_external_id(&tmdb_id).await? {
return Ok(Some((m, false)));
}
match ctx.metadata_client.fetch_movie_metadata(&tmdb_id).await {
Ok(m) => Ok(Some((m, true))),
Err(e) => {
tracing::warn!(
"Failed to fetch from TMDB, falling back to manual entry: {:?}",
e
);
Ok(None)
}
}
}
async fn resolve_manual_movie(
ctx: &AppContext,
cmd: &LogReviewCommand,
) -> Result<(Movie, bool), DomainError> {
let title_str = cmd.manual_title.as_ref().ok_or_else(|| {
DomainError::ValidationError(
"Manual title required if TMDB fetch fails or is omitted".into(),
)
})?;
let year_val = cmd.manual_release_year.ok_or_else(|| {
DomainError::ValidationError(
"Manual release year required if TMDB fetch fails or is omitted".into(),
)
})?;
let title = MovieTitle::new(title_str.clone())?;
let release_year = ReleaseYear::new(year_val)?;
let candidates = ctx
.repository
.get_movies_by_title_and_year(&title, &release_year)
.await?;
let matched_movie = candidates
.into_iter()
.find(|m| m.is_manual_match(&title, &release_year, cmd.manual_director.as_deref()));
if let Some(existing_movie) = matched_movie {
Ok((existing_movie, false))
} else {
let new_movie = Movie::new(None, title, release_year, cmd.manual_director.clone(), None);
Ok((new_movie, true))
}
}
async fn publish_events(
ctx: &AppContext,
movie: &Movie,

View File

@@ -0,0 +1,39 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;
use domain::{errors::DomainError, value_objects::Email};
use crate::{commands::LoginCommand, context::AppContext};
/// Outcome of a successful login: the session token plus the user identity
/// details the HTTP layer needs to set the cookie and greet the user.
pub struct LoginResult {
    /// Opaque session token issued by the auth service.
    pub token: String,
    pub user_id: Uuid,
    pub email: String,
    /// Token expiry, for the cookie's lifetime.
    pub expires_at: DateTime<Utc>,
}
/// Authenticates a user by email/password and mints a session token.
///
/// # Errors
/// Returns `Unauthorized` with the same generic message for both "no such
/// user" and "wrong password", so responses do not reveal which emails are
/// registered. Email validation and infrastructure errors propagate.
pub async fn execute(ctx: &AppContext, cmd: LoginCommand) -> Result<LoginResult, DomainError> {
    let email = Email::new(cmd.email)?;
    let user = ctx
        .user_repository
        .find_by_email(&email)
        .await?
        .ok_or_else(|| DomainError::Unauthorized("Invalid credentials".into()))?;
    let valid = ctx
        .password_hasher
        .verify(&cmd.password, user.password_hash())
        .await?;
    if !valid {
        return Err(DomainError::Unauthorized("Invalid credentials".into()));
    }
    // NOTE(review): the unknown-email path skips hash verification entirely,
    // so response timing can still distinguish the two failure cases —
    // confirm whether a dummy hash check is wanted.
    let generated = ctx.auth_service.generate_token(user.id()).await?;
    Ok(LoginResult {
        token: generated.token,
        user_id: user.id().value(),
        email: user.email().value().to_string(),
        expires_at: generated.expires_at,
    })
}

Some files were not shown because too many files have changed in this diff Show More