Compare commits

...

86 Commits

Author SHA1 Message Date
7a66661932 css 2026-05-04 23:24:04 +02:00
b30a6a102b feat: per-page titles, OG/SEO tags, HOST/PORT env vars, BASE_URL in config 2026-05-04 22:38:58 +02:00
38a3aa6bbf fix: update .gitignore to include db-shm and db-wal files
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 22:23:08 +02:00
3135a15cb3 fix: WAL mode + busy_timeout for SQLite, fix rate limiter TOCTOU race 2026-05-04 22:10:19 +02:00
d083f8ae3d refactor: use constant for minimum password length and API rate limit
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 21:41:07 +02:00
874c406d4a fix: security hardening — SameSite=Strict, Secure cookie flag, password min length, generic registration error, auth rate limiting 2026-05-04 21:38:23 +02:00
78e1f4ef72 clean up 2026-05-04 21:24:44 +02:00
cf74b06b4a fix: use pixel bar heights and show avg rating values in trends chart 2026-05-04 21:22:47 +02:00
317898d51b fix: count distinct movies per user in users list, not total reviews 2026-05-04 21:10:32 +02:00
790bb6fbb5 fix: read BASE_URL from env for RSS channel link 2026-05-04 21:06:51 +02:00
658df38788 fix: move rss_url after user lookup, extract RSS_FEED_LIMIT constant 2026-05-04 21:05:08 +02:00
cff0f854fa feat: point RSS nav link to user feed when on profile page 2026-05-04 21:00:31 +02:00
66ade70273 feat: add GET /users/{id}/feed.rss per-user RSS feed handler 2026-05-04 20:58:20 +02:00
cbd2ac5b3e feat: add rss_url to HtmlPageContext, use it in nav 2026-05-04 20:55:31 +02:00
0433cd4d9b fix: remove unused feed_title from RssAdapter 2026-05-04 20:54:32 +02:00
b5a8ea2395 feat: add title param to render_feed, use dynamic title in RSS adapter 2026-05-04 20:52:07 +02:00
49b79799c1 feat: add user_id filter to GetDiaryQuery and get_diary use case 2026-05-04 20:49:31 +02:00
f4aba551a2 fix: derive heatmap color from primary instead of hardcoded blue 2026-05-04 20:38:13 +02:00
91df35dbd3 fix: count distinct movies in user stats, not total reviews 2026-05-04 20:35:48 +02:00
623f90e43f fix: remove timezone-broken future-date check from Review::new 2026-05-04 20:29:11 +02:00
e28f628c80 fix: remove redundant 'common' section from documentation 2026-05-04 20:28:06 +02:00
60c25d4c24 fix: update test assertion for new default page limit 2026-05-04 20:18:04 +02:00
22aafe99be fix: set domain DEFAULT_LIMIT to 5 for pagination 2026-05-04 20:16:18 +02:00
0ff22cca5f fix: remove email from top bar nav 2026-05-04 20:13:14 +02:00
ccc39e27e4 fix: lower default page limit to 5 2026-05-04 20:11:55 +02:00
76319756f4 feat: add chrono dependency to Cargo.lock 2026-05-04 19:48:40 +02:00
7703227970 fix: add missing trait stubs to test mock impls 2026-05-04 19:23:56 +02:00
b9933bb48d feat: add profile/feed/chart CSS styles 2026-05-04 19:17:39 +02:00
0c48708ce6 fix: has_more overflow, magic constant, remove dead get_index handler 2026-05-04 19:15:42 +02:00
a2a889bced feat: wire activity feed, users list, and profile page handlers 2026-05-04 19:12:06 +02:00
a4846f3bea fix: pagination underflow, remove |safe, move bar_height_pct to adapter 2026-05-04 19:09:28 +02:00
27be840faa fix: adjust domain accessors and template adapter for Askama compatibility 2026-05-04 19:03:48 +02:00
965fc0eda8 feat: add activity feed, users, and profile HTML templates 2026-05-04 19:03:44 +02:00
d700b85337 fix: correct relative_time future guard, heatmap exact match, max clarity 2026-05-04 18:57:17 +02:00
ffbab75910 feat: add Askama template structs for feed/users/profile 2026-05-04 18:55:18 +02:00
dda7c40f7f fix: validate view param, document V1 history load 2026-05-04 18:53:26 +02:00
1b827b1bdd feat: add activity feed/users/profile use cases and port methods 2026-05-04 18:48:16 +02:00
1ee6873a60 fix: address code review issues in SQLite adapter 2026-05-04 18:46:31 +02:00
7352b533ff feat: implement feed/stats/history/trends SQLite queries 2026-05-04 18:42:45 +02:00
85e254fee2 feat: impl UserRepository::list_with_stats 2026-05-04 18:40:58 +02:00
fa8221322d feat: add feed/stats SQLite row types 2026-05-04 18:32:59 +02:00
38da37de55 feat: add feed/profile/stats port methods to repositories 2026-05-04 18:30:01 +02:00
f3dedbad8a fix: use UserId newtype in UserSummary instead of raw Uuid 2026-05-04 18:29:10 +02:00
d468ce131f feat: add feed/profile domain models, extend DiaryFilter 2026-05-04 18:26:55 +02:00
d034af9e9c feat: update color scheme to use primary variables for consistency in styling 2026-05-04 17:47:00 +02:00
59d308f41b feat: enhance movie rating display with star icons and improved styling 2026-05-04 15:43:02 +02:00
bbb2ee00d6 feat: enhance styling and layout; add background image and improve UI elements 2026-05-04 15:39:15 +02:00
5dd9aac68d feat: add Dockerfile, .dockerignore, and README; remove common crate
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 15:19:29 +02:00
6dcc4c8317 Refactor movie review logging and resolution strategies
- Introduced `MovieResolver` and associated strategies for resolving movie data based on external metadata ID, manual title, or manual entry.
- Updated `log_review` use case to utilize the new `MovieResolver` for fetching movie details.
- Simplified the `LogReviewData` structure and its conversion to `LogReviewCommand`.
- Enhanced error handling for date parsing in review forms and requests.
- Updated dependencies in `Cargo.toml` and `Cargo.lock` to include necessary crates for async operations.
- Added tests for new functionality in `movie_resolver.rs` to ensure correct behavior of resolution strategies.
2026-05-04 15:08:04 +02:00
e31d99a240 fix(tests): add missing trait methods to PanicRepo mocks 2026-05-04 14:37:48 +02:00
41fec1efa5 fix(presentation): restore user_id in get_new_review_page for nav bar 2026-05-04 14:34:46 +02:00
160c08d1c4 fix(presentation): pass None user_id for non-diary page contexts 2026-05-04 14:32:30 +02:00
7aa6d7bf4d feat(template): add user_id to HtmlPageContext and delete button to diary 2026-05-04 14:31:12 +02:00
144f2f8e0c fix(presentation): use {id} syntax in delete review route 2026-05-04 14:29:24 +02:00
cff64f7a6b feat(presentation): add POST /reviews/:id/delete handler and route 2026-05-04 14:27:43 +02:00
5baff54cb9 feat(presentation): add DELETE /api/reviews/:id handler and route 2026-05-04 14:24:48 +02:00
f94d2db8b1 feat(sqlite): implement get_review_by_id, delete_review, delete_movie 2026-05-04 14:21:25 +02:00
48875a6e86 feat(application): add DeleteReviewCommand and delete_review use case 2026-05-04 14:17:13 +02:00
9387ae705b feat(domain): add get_review_by_id to MovieRepository 2026-05-04 14:15:04 +02:00
9871e21bc0 feat(gitignore): add .superpowers and docs/ to .gitignore 2026-05-04 14:00:56 +02:00
fa8efbaa23 feat(database): remove unused SQL queries and update Cargo dependencies 2026-05-04 14:00:33 +02:00
d769a5b55c feat(css): add monospace minimal stylesheet 2026-05-04 13:38:57 +02:00
8e1fb1a974 feat(routes): replace /diary with /, add login/logout/register/reviews/new routes 2026-05-04 13:38:19 +02:00
6145b873f5 feat(handlers): add HTML handlers for login, logout, register, new review, diary index 2026-05-04 13:38:16 +02:00
cc668ae44d feat(dtos): add LoginForm, RegisterForm, ErrorQuery 2026-05-04 13:38:14 +02:00
e5097c22dd feat(extractors): add OptionalCookieUser and RequiredCookieUser 2026-05-04 13:34:31 +02:00
450468ef3d feat(templates): add base layout, login, register, new_review templates; update diary 2026-05-04 13:30:33 +02:00
6e7c6467a7 feat(domain): add find_by_id to UserRepository + SQLite impl 2026-05-04 13:28:20 +02:00
7f815f8207 feat(ports): extend HtmlRenderer with page context and new render methods 2026-05-04 13:20:30 +02:00
5df89200d4 docs: add frontend HTML design spec 2026-05-04 13:19:21 +02:00
eb273dc277 fix(database): update database connection to use DATABASE_URL with SqliteConnectOptions 2026-05-04 12:54:28 +02:00
5689db0ad7 feat(wiring): wire PosterSyncHandler into event channel in main.rs 2026-05-04 12:44:53 +02:00
5c70b8b8be fix(event-handlers): expect over unwrap, panic-stub comment, fix deprecated chrono call 2026-05-04 12:42:03 +02:00
4c547df04e feat(presentation): implement PosterSyncHandler with retry 2026-05-04 12:37:57 +02:00
602df8df22 feat(application): derive Clone on SyncPosterCommand 2026-05-04 12:35:10 +02:00
5b69a3a7c0 test(event-publisher): fix flaky sleep synchronization in EventWorker tests 2026-05-04 12:34:29 +02:00
a38f78d261 feat(event-publisher): add EventHandler trait and fan-out in EventWorker 2026-05-04 12:32:44 +02:00
17f90726e8 feat(event-publisher): add event publisher adapter with configuration and integration 2026-05-04 12:30:42 +02:00
563f33212e docs: event-driven poster sync implementation plan 2026-05-04 12:30:20 +02:00
8e5ac9f433 docs: event-driven poster sync design spec 2026-05-04 12:24:52 +02:00
f790fa2a0f feat(rss): implement RSS feed adapter and integrate with application state 2026-05-04 12:03:17 +02:00
edcf3c1170 feat(poster-fetcher): add poster fetcher adapter with configuration and integration 2026-05-04 11:51:20 +02:00
1985d2c57f feat(poster-storage): implement S3/Minio storage adapter and configuration
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 11:44:44 +02:00
f0b3d8ad90 feat(log_review): add manual title resolution for movie lookup 2026-05-04 11:24:18 +02:00
da72ab1446 feat(metadata): Implement OMDB metadata provider and refactor metadata client
- Added `OmdbProvider` to fetch movie metadata from the OMDB API.
- Refactored `MetadataClient` to use `MetadataSearchCriteria` for fetching movie metadata.
- Updated `MetadataClientImpl` to support fetching metadata using OMDB.
- Modified `log_review` use case to utilize the new metadata fetching mechanism.
- Updated tests and presentation layer to accommodate changes in metadata handling.
- Added dependencies for `reqwest` and `async-trait` in relevant `Cargo.toml` files.
2026-05-04 11:19:51 +02:00
93c65cd155 feat(auth): implement JWT authentication and user registration
- Added JWT authentication with token generation and validation.
- Introduced user registration functionality with email and password.
- Integrated Argon2 for password hashing.
- Created SQLite user repository for user data persistence.
- Updated application context to include user repository and configuration settings.
- Added environment variable support for JWT secret and registration allowance.
- Enhanced error handling for unauthorized access and validation errors.
- Updated presentation layer to handle login and registration requests.
2026-05-04 10:43:07 +02:00
110 changed files with 7853 additions and 528 deletions

2
.cargo/config.toml Normal file
View File

@@ -0,0 +1,2 @@
[env]
SQLX_OFFLINE = "true"

10
.dockerignore Normal file
View File

@@ -0,0 +1,10 @@
target/
.git/
.env
*.db
*.db-shm
*.db-wal
.cargo/
.sqlx/
docs/
dev.db

View File

@@ -0,0 +1,13 @@
DATABASE_URL=sqlite:./dev.db
BASE_URL=http://localhost:3000
PORT=3000
SECURE_COOKIES=false
JWT_SECRET=
JWT_TTL_SECONDS=
ALLOW_REGISTRATION=true
OMDB_API_KEY=
POSTER_FETCH_TIMEOUT_SECONDS=30
MINIO_ENDPOINT=
MINIO_ACCESS_KEY_ID=
MINIO_SECRET_ACCESS_KEY=
MINIO_BUCKET=

6
.gitignore vendored
View File

@@ -8,5 +8,9 @@
.env.prod .env.prod
*.db *.db
*db-shm
*db-wal
.worktrees/ .worktrees/
.superpowers/
docs/

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews WHERE user_id = ?",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "0cd1a7b7255a0ee753deffab7cbb48027d22900a570b98a636c780cb3e2efd23"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT id, email, password_hash FROM users WHERE email = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "password_hash",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "password_hash",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
}

View File

@@ -0,0 +1,98 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,\n u.email AS user_email\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n INNER JOIN users u ON u.id = r.user_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
},
{
"name": "user_email",
"ordinal": 13,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false,
false
]
},
"hash": "217854179b4f77897178e6cfae51fb743e5be49ffc59826509be37a7cc81b6ee"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT strftime('%Y-%m', watched_at) AS month\n FROM reviews\n WHERE user_id = ?\n GROUP BY month\n ORDER BY COUNT(*) DESC\n LIMIT 1",
"describe": {
"columns": [
{
"name": "month",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true
]
},
"hash": "4d85f0ff9732576bba77dc84d3885a0002c2b600c34ba4d99f1e1c5e99f35e75"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "5a861b5a934c9831ff17d896fa48feb95e6dab051c5ac55a66f9793482522199"
}

View File

@@ -0,0 +1,56 @@
{
"db_name": "SQLite",
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "8d144859b397a842118c2dc4ab30e74015a814ed8185b6f86fbe39e641ab804e"
}

View File

@@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(DISTINCT movie_id) AS \"total!: i64\",\n AVG(CAST(rating AS REAL)) AS avg_rating\n FROM reviews WHERE user_id = ?",
"describe": {
"columns": [
{
"name": "total!: i64",
"ordinal": 0,
"type_info": "Integer"
},
{
"name": "avg_rating",
"ordinal": 1,
"type_info": "Float"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true
]
},
"hash": "a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.rating DESC, r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "a3f4385bac7f78a9959648fb325d37096c87859ded1762137ce745955f46830c"
}

View File

@@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT m.director AS \"director!\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 5",
"describe": {
"columns": [
{
"name": "director!",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "count!: i64",
"ordinal": 1,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true,
false
]
},
"hash": "aca9e7aaa32c23b4de3f5048d60340e978d31a36be9121da3c59378f2fc1ed8e"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT m.director\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 1",
"describe": {
"columns": [
{
"name": "director",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true
]
},
"hash": "d5d2a81306488a8cee5654cea7e14d76d76ecc7d2190ffb73d12bec2874111d2"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "DELETE FROM movies WHERE id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883"
}

View File

@@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "SELECT u.id,\n u.email,\n COUNT(DISTINCT r.movie_id) AS \"total_movies!: i64\",\n AVG(CAST(r.rating AS REAL)) AS avg_rating\n FROM users u\n LEFT JOIN reviews r ON r.user_id = u.id\n GROUP BY u.id, u.email\n ORDER BY u.email ASC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "total_movies!: i64",
"ordinal": 2,
"type_info": "Integer"
},
{
"name": "avg_rating",
"ordinal": 3,
"type_info": "Float"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
true
]
},
"hash": "f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "DELETE FROM reviews WHERE id = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT strftime('%Y-%m', watched_at) AS \"month!\",\n AVG(CAST(rating AS REAL)) AS \"avg_rating!: f64\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews\n WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')\n GROUP BY \"month!\"\n ORDER BY \"month!\" ASC",
"describe": {
"columns": [
{
"name": "month!",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "avg_rating!: f64",
"ordinal": 1,
"type_info": "Float"
},
{
"name": "count!: i64",
"ordinal": 2,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true,
false,
false
]
},
"hash": "fdd5b522f26b5e0ce62f76c774fbb606fd9ee9884f4457831f693a0df3609317"
}

1116
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,11 +1,14 @@
[workspace] [workspace]
members = [ members = [
"crates/adapters/auth", "crates/adapters/auth",
"crates/adapters/event-publisher",
"crates/adapters/metadata", "crates/adapters/metadata",
"crates/adapters/poster-fetcher",
"crates/adapters/poster-storage",
"crates/adapters/rss", "crates/adapters/rss",
"crates/adapters/sqlite", "crates/adapters/template-askama", "crates/adapters/sqlite",
"crates/adapters/template-askama",
"crates/application", "crates/application",
"crates/common",
"crates/domain", "crates/domain",
"crates/presentation", "crates/presentation",
] ]
@@ -13,6 +16,7 @@ resolver = "2"
[workspace.dependencies] [workspace.dependencies]
tokio = { version = "1.0", features = ["full"] } tokio = { version = "1.0", features = ["full"] }
dotenvy = "0.15"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
anyhow = "1.0" anyhow = "1.0"
@@ -22,14 +26,23 @@ tracing-subscriber = { version = "0.3.23", features = ["env-filter"] }
async-trait = "0.1" async-trait = "0.1"
uuid = { version = "1.23.0", features = ["v4", "serde"] } uuid = { version = "1.23.0", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "sqlite", "uuid", "macros"] } sqlx = { version = "0.8.6", features = [
template-askama = { path = "crates/adapters/template-askama" } "runtime-tokio-rustls",
"sqlite",
"uuid",
"macros",
] }
reqwest = { version = "0.13", features = ["json", "query"] }
object_store = { version = "0.11", features = ["aws"] }
domain = { path = "crates/domain" } domain = { path = "crates/domain" }
common = { path = "crates/common" }
application = { path = "crates/application" } application = { path = "crates/application" }
presentation = { path = "crates/presentation" } presentation = { path = "crates/presentation" }
auth = { path = "crates/adapters/auth" } auth = { path = "crates/adapters/auth" }
metadata = { path = "crates/adapters/metadata" } metadata = { path = "crates/adapters/metadata" }
poster-fetcher = { path = "crates/adapters/poster-fetcher" }
poster-storage = { path = "crates/adapters/poster-storage" }
event-publisher = { path = "crates/adapters/event-publisher" }
rss = { path = "crates/adapters/rss" } rss = { path = "crates/adapters/rss" }
sqlite = { path = "crates/adapters/sqlite" } sqlite = { path = "crates/adapters/sqlite" }
template-askama = { path = "crates/adapters/template-askama" }

57
Dockerfile Normal file
View File

@@ -0,0 +1,57 @@
# ----- build -----
FROM rust:slim-bookworm AS builder
RUN apt-get update && apt-get install -y --no-install-recommends sqlite3 && rm -rf /var/lib/apt/lists/*
WORKDIR /build
# Cache dependency compilation separately from source
COPY Cargo.toml Cargo.lock ./
COPY crates/adapters/auth/Cargo.toml crates/adapters/auth/Cargo.toml
COPY crates/adapters/event-publisher/Cargo.toml crates/adapters/event-publisher/Cargo.toml
COPY crates/adapters/metadata/Cargo.toml crates/adapters/metadata/Cargo.toml
COPY crates/adapters/poster-fetcher/Cargo.toml crates/adapters/poster-fetcher/Cargo.toml
COPY crates/adapters/poster-storage/Cargo.toml crates/adapters/poster-storage/Cargo.toml
COPY crates/adapters/rss/Cargo.toml crates/adapters/rss/Cargo.toml
COPY crates/adapters/sqlite/Cargo.toml crates/adapters/sqlite/Cargo.toml
COPY crates/adapters/template-askama/Cargo.toml crates/adapters/template-askama/Cargo.toml
COPY crates/application/Cargo.toml crates/application/Cargo.toml
COPY crates/domain/Cargo.toml crates/domain/Cargo.toml
COPY crates/presentation/Cargo.toml crates/presentation/Cargo.toml
# Stub every crate so cargo can resolve and fetch deps
RUN find crates -name "Cargo.toml" | sed 's|/Cargo.toml||' | \
xargs -I{} sh -c 'mkdir -p {}/src && echo "fn main(){}" > {}/src/main.rs && echo "" > {}/src/lib.rs'
RUN cargo fetch
# Now copy real sources (invalidates cache only on source changes)
COPY crates ./crates
# sqlx macros verify queries at compile time; create a real DB from migrations
RUN sqlite3 /build/dev.db \
< crates/adapters/sqlite/migrations/0001_initial.sql && \
sqlite3 /build/dev.db \
< crates/adapters/sqlite/migrations/0002_users.sql
ENV DATABASE_URL=sqlite:///build/dev.db
RUN cargo build --release -p presentation
# ----- runtime -----
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY --from=builder /build/target/release/presentation ./presentation
COPY static ./static
EXPOSE 3000
ENV RUST_LOG=presentation=info,tower_http=info
CMD ["./presentation"]

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2026 Gabriel Kaszewski
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

85
README.md Normal file
View File

@@ -0,0 +1,85 @@
# Movies Diary
A self-hosted, server-side rendered movie logging system. Built in Rust — no JavaScript, no SPA, just HTML forms and an RSS feed. Designed to run as a lightweight widget embedded on a personal site.
## Features
- Log movies with a TMDB/OMDb ID and a 0–5 rating
- Immutable append-only viewing ledger (tracks re-watches)
- Background poster fetching and storage (S3-compatible)
- RSS/Atom feed for public subscription
- JWT authentication via cookie (HTML) or Bearer token (REST API)
- Zero JavaScript
## Architecture
Hexagonal (Ports & Adapters) with Domain-Driven Design:
```
domain — pure types and trait definitions, no external deps
application — use cases / business logic orchestration
presentation — Axum HTTP router, wires all adapters together
adapters/
auth — JWT issuance and validation (Argon2 passwords)
sqlite — SQLite repository via sqlx
metadata — OMDb HTTP client
poster-fetcher — downloads poster images
poster-storage — uploads posters to S3-compatible storage
template-askama — Askama HTML rendering
rss — RSS/Atom feed generation
event-publisher — async event channel for background poster sync
```
## Prerequisites
- Rust (stable, 2024 edition)
- SQLite
- An S3-compatible object store (e.g. MinIO) for poster storage
- An [OMDb API key](https://www.omdbapi.com/apikey.aspx)
## Environment Variables
Copy and fill in the following (e.g. in a `.env` file):
```env
# Database
DATABASE_URL=sqlite://movies.db
# Authentication
JWT_SECRET=change-me
JWT_TTL_SECONDS=86400
# OMDb metadata
OMDB_API_KEY=your-key
# Poster storage (S3-compatible)
MINIO_ENDPOINT=http://localhost:9000
MINIO_BUCKET=posters
MINIO_REGION=us-east-1
MINIO_ACCESS_KEY_ID=minioadmin
MINIO_SECRET_ACCESS_KEY=minioadmin
# Optional
ALLOW_REGISTRATION=false
POSTER_FETCH_TIMEOUT_SECONDS=10
EVENT_CHANNEL_BUFFER=32
RUST_LOG=presentation=debug,tower_http=debug
```
## Run
```bash
cargo run -p presentation
```
Server listens on `0.0.0.0:3000`.
## Test
```bash
cargo test
```
## License
MIT License. See [LICENSE](LICENSE).

View File

@@ -6,3 +6,10 @@ edition = "2024"
[dependencies] [dependencies]
async-trait = { workspace = true } async-trait = { workspace = true }
domain = { workspace = true } domain = { workspace = true }
anyhow = { workspace = true }
chrono = { workspace = true }
uuid = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
jsonwebtoken = "9"
argon2 = { version = "0.5", features = ["std"] }
rand_core = { version = "0.6", features = ["getrandom"] }

View File

@@ -1,13 +1,104 @@
use async_trait::async_trait; use async_trait::async_trait;
use domain::{errors::DomainError, ports::AuthService, value_objects::UserId}; use argon2::{
Argon2,
password_hash::{PasswordHasher as _, PasswordVerifier, SaltString},
};
use chrono::{Duration, Utc};
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation, decode, encode};
use rand_core::OsRng;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
pub struct StubAuthService; use domain::{
errors::DomainError,
ports::{AuthService, GeneratedToken, PasswordHasher},
value_objects::{PasswordHash, UserId},
};
#[async_trait] pub struct AuthConfig {
impl AuthService for StubAuthService { secret: String,
async fn validate_token(&self, _token: &str) -> Result<UserId, DomainError> { ttl_seconds: u64,
Err(DomainError::InfrastructureError( }
"auth service not implemented".into(),
)) impl AuthConfig {
pub fn from_env() -> anyhow::Result<Self> {
let secret = std::env::var("JWT_SECRET")
.map_err(|_| anyhow::anyhow!("JWT_SECRET env var is required"))?;
if secret.is_empty() {
anyhow::bail!("JWT_SECRET must not be empty");
}
let ttl_seconds = std::env::var("JWT_TTL_SECONDS")
.ok()
.and_then(|v| v.parse().ok())
.unwrap_or(86400u64);
Ok(Self { secret, ttl_seconds })
}
}
#[derive(Serialize, Deserialize)]
struct Claims {
sub: String,
exp: u64,
}
pub struct JwtAuthService {
config: AuthConfig,
}
impl JwtAuthService {
pub fn new(config: AuthConfig) -> Self {
Self { config }
}
}
#[async_trait]
impl AuthService for JwtAuthService {
async fn generate_token(&self, user_id: &UserId) -> Result<GeneratedToken, DomainError> {
let expires_at = Utc::now() + Duration::seconds(self.config.ttl_seconds as i64);
let claims = Claims {
sub: user_id.value().to_string(),
exp: expires_at.timestamp() as u64,
};
let token = encode(
&Header::default(),
&claims,
&EncodingKey::from_secret(self.config.secret.as_bytes()),
)
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
Ok(GeneratedToken { token, expires_at })
}
async fn validate_token(&self, token: &str) -> Result<UserId, DomainError> {
let data = decode::<Claims>(
token,
&DecodingKey::from_secret(self.config.secret.as_bytes()),
&Validation::default(),
)
.map_err(|_| DomainError::Unauthorized("Invalid or expired token".into()))?;
let uuid = Uuid::parse_str(&data.claims.sub)
.map_err(|_| DomainError::Unauthorized("Invalid token subject".into()))?;
Ok(UserId::from_uuid(uuid))
}
}
pub struct Argon2PasswordHasher;
#[async_trait]
impl PasswordHasher for Argon2PasswordHasher {
async fn hash(&self, plain_password: &str) -> Result<PasswordHash, DomainError> {
let salt = SaltString::generate(&mut OsRng);
let hash = Argon2::default()
.hash_password(plain_password.as_bytes(), &salt)
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?
.to_string();
PasswordHash::new(hash).map_err(|e| DomainError::InfrastructureError(e.to_string()))
}
async fn verify(&self, plain_password: &str, hash: &PasswordHash) -> Result<bool, DomainError> {
let parsed = argon2::password_hash::PasswordHash::new(hash.value())
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
Ok(Argon2::default()
.verify_password(plain_password.as_bytes(), &parsed)
.is_ok())
} }
} }

View File

@@ -0,0 +1,10 @@
[package]
name = "event-publisher"
version = "0.1.0"
edition = "2024"
[dependencies]
domain = { workspace = true }
async-trait = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }

View File

@@ -0,0 +1,209 @@
use async_trait::async_trait;
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
use tokio::sync::mpsc;
/// Configuration for the in-process domain-event channel.
pub struct EventPublisherConfig {
    /// Capacity of the bounded mpsc channel between publisher and worker.
    pub channel_buffer: usize,
}

impl EventPublisherConfig {
    /// Builds the config from the `EVENT_CHANNEL_BUFFER` environment
    /// variable, falling back to 128 when it is unset or unparseable.
    pub fn from_env() -> Self {
        let channel_buffer = match std::env::var("EVENT_CHANNEL_BUFFER") {
            Ok(raw) => raw.parse().unwrap_or(128),
            Err(_) => 128,
        };
        Self { channel_buffer }
    }
}
/// A consumer of domain events, invoked by the event worker for every
/// event drained from the channel.
#[async_trait]
pub trait EventHandler: Send + Sync {
    /// Handles a single event. Errors returned here are logged by the
    /// worker and do not stop processing of subsequent events.
    async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
}
/// `EventPublisher` implementation that forwards events onto a bounded
/// tokio mpsc channel, to be drained by an `EventWorker`.
pub struct ChannelEventPublisher {
    sender: mpsc::Sender<DomainEvent>,
}
#[async_trait]
impl EventPublisher for ChannelEventPublisher {
    /// Sends a clone of the event to the worker. Awaiting provides
    /// backpressure when the channel buffer is full; the send only fails
    /// once the receiving worker has been dropped.
    async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError> {
        self.sender
            .send(event.clone())
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))
    }
}
/// Background task that drains the event channel and dispatches each
/// event to every registered handler.
pub struct EventWorker {
    receiver: mpsc::Receiver<DomainEvent>,
    handlers: Vec<Box<dyn EventHandler>>,
}
impl EventWorker {
    /// Runs until every `ChannelEventPublisher` (sender) has been dropped,
    /// then logs shutdown and returns. Each received event is first logged
    /// per-variant, then handed to all handlers in registration order.
    pub async fn run(mut self) {
        while let Some(event) = self.receiver.recv().await {
            // Structured log of the event payload before dispatch.
            match &event {
                DomainEvent::ReviewLogged {
                    review_id,
                    movie_id,
                    user_id,
                    rating,
                    watched_at,
                } => {
                    tracing::info!(
                        review_id = %review_id.value(),
                        movie_id = %movie_id.value(),
                        user_id = %user_id.value(),
                        rating = rating.value(),
                        watched_at = %watched_at,
                        "event: review_logged"
                    );
                }
                DomainEvent::MovieDiscovered {
                    movie_id,
                    external_metadata_id,
                } => {
                    tracing::info!(
                        movie_id = %movie_id.value(),
                        external_id = external_metadata_id.value(),
                        "event: movie_discovered"
                    );
                }
            }
            // A failing handler is logged but never stops the worker or
            // prevents later handlers from seeing the event.
            for handler in &self.handlers {
                if let Err(e) = handler.handle(&event).await {
                    tracing::error!("event handler error: {e}");
                }
            }
        }
        tracing::info!("event worker shut down");
    }
}
/// `EventPublisher` that silently discards every event — useful for tests
/// or configurations where no background processing is wanted.
pub struct NoopEventPublisher;
#[async_trait]
impl EventPublisher for NoopEventPublisher {
    /// Always succeeds without doing anything.
    async fn publish(&self, _event: &DomainEvent) -> Result<(), DomainError> {
        Ok(())
    }
}
/// Wires up a bounded event channel: returns the publisher half and the
/// worker that drains it, feeding every event to each handler in turn.
/// Spawn `worker.run()` on the runtime; it exits when the publisher
/// (and all of its clones) is dropped.
pub fn create_event_channel(
    config: EventPublisherConfig,
    handlers: Vec<Box<dyn EventHandler>>,
) -> (ChannelEventPublisher, EventWorker) {
    let (sender, receiver) = mpsc::channel(config.channel_buffer);
    let publisher = ChannelEventPublisher { sender };
    let worker = EventWorker { receiver, handlers };
    (publisher, worker)
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::{Arc, Mutex};
    use domain::{
        errors::DomainError,
        events::DomainEvent,
        value_objects::{ExternalMetadataId, MovieId},
    };
    /// Test handler that records a label for every event it receives.
    struct RecordingHandler {
        calls: Arc<Mutex<Vec<String>>>,
    }
    #[async_trait]
    impl EventHandler for RecordingHandler {
        async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
            let label = match event {
                DomainEvent::MovieDiscovered { .. } => "movie_discovered",
                DomainEvent::ReviewLogged { .. } => "review_logged",
            };
            self.calls.lock().unwrap().push(label.to_string());
            Ok(())
        }
    }
    // Publishing one event delivers it to the single registered handler.
    #[tokio::test]
    async fn single_handler_receives_event() {
        let calls = Arc::new(Mutex::new(vec![]));
        let handler = RecordingHandler { calls: Arc::clone(&calls) };
        let config = EventPublisherConfig { channel_buffer: 8 };
        let (publisher, worker) = create_event_channel(config, vec![Box::new(handler)]);
        let handle = tokio::spawn(worker.run());
        let event = DomainEvent::MovieDiscovered {
            movie_id: MovieId::generate(),
            external_metadata_id: ExternalMetadataId::new("tt1234567".into()).unwrap(),
        };
        publisher.publish(&event).await.unwrap();
        // Dropping the publisher closes the channel so the worker exits.
        drop(publisher);
        handle.await.unwrap();
        assert_eq!(*calls.lock().unwrap(), vec!["movie_discovered"]);
    }
    // Every registered handler sees each published event exactly once.
    #[tokio::test]
    async fn multiple_handlers_all_receive_event() {
        let calls1 = Arc::new(Mutex::new(vec![]));
        let calls2 = Arc::new(Mutex::new(vec![]));
        let handler1 = RecordingHandler { calls: Arc::clone(&calls1) };
        let handler2 = RecordingHandler { calls: Arc::clone(&calls2) };
        let config = EventPublisherConfig { channel_buffer: 8 };
        let (publisher, worker) = create_event_channel(
            config,
            vec![Box::new(handler1), Box::new(handler2)],
        );
        let handle = tokio::spawn(worker.run());
        let event = DomainEvent::MovieDiscovered {
            movie_id: MovieId::generate(),
            external_metadata_id: ExternalMetadataId::new("tt9999999".into()).unwrap(),
        };
        publisher.publish(&event).await.unwrap();
        drop(publisher);
        handle.await.unwrap();
        assert_eq!(calls1.lock().unwrap().len(), 1);
        assert_eq!(calls2.lock().unwrap().len(), 1);
    }
    // A handler returning Err must not stop the worker or starve the
    // remaining handlers of the event.
    #[tokio::test]
    async fn handler_error_does_not_stop_worker() {
        struct FailingHandler;
        #[async_trait]
        impl EventHandler for FailingHandler {
            async fn handle(&self, _: &DomainEvent) -> Result<(), DomainError> {
                Err(DomainError::InfrastructureError("boom".into()))
            }
        }
        let calls = Arc::new(Mutex::new(vec![]));
        let good = RecordingHandler { calls: Arc::clone(&calls) };
        let config = EventPublisherConfig { channel_buffer: 8 };
        let (publisher, worker) = create_event_channel(
            config,
            vec![Box::new(FailingHandler), Box::new(good)],
        );
        let handle = tokio::spawn(worker.run());
        let event = DomainEvent::MovieDiscovered {
            movie_id: MovieId::generate(),
            external_metadata_id: ExternalMetadataId::new("tt0000001".into()).unwrap(),
        };
        publisher.publish(&event).await.unwrap();
        drop(publisher);
        handle.await.unwrap();
        assert_eq!(calls.lock().unwrap().len(), 1);
    }
}

View File

@@ -4,3 +4,7 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
async-trait = { workspace = true }
reqwest = { workspace = true }
serde = { workspace = true }
domain = { workspace = true }

View File

@@ -1,14 +1,54 @@
pub fn add(left: u64, right: u64) -> u64 { use async_trait::async_trait;
left + right use domain::{
errors::DomainError,
models::Movie,
ports::{MetadataClient, MetadataSearchCriteria},
value_objects::{ExternalMetadataId, MovieTitle, PosterUrl, ReleaseYear},
};
mod omdb;
pub(crate) struct ProviderMovie {
pub imdb_id: ExternalMetadataId,
pub title: MovieTitle,
pub release_year: ReleaseYear,
pub director: Option<String>,
pub poster_url: Option<PosterUrl>,
} }
#[cfg(test)] #[async_trait]
mod tests { pub(crate) trait MetadataProvider: Send + Sync {
use super::*; async fn fetch(&self, criteria: &MetadataSearchCriteria) -> Result<ProviderMovie, DomainError>;
}
#[test] pub struct MetadataClientImpl {
fn it_works() { provider: Box<dyn MetadataProvider>,
let result = add(2, 2); }
assert_eq!(result, 4);
impl MetadataClientImpl {
pub fn new_omdb(api_key: String) -> Self {
Self {
provider: Box::new(omdb::OmdbProvider::new(api_key)),
}
}
}
#[async_trait]
impl MetadataClient for MetadataClientImpl {
async fn fetch_movie_metadata(
&self,
criteria: &MetadataSearchCriteria,
) -> Result<Movie, DomainError> {
let pm = self.provider.fetch(criteria).await?;
Ok(Movie::new(Some(pm.imdb_id), pm.title, pm.release_year, pm.director, None))
}
async fn get_poster_url(
&self,
external_metadata_id: &ExternalMetadataId,
) -> Result<Option<PosterUrl>, DomainError> {
let criteria = MetadataSearchCriteria::ImdbId(external_metadata_id.clone());
let pm = self.provider.fetch(&criteria).await?;
Ok(pm.poster_url)
} }
} }

View File

@@ -0,0 +1,119 @@
use async_trait::async_trait;
use domain::{
errors::DomainError,
ports::MetadataSearchCriteria,
value_objects::{ExternalMetadataId, MovieTitle, PosterUrl, ReleaseYear},
};
use serde::Deserialize;
use crate::{MetadataProvider, ProviderMovie};
/// `MetadataProvider` backed by the OMDb HTTP API.
pub(crate) struct OmdbProvider {
    client: reqwest::Client,
    api_key: String,
    // Kept as a field rather than a constant — presumably so tests can
    // point it at a mock server; confirm before relying on this.
    base_url: String,
}
impl OmdbProvider {
    /// Creates a provider using a default `reqwest` client against the
    /// public OMDb endpoint.
    pub(crate) fn new(api_key: String) -> Self {
        Self {
            client: reqwest::Client::new(),
            api_key,
            base_url: "http://www.omdbapi.com/".to_string(),
        }
    }
}
/// Raw OMDb JSON payload.
///
/// When `Response` is `"False"` (title not found, invalid key, …) OMDb
/// returns only `Response` and `Error`, so every payload field must be
/// optional — with required fields, deserialization would fail with
/// "missing field" before the API-level error could be mapped to
/// `DomainError::NotFound`.
#[derive(Deserialize)]
struct OmdbResponse {
    #[serde(rename = "Title")]
    title: Option<String>,
    #[serde(rename = "Year")]
    year: Option<String>,
    #[serde(rename = "Director")]
    director: Option<String>,
    #[serde(rename = "Poster")]
    poster: Option<String>,
    #[serde(rename = "imdbID")]
    imdb_id: Option<String>,
    #[serde(rename = "Response")]
    response: String,
    #[serde(rename = "Error")]
    error: Option<String>,
}

/// Unwraps a field that must be present on a successful (`Response ==
/// "True"`) OMDb reply, naming the field in the error otherwise.
fn require_field(value: Option<String>, name: &str) -> Result<String, DomainError> {
    value.ok_or_else(|| {
        DomainError::InfrastructureError(format!("OMDb response missing field: {}", name))
    })
}

#[async_trait]
impl MetadataProvider for OmdbProvider {
    /// Queries OMDb by IMDb id or by title (optionally with year) and maps
    /// the payload into a `ProviderMovie`.
    ///
    /// # Errors
    /// - `DomainError::NotFound` when OMDb reports the title as not found
    /// - `DomainError::InfrastructureError` for transport, HTTP status,
    ///   decoding, or value-object validation failures
    async fn fetch(&self, criteria: &MetadataSearchCriteria) -> Result<ProviderMovie, DomainError> {
        let mut url = reqwest::Url::parse(&self.base_url)
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        {
            // Scope the mutable borrow of the query-pairs serializer.
            let mut params = url.query_pairs_mut();
            params.append_pair("apikey", &self.api_key);
            match criteria {
                MetadataSearchCriteria::ImdbId(id) => {
                    params.append_pair("i", id.value());
                }
                MetadataSearchCriteria::Title { title, year } => {
                    params.append_pair("t", title);
                    if let Some(y) = year {
                        params.append_pair("y", &y.to_string());
                    }
                }
            }
        }
        let http_resp = self
            .client
            .get(url)
            .send()
            .await
            .map_err(|e: reqwest::Error| DomainError::InfrastructureError(e.to_string()))?
            .error_for_status()
            .map_err(|e: reqwest::Error| DomainError::InfrastructureError(e.to_string()))?;
        let resp: OmdbResponse = http_resp
            .json()
            .await
            .map_err(|e: reqwest::Error| DomainError::InfrastructureError(e.to_string()))?;
        // OMDb signals application-level failure with HTTP 200 and
        // `"Response": "False"` plus an `Error` message.
        if resp.response != "True" {
            let msg = resp.error.unwrap_or_default();
            return if msg.to_lowercase().contains("not found") {
                Err(DomainError::NotFound(msg))
            } else {
                Err(DomainError::InfrastructureError(msg))
            };
        }
        let year_raw = require_field(resp.year, "Year")?;
        // OMDb can return year ranges (e.g. for series); keep only the
        // first four characters as the release year.
        let year: u16 = year_raw
            .chars()
            .take(4)
            .collect::<String>()
            .parse()
            .map_err(|_| {
                DomainError::InfrastructureError(format!("Unparseable year: {}", year_raw))
            })?;
        let imdb_id = ExternalMetadataId::new(require_field(resp.imdb_id, "imdbID")?)
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        let title = MovieTitle::new(require_field(resp.title, "Title")?)
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        let release_year = ReleaseYear::new(year)
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        // OMDb uses the literal string "N/A" for absent optional fields.
        let director = match resp.director.as_deref() {
            None | Some("N/A") | Some("") => None,
            Some(d) => Some(d.to_string()),
        };
        let poster_url = match resp.poster.as_deref() {
            None | Some("N/A") | Some("") => None,
            Some(url) => PosterUrl::new(url.to_string()).ok(),
        };
        Ok(ProviderMovie { imdb_id, title, release_year, director, poster_url })
    }
}

View File

@@ -0,0 +1,10 @@
[package]
name = "poster-fetcher"
version = "0.1.0"
edition = "2021"
[dependencies]
domain = { workspace = true }
async-trait = { workspace = true }
reqwest = { workspace = true }
anyhow = { workspace = true }

View File

@@ -0,0 +1,13 @@
/// Configuration for the HTTP poster fetcher.
pub struct PosterFetcherConfig {
    /// Per-request timeout, in seconds.
    pub timeout_seconds: u64,
}

impl PosterFetcherConfig {
    /// Reads `POSTER_FETCH_TIMEOUT_SECONDS` from the environment,
    /// defaulting to 30 when the variable is unset or not a valid integer.
    pub fn from_env() -> Self {
        let timeout_seconds = match std::env::var("POSTER_FETCH_TIMEOUT_SECONDS") {
            Ok(raw) => raw.parse().unwrap_or(30),
            Err(_) => 30,
        };
        Self { timeout_seconds }
    }
}

View File

@@ -0,0 +1,38 @@
mod config;
pub use config::PosterFetcherConfig;
use std::time::Duration;
use async_trait::async_trait;
use domain::{errors::DomainError, ports::PosterFetcherClient, value_objects::PosterUrl};
/// Downloads poster images over HTTP using `reqwest`.
pub struct ReqwestPosterFetcher {
    client: reqwest::Client,
}
impl ReqwestPosterFetcher {
    /// Builds a client with the configured request timeout.
    ///
    /// # Errors
    /// Returns an error if the underlying `reqwest` client cannot be built.
    pub fn new(config: PosterFetcherConfig) -> anyhow::Result<Self> {
        let client = reqwest::Client::builder()
            .timeout(Duration::from_secs(config.timeout_seconds))
            .build()?;
        Ok(Self { client })
    }
}
#[async_trait]
impl PosterFetcherClient for ReqwestPosterFetcher {
    /// Fetches the poster at `poster_url` and returns its raw bytes.
    /// Non-2xx statuses and transport failures are surfaced as
    /// `DomainError::InfrastructureError`.
    async fn fetch_poster_bytes(&self, poster_url: &PosterUrl) -> Result<Vec<u8>, DomainError> {
        let bytes = self
            .client
            .get(poster_url.value())
            .send()
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?
            .error_for_status()
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?
            .bytes()
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(bytes.to_vec())
    }
}

View File

@@ -0,0 +1,16 @@
[package]
name = "poster-storage"
version = "0.1.0"
edition = "2024"
[dependencies]
domain = { workspace = true }
anyhow = { workspace = true }
async-trait = { workspace = true }
tracing = { workspace = true }
object_store = { workspace = true }
infer = "0.19.0"
[dev-dependencies]
tokio = { workspace = true }
uuid = { workspace = true }

View File

@@ -0,0 +1,38 @@
use anyhow::Context;
use object_store::{aws::AmazonS3Builder, ObjectStore};
use std::sync::Arc;
/// Connection settings for the S3-compatible poster store (e.g. MinIO),
/// read from `MINIO_*` environment variables.
pub struct StorageConfig {
    endpoint: String,
    access_key_id: String,
    secret_access_key: String,
    bucket: String,
    region: String,
}
impl StorageConfig {
    /// Loads the config from the environment.
    ///
    /// # Errors
    /// Fails when any of `MINIO_ENDPOINT`, `MINIO_ACCESS_KEY_ID`,
    /// `MINIO_SECRET_ACCESS_KEY`, or `MINIO_BUCKET` is missing.
    /// `MINIO_REGION` is optional and defaults to `"minio"`.
    pub fn from_env() -> anyhow::Result<Self> {
        Ok(Self {
            endpoint: std::env::var("MINIO_ENDPOINT").context("MINIO_ENDPOINT required")?,
            access_key_id: std::env::var("MINIO_ACCESS_KEY_ID")
                .context("MINIO_ACCESS_KEY_ID required")?,
            secret_access_key: std::env::var("MINIO_SECRET_ACCESS_KEY")
                .context("MINIO_SECRET_ACCESS_KEY required")?,
            bucket: std::env::var("MINIO_BUCKET").context("MINIO_BUCKET required")?,
            region: std::env::var("MINIO_REGION").unwrap_or_else(|_| "minio".to_string()),
        })
    }
    /// Consumes the config and builds the `object_store` handle.
    ///
    /// # Errors
    /// Fails when the S3 builder rejects the settings.
    pub fn build_store(self) -> anyhow::Result<Arc<dyn ObjectStore>> {
        let store = AmazonS3Builder::new()
            .with_endpoint(self.endpoint)
            .with_access_key_id(self.access_key_id)
            .with_secret_access_key(self.secret_access_key)
            .with_bucket_name(self.bucket)
            .with_region(self.region)
            // Plain-HTTP endpoints are permitted for local MinIO setups.
            .with_allow_http(true)
            .build()
            .context("Failed to build S3/Minio store")?;
        Ok(Arc::new(store))
    }
}

View File

@@ -0,0 +1,95 @@
mod config;
pub use config::StorageConfig;
use async_trait::async_trait;
use domain::{
errors::DomainError,
ports::PosterStorage,
value_objects::{MovieId, PosterPath},
};
use object_store::{Attribute, Attributes, PutOptions, path::Path, ObjectStore};
use std::sync::Arc;
/// Best-effort MIME sniffing from the image's magic bytes; falls back to
/// the generic binary type when the format is not recognized.
fn detect_mime(bytes: &[u8]) -> &'static str {
    match infer::get(bytes) {
        Some(kind) => kind.mime_type(),
        None => "application/octet-stream",
    }
}
/// `PosterStorage` implementation on top of any `object_store` backend
/// (S3/MinIO in production, in-memory in tests).
pub struct PosterStorageAdapter {
    store: Arc<dyn ObjectStore>,
}
impl PosterStorageAdapter {
    /// Wraps an existing object store.
    pub fn new(store: Arc<dyn ObjectStore>) -> Self {
        Self { store }
    }
    /// Convenience constructor that builds the store from a `StorageConfig`.
    pub fn from_config(config: StorageConfig) -> anyhow::Result<Self> {
        Ok(Self::new(config.build_store()?))
    }
}
#[async_trait]
impl PosterStorage for PosterStorageAdapter {
    /// Stores the poster bytes under a key derived from the movie id and
    /// returns that key as a `PosterPath`. Content type is sniffed from
    /// the bytes so browsers can render the object directly.
    async fn store_poster(
        &self,
        movie_id: &MovieId,
        image_bytes: &[u8],
    ) -> Result<PosterPath, DomainError> {
        let path = Path::from(movie_id.value().to_string());
        let mime = detect_mime(image_bytes);
        let mut attributes = Attributes::new();
        attributes.insert(Attribute::ContentType, mime.into());
        let opts = PutOptions { attributes, ..Default::default() };
        self.store
            .put_opts(&path, image_bytes.to_vec().into(), opts)
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        PosterPath::new(path.to_string())
    }
    /// Retrieves previously stored poster bytes.
    ///
    /// # Errors
    /// - `DomainError::NotFound` when no object exists at the path
    /// - `DomainError::InfrastructureError` for all other store failures
    async fn get_poster(&self, poster_path: &PosterPath) -> Result<Vec<u8>, DomainError> {
        let path = Path::from(poster_path.value().to_string());
        let result = self.store.get(&path).await.map_err(|e| match e {
            object_store::Error::NotFound { .. } => DomainError::NotFound("Poster not found".into()),
            _ => DomainError::InfrastructureError(e.to_string()),
        })?;
        result
            .bytes()
            .await
            .map(|b| b.to_vec())
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use object_store::memory::InMemory;
    use uuid::Uuid;
    /// Adapter backed by an in-memory store, so tests need no network.
    fn adapter() -> PosterStorageAdapter {
        PosterStorageAdapter::new(Arc::new(InMemory::new()))
    }
    // Bytes written for a movie can be read back via the returned path.
    #[tokio::test]
    async fn store_and_retrieve_round_trip() {
        let adapter = adapter();
        let movie_id = MovieId::from_uuid(Uuid::new_v4());
        let bytes = b"fake-image-bytes";
        let path = adapter.store_poster(&movie_id, bytes).await.unwrap();
        let retrieved = adapter.get_poster(&path).await.unwrap();
        assert_eq!(retrieved, bytes);
    }
    // Missing objects map to DomainError::NotFound, not a generic error.
    #[tokio::test]
    async fn get_missing_returns_not_found() {
        let adapter = adapter();
        let path = PosterPath::new("nonexistent".into()).unwrap();
        let result = adapter.get_poster(&path).await;
        assert!(matches!(result, Err(DomainError::NotFound(_))));
    }
}

View File

@@ -4,3 +4,7 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
rss-feed = { package = "rss", version = "2" }
chrono = { workspace = true }
domain = { workspace = true }
application = { workspace = true }

View File

@@ -1,5 +1,59 @@
pub fn add(left: u64, right: u64) -> u64 { use application::ports::RssFeedRenderer;
left + right use domain::models::DiaryEntry;
use rss_feed::{ChannelBuilder, GuidBuilder, ItemBuilder};
pub struct RssAdapter {
feed_link: String,
}
impl RssAdapter {
pub fn new(feed_link: String) -> Self {
Self { feed_link }
}
}
impl RssFeedRenderer for RssAdapter {
fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String> {
let items = entries
.iter()
.map(|e| {
let item_title = format!(
"{} ({})",
e.movie().title().value(),
e.movie().release_year().value()
);
let description = match e.review().comment() {
Some(c) => format!("{}/5 — {}", e.review().rating().value(), c.value()),
None => format!("{}/5", e.review().rating().value()),
};
let pub_date = e
.review()
.watched_at()
.and_utc()
.format("%a, %d %b %Y %H:%M:%S +0000")
.to_string();
let guid = GuidBuilder::default()
.value(e.review().id().value().to_string())
.permalink(false)
.build();
ItemBuilder::default()
.title(Some(item_title))
.description(Some(description))
.pub_date(Some(pub_date))
.guid(Some(guid))
.build()
})
.collect::<Vec<_>>();
let channel = ChannelBuilder::default()
.title(title.to_string())
.link(self.feed_link.clone())
.description(title.to_string())
.items(items)
.build();
Ok(channel.to_string())
}
} }
#[cfg(test)] #[cfg(test)]
@@ -7,8 +61,16 @@ mod tests {
use super::*; use super::*;
#[test] #[test]
fn it_works() { fn render_feed_uses_provided_title() {
let result = add(2, 2); let adapter = RssAdapter::new("http://example.com".into());
assert_eq!(result, 4); let xml = adapter.render_feed(&[], "Custom Title").unwrap();
assert!(xml.contains("<title>Custom Title</title>"));
}
#[test]
fn render_feed_empty_entries_produces_valid_xml() {
let adapter = RssAdapter::new("http://example.com".into());
let xml = adapter.render_feed(&[], "My Feed").unwrap();
assert!(xml.starts_with("<?xml") || xml.starts_with("<rss"));
} }
} }

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "01a08873b7fa815ad98a56a0902b60414cfcdc2c7a8570351320c4bc425347c6"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "026e2afeb573707cb360fcdab8f6137aabfaf603b5ed57b98ac2888b4a0389ff"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false
]
},
"hash": "0963b9661182e139cd760bbabb0d6ea3a301a2a3adbdfdda4a88f333a1144c77"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT id, email, password_hash FROM users WHERE email = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "password_hash",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "email",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "password_hash",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE title = ? AND release_year = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "3047579c6ed13ce87aad9b9ce6300c02f0df3516979518976e13f9d9abc6a403"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "33d0dae7d16b0635c1c7eb5afd10824bb55af7cc7a854f590d326622863759d1"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "47f7cf95ce3450635b643ab710cadba96f40319140834d510bc5207b2552e055"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews WHERE movie_id = ?",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "4b3074b532342c6356ee0e8e4d8c4a830f016234bb690e1f6240f02824d6d84f"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO reviews (id, movie_id, user_id, rating, comment, watched_at, created_at)\n VALUES (?, ?, ?, ?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 7
},
"nullable": []
},
"hash": "630e092fcd33bc312befef352a98225e6e18e6079644b949258a39bf4b0fe3e5"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE external_metadata_id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "7bc4aebcb94547976d3d7e063e4e908fc22b977b3cbf063ee93ffe4648c42011"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO movies (id, external_metadata_id, title, release_year, director, poster_path)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(id) DO UPDATE SET\n external_metadata_id = excluded.external_metadata_id,\n title = excluded.title,\n release_year = excluded.release_year,\n director = excluded.director,\n poster_path = excluded.poster_path",
"describe": {
"columns": [],
"parameters": {
"Right": 6
},
"nullable": []
},
"hash": "7d7e23355ee0e442f2aa27e898dcfa40bdc4b09391afe04325f076157d9d84aa"
}

View File

@@ -0,0 +1,56 @@
{
"db_name": "SQLite",
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE movie_id = ? ORDER BY watched_at ASC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "af883f8b78f185077e2d3dcfaa0a6e62fbdfbf00c97c9b33b699dc631476181d"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "affe1eb261283c09d4b1ce6e684681755f079a044ffec8ff2bd79cfd8efe16b8"
}

View File

@@ -0,0 +1,6 @@
-- Account table used by SqliteUserRepository.
-- id stores the user's UUID serialized as text; created_at is an RFC 3339
-- timestamp string written by the repository on insert.
CREATE TABLE IF NOT EXISTS users (
    id TEXT PRIMARY KEY NOT NULL,
    email TEXT UNIQUE NOT NULL,   -- uniqueness backs duplicate-registration detection
    password_hash TEXT NOT NULL,
    created_at TEXT NOT NULL
);

View File

@@ -3,18 +3,38 @@ use domain::{
errors::DomainError, errors::DomainError,
events::DomainEvent, events::DomainEvent,
models::{ models::{
DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, SortDirection, DiaryEntry, DiaryFilter, DirectorStat, FeedEntry, Movie, MonthlyRating,
collections::Paginated, Review, ReviewHistory, SortDirection, UserStats, UserTrends,
collections::{PageParams, Paginated},
}, },
ports::MovieRepository, ports::MovieRepository,
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear}, value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear, ReviewId, UserId},
}; };
use sqlx::SqlitePool; use sqlx::SqlitePool;
mod migrations; mod migrations;
mod models; mod models;
mod users;
use models::{DiaryRow, MovieRow, ReviewRow, datetime_to_str}; use models::{
DiaryRow, DirectorCountRow, FeedRow, MonthlyRatingRow, MovieRow, ReviewRow,
UserTotalsRow, datetime_to_str,
};
pub use users::SqliteUserRepository;
/// Formats a "YYYY-MM" key as a short human label such as `May '26`.
///
/// Inputs without a `-` separator are returned unchanged; an unknown month
/// token is emitted as-is (e.g. `"2026-13"` -> `13 '26`).
fn format_year_month(ym: &str) -> String {
    let Some((year_full, month_num)) = ym.split_once('-') else {
        return ym.to_string();
    };
    // Two-digit year; fall back to the full year if it is shorter than 2 chars.
    let short_year = year_full.get(2..).unwrap_or(year_full);
    let month_name = match month_num {
        "01" => "Jan",
        "02" => "Feb",
        "03" => "Mar",
        "04" => "Apr",
        "05" => "May",
        "06" => "Jun",
        "07" => "Jul",
        "08" => "Aug",
        "09" => "Sep",
        "10" => "Oct",
        "11" => "Nov",
        "12" => "Dec",
        other => other,
    };
    format!("{} '{}", month_name, short_year)
}
}
pub struct SqliteMovieRepository { pub struct SqliteMovieRepository {
pool: SqlitePool, pool: SqlitePool,
@@ -49,17 +69,15 @@ impl SqliteMovieRepository {
} }
} }
async fn fetch_diary_rows( async fn fetch_all_diary_rows(
&self, &self,
movie_id: Option<&str>,
sort: &SortDirection, sort: &SortDirection,
limit: i64, limit: i64,
offset: i64, offset: i64,
) -> Result<Vec<DiaryRow>, DomainError> { ) -> Result<Vec<DiaryRow>, DomainError> {
// sqlx macros require literal ORDER BY values; separate branches also let the match sort {
// query planner use the movie_id index instead of falling back to a filtered scan. // ByRatingDesc only applies to user-scoped queries; falls back to date sort here
match (movie_id, sort) { SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
(None, SortDirection::Descending) => sqlx::query_as!(
DiaryRow, DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path, "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
@@ -74,7 +92,7 @@ impl SqliteMovieRepository {
.await .await
.map_err(Self::map_err), .map_err(Self::map_err),
(None, SortDirection::Ascending) => sqlx::query_as!( SortDirection::Ascending => sqlx::query_as!(
DiaryRow, DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path, "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
@@ -88,42 +106,188 @@ impl SqliteMovieRepository {
.fetch_all(&self.pool) .fetch_all(&self.pool)
.await .await
.map_err(Self::map_err), .map_err(Self::map_err),
(Some(id), SortDirection::Descending) => sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.movie_id = ?
ORDER BY r.watched_at DESC
LIMIT ? OFFSET ?",
id,
limit,
offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err),
(Some(id), SortDirection::Ascending) => sqlx::query_as!(
DiaryRow,
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
FROM reviews r
INNER JOIN movies m ON m.id = r.movie_id
WHERE r.movie_id = ?
ORDER BY r.watched_at ASC
LIMIT ? OFFSET ?",
id,
limit,
offset
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err),
} }
} }
    /// Loads one page of diary rows for a single movie, joined with its
    /// movie columns.
    ///
    /// sqlx's `query_as!` macro requires a literal SQL string, so each sort
    /// direction is a separate branch; only the ORDER BY clause differs.
    async fn fetch_movie_diary_rows(
        &self,
        movie_id: &str,
        sort: &SortDirection,
        limit: i64,
        offset: i64,
    ) -> Result<Vec<DiaryRow>, DomainError> {
        match sort {
            // ByRatingDesc only applies to user-scoped queries; falls back to date sort here
            SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
                DiaryRow,
                "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
            r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
            FROM reviews r
            INNER JOIN movies m ON m.id = r.movie_id
            WHERE r.movie_id = ?
            ORDER BY r.watched_at DESC
            LIMIT ? OFFSET ?",
                movie_id,
                limit,
                offset
            )
            .fetch_all(&self.pool)
            .await
            .map_err(Self::map_err),
            SortDirection::Ascending => sqlx::query_as!(
                DiaryRow,
                "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
            r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
            FROM reviews r
            INNER JOIN movies m ON m.id = r.movie_id
            WHERE r.movie_id = ?
            ORDER BY r.watched_at ASC
            LIMIT ? OFFSET ?",
                movie_id,
                limit,
                offset
            )
            .fetch_all(&self.pool)
            .await
            .map_err(Self::map_err),
        }
    }
    /// Counts every review written by one user (total for diary pagination).
    async fn count_user_diary_entries(&self, user_id: &str) -> Result<i64, DomainError> {
        sqlx::query_scalar!(
            "SELECT COUNT(*) FROM reviews WHERE user_id = ?",
            user_id
        )
        .fetch_one(&self.pool)
        .await
        .map_err(Self::map_err)
    }

    /// One page of a user's diary, most recently watched first.
    async fn fetch_user_diary_rows_by_watched(
        &self,
        user_id: &str,
        limit: i64,
        offset: i64,
    ) -> Result<Vec<DiaryRow>, DomainError> {
        sqlx::query_as!(
            DiaryRow,
            "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
            r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
            FROM reviews r
            INNER JOIN movies m ON m.id = r.movie_id
            WHERE r.user_id = ?
            ORDER BY r.watched_at DESC
            LIMIT ? OFFSET ?",
            user_id, limit, offset
        )
        .fetch_all(&self.pool)
        .await
        .map_err(Self::map_err)
    }

    /// One page of a user's diary, highest rating first; ties broken by the
    /// most recent watch date.
    async fn fetch_user_diary_rows_by_rating(
        &self,
        user_id: &str,
        limit: i64,
        offset: i64,
    ) -> Result<Vec<DiaryRow>, DomainError> {
        sqlx::query_as!(
            DiaryRow,
            "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
            r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
            FROM reviews r
            INNER JOIN movies m ON m.id = r.movie_id
            WHERE r.user_id = ?
            ORDER BY r.rating DESC, r.watched_at DESC
            LIMIT ? OFFSET ?",
            user_id, limit, offset
        )
        .fetch_all(&self.pool)
        .await
        .map_err(Self::map_err)
    }
    /// Total number of reviews across all users (activity-feed pagination).
    async fn count_feed_entries(&self) -> Result<i64, DomainError> {
        sqlx::query_scalar!("SELECT COUNT(*) FROM reviews")
            .fetch_one(&self.pool)
            .await
            .map_err(Self::map_err)
    }

    /// One page of the global activity feed: review + movie columns plus the
    /// author's e-mail, most recently watched first.
    async fn fetch_feed_rows(
        &self,
        limit: i64,
        offset: i64,
    ) -> Result<Vec<FeedRow>, DomainError> {
        sqlx::query_as!(
            FeedRow,
            "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
            r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,
            u.email AS user_email
            FROM reviews r
            INNER JOIN movies m ON m.id = r.movie_id
            INNER JOIN users u ON u.id = r.user_id
            ORDER BY r.watched_at DESC
            LIMIT ? OFFSET ?",
            limit, offset
        )
        .fetch_all(&self.pool)
        .await
        .map_err(Self::map_err)
    }
    /// Distinct-movie count and average rating for one user.
    /// `avg_rating` is NULL (None) when the user has no reviews.
    async fn fetch_user_totals(&self, user_id: &str) -> Result<UserTotalsRow, DomainError> {
        sqlx::query_as!(
            UserTotalsRow,
            r#"SELECT COUNT(DISTINCT movie_id) AS "total!: i64",
            AVG(CAST(rating AS REAL)) AS avg_rating
            FROM reviews WHERE user_id = ?"#,
            user_id
        )
        .fetch_one(&self.pool)
        .await
        .map_err(Self::map_err)
    }

    /// Director the user has reviewed most often, if any.
    /// NOTE(review): on a tied count SQLite picks an arbitrary row — confirm
    /// that is acceptable for this stat.
    async fn fetch_user_favorite_director(
        &self,
        user_id: &str,
    ) -> Result<Option<String>, DomainError> {
        let row = sqlx::query_scalar!(
            "SELECT m.director
            FROM reviews r
            INNER JOIN movies m ON m.id = r.movie_id
            WHERE r.user_id = ? AND m.director IS NOT NULL
            GROUP BY m.director
            ORDER BY COUNT(*) DESC
            LIMIT 1",
            user_id
        )
        .fetch_optional(&self.pool)
        .await
        .map_err(Self::map_err)?;
        // fetch_optional wraps the row in Option, and sqlx still types the
        // nullable column as Option even though the WHERE filters NULLs out —
        // hence the flatten.
        Ok(row.flatten())
    }

    /// "YYYY-MM" month in which the user logged the most reviews, if any.
    async fn fetch_user_most_active_month(
        &self,
        user_id: &str,
    ) -> Result<Option<String>, DomainError> {
        // Option<Option<_>>: outer for "no rows at all", inner because sqlx
        // types the strftime() expression as nullable.
        let result: Option<Option<String>> = sqlx::query_scalar!(
            "SELECT strftime('%Y-%m', watched_at) AS month
            FROM reviews
            WHERE user_id = ?
            GROUP BY month
            ORDER BY COUNT(*) DESC
            LIMIT 1",
            user_id
        )
        .fetch_optional(&self.pool)
        .await
        .map_err(Self::map_err)?;
        Ok(result.flatten())
    }
} }
#[async_trait] #[async_trait]
@@ -248,14 +412,40 @@ impl MovieRepository for SqliteMovieRepository {
} }
async fn query_diary(&self, filter: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { async fn query_diary(&self, filter: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> {
let movie_id: Option<String> = filter.movie_id.as_ref().map(|id| id.value().to_string());
let limit = filter.page.limit as i64; let limit = filter.page.limit as i64;
let offset = filter.page.offset as i64; let offset = filter.page.offset as i64;
let (total, rows) = tokio::try_join!( let (total, rows) = match (&filter.movie_id, &filter.user_id) {
self.count_diary_entries(movie_id.as_deref()), (None, None) => tokio::try_join!(
self.fetch_diary_rows(movie_id.as_deref(), &filter.sort_by, limit, offset) self.count_diary_entries(None),
)?; self.fetch_all_diary_rows(&filter.sort_by, limit, offset)
)?,
(Some(id), None) => {
let id_str = id.value().to_string();
tokio::try_join!(
self.count_diary_entries(Some(id_str.as_str())),
self.fetch_movie_diary_rows(&id_str, &filter.sort_by, limit, offset)
)?
}
(None, Some(uid)) => {
let uid_str = uid.value().to_string();
match &filter.sort_by {
SortDirection::ByRatingDesc => tokio::try_join!(
self.count_user_diary_entries(&uid_str),
self.fetch_user_diary_rows_by_rating(&uid_str, limit, offset)
)?,
_ => tokio::try_join!(
self.count_user_diary_entries(&uid_str),
self.fetch_user_diary_rows_by_watched(&uid_str, limit, offset)
)?,
}
}
(Some(_), Some(_)) => {
return Err(DomainError::ValidationError(
"Combined movie_id + user_id filter not supported".into(),
));
}
};
let items = rows let items = rows
.into_iter() .into_iter()
@@ -270,6 +460,39 @@ impl MovieRepository for SqliteMovieRepository {
}) })
} }
    /// Fetches a single review by id; `Ok(None)` when no such row exists.
    async fn get_review_by_id(&self, review_id: &ReviewId) -> Result<Option<Review>, DomainError> {
        let id = review_id.value().to_string();
        sqlx::query_as!(
            ReviewRow,
            "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at
            FROM reviews WHERE id = ?",
            id
        )
        .fetch_optional(&self.pool)
        .await
        .map_err(Self::map_err)?
        .map(ReviewRow::to_domain)
        .transpose()
    }

    /// Deletes a review. Idempotent: deleting a missing id is not an error
    /// (rows_affected is not checked).
    async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError> {
        let id = review_id.value().to_string();
        sqlx::query!("DELETE FROM reviews WHERE id = ?", id)
            .execute(&self.pool)
            .await
            .map_err(Self::map_err)?;
        Ok(())
    }

    /// Deletes a movie row, idempotently.
    /// NOTE(review): whether dependent reviews are removed depends on the
    /// schema's foreign-key/cascade setup, which is not visible here — confirm.
    async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError> {
        let id = movie_id.value().to_string();
        sqlx::query!("DELETE FROM movies WHERE id = ?", id)
            .execute(&self.pool)
            .await
            .map_err(Self::map_err)?;
        Ok(())
    }
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError> { async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError> {
let id_str = movie_id.value().to_string(); let id_str = movie_id.value().to_string();
@@ -300,4 +523,119 @@ impl MovieRepository for SqliteMovieRepository {
Ok(ReviewHistory::new(movie, viewings)) Ok(ReviewHistory::new(movie, viewings))
} }
    /// Returns one page of the global activity feed plus the total count,
    /// running the count and page queries concurrently.
    async fn query_activity_feed(
        &self,
        page: &PageParams,
    ) -> Result<Paginated<FeedEntry>, DomainError> {
        let limit = page.limit as i64;
        let offset = page.offset as i64;
        let (total, rows) = tokio::try_join!(
            self.count_feed_entries(),
            self.fetch_feed_rows(limit, offset)
        )?;
        // Any row that fails domain conversion aborts the whole page.
        let items = rows
            .into_iter()
            .map(FeedRow::to_domain)
            .collect::<Result<Vec<_>, _>>()?;
        Ok(Paginated {
            items,
            total_count: total as u64,
            limit: page.limit,
            offset: page.offset,
        })
    }

    /// Aggregates a user's profile stats (totals, favorite director, most
    /// active month) with three concurrent queries.
    async fn get_user_stats(&self, user_id: &UserId) -> Result<UserStats, DomainError> {
        let uid = user_id.value().to_string();
        let (totals, fav_director, most_active) = tokio::try_join!(
            self.fetch_user_totals(&uid),
            self.fetch_user_favorite_director(&uid),
            self.fetch_user_most_active_month(&uid)
        )?;
        // "YYYY-MM" -> e.g. "May '26" for display.
        let most_active_month = most_active.map(|ym| format_year_month(&ym));
        Ok(UserStats {
            total_movies: totals.total,
            avg_rating: totals.avg_rating,
            favorite_director: fav_director,
            most_active_month,
        })
    }

    /// All of a user's diary entries (unpaginated), newest watch date first.
    async fn get_user_history(&self, user_id: &UserId) -> Result<Vec<DiaryEntry>, DomainError> {
        let uid = user_id.value().to_string();
        let rows = sqlx::query_as!(
            DiaryRow,
            "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
            r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
            FROM reviews r
            INNER JOIN movies m ON m.id = r.movie_id
            WHERE r.user_id = ?
            ORDER BY r.watched_at DESC",
            uid
        )
        .fetch_all(&self.pool)
        .await
        .map_err(Self::map_err)?;
        rows.into_iter().map(DiaryRow::to_domain).collect()
    }

    /// Trend data for a user: monthly average ratings over the last 12 months
    /// and the five most-reviewed directors. Both queries run concurrently.
    async fn get_user_trends(&self, user_id: &UserId) -> Result<UserTrends, DomainError> {
        let uid = user_id.value().to_string();
        let (rating_rows, director_rows) = tokio::try_join!(
            sqlx::query_as!(
                MonthlyRatingRow,
                r#"SELECT strftime('%Y-%m', watched_at) AS "month!",
                AVG(CAST(rating AS REAL)) AS "avg_rating!: f64",
                COUNT(*) AS "count!: i64"
                FROM reviews
                WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')
                GROUP BY "month!"
                ORDER BY "month!" ASC"#,
                uid
            )
            .fetch_all(&self.pool),
            sqlx::query_as!(
                DirectorCountRow,
                r#"SELECT m.director AS "director!",
                COUNT(*) AS "count!: i64"
                FROM reviews r
                INNER JOIN movies m ON m.id = r.movie_id
                WHERE r.user_id = ? AND m.director IS NOT NULL
                GROUP BY m.director
                ORDER BY COUNT(*) DESC
                LIMIT 5"#,
                uid
            )
            .fetch_all(&self.pool)
        )
        .map_err(Self::map_err)?;
        // Largest count among the top directors; defaults to 1 when the list
        // is empty — presumably so downstream scaling never divides by zero.
        let max_director_count = director_rows.iter().map(|d| d.count).max().unwrap_or(1);
        let monthly_ratings = rating_rows
            .into_iter()
            .map(|r| MonthlyRating {
                month_label: format_year_month(&r.month),
                year_month: r.month,
                avg_rating: r.avg_rating,
                count: r.count,
            })
            .collect();
        let top_directors = director_rows
            .into_iter()
            .map(|d| DirectorStat { director: d.director, count: d.count })
            .collect();
        Ok(UserTrends { monthly_ratings, top_directors, max_director_count })
    }
} }

View File

@@ -1,7 +1,7 @@
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use domain::{ use domain::{
errors::DomainError, errors::DomainError,
models::{DiaryEntry, Movie, Review}, models::{DiaryEntry, FeedEntry, Movie, Review, UserSummary},
value_objects::{ value_objects::{
Comment, ExternalMetadataId, MovieId, MovieTitle, PosterPath, Rating, ReleaseYear, Comment, ExternalMetadataId, MovieId, MovieTitle, PosterPath, Rating, ReleaseYear,
ReviewId, UserId, ReviewId, UserId,
@@ -111,6 +111,85 @@ impl DiaryRow {
} }
} }
// Like DiaryRow but includes user_email from JOIN with users table
/// Raw activity-feed row: the movie + review columns plus the author's e-mail.
#[derive(sqlx::FromRow)]
pub(crate) struct FeedRow {
    pub id: String,
    pub external_metadata_id: Option<String>,
    pub title: String,
    pub release_year: i64,
    pub director: Option<String>,
    pub poster_path: Option<String>,
    pub review_id: String,
    pub movie_id: String,
    pub user_id: String,
    pub rating: i64,
    pub comment: Option<String>,
    pub watched_at: String,
    pub created_at: String,
    pub user_email: String,
}

impl FeedRow {
    /// Converts to a domain `FeedEntry` by routing the shared movie/review
    /// fields through `DiaryRow::to_domain`, then attaching the e-mail.
    pub fn to_domain(self) -> Result<FeedEntry, DomainError> {
        let diary = DiaryRow {
            id: self.id,
            external_metadata_id: self.external_metadata_id,
            title: self.title,
            release_year: self.release_year,
            director: self.director,
            poster_path: self.poster_path,
            review_id: self.review_id,
            movie_id: self.movie_id,
            user_id: self.user_id,
            rating: self.rating,
            comment: self.comment,
            watched_at: self.watched_at,
            created_at: self.created_at,
        }
        .to_domain()?;
        Ok(FeedEntry::new(diary, self.user_email))
    }
}

/// Per-user aggregate row for the users listing.
#[derive(sqlx::FromRow)]
pub(crate) struct UserSummaryRow {
    pub id: String,
    pub email: String,
    pub total_movies: i64,
    // None when the user has no reviews (AVG over zero rows is NULL).
    pub avg_rating: Option<f64>,
}

impl UserSummaryRow {
    /// Parses the stored UUID and wraps the aggregates in the domain type.
    pub fn to_domain(self) -> Result<UserSummary, DomainError> {
        Ok(UserSummary {
            user_id: UserId::from_uuid(parse_uuid(&self.id)?),
            email: self.email,
            total_movies: self.total_movies,
            avg_rating: self.avg_rating,
        })
    }
}

/// COUNT/AVG pair backing a single user's profile stats.
#[derive(sqlx::FromRow)]
pub(crate) struct UserTotalsRow {
    pub total: i64,
    pub avg_rating: Option<f64>,
}

/// (director, review count) pair for the top-directors trend data.
#[derive(sqlx::FromRow)]
pub(crate) struct DirectorCountRow {
    pub director: String,
    pub count: i64,
}

/// Per-month average rating and review count for trend data.
#[derive(sqlx::FromRow)]
pub(crate) struct MonthlyRatingRow {
    pub month: String,
    pub avg_rating: f64,
    pub count: i64,
}
pub(crate) fn parse_uuid(s: &str) -> Result<Uuid, DomainError> { pub(crate) fn parse_uuid(s: &str) -> Result<Uuid, DomainError> {
Uuid::parse_str(s) Uuid::parse_str(s)
.map_err(|e| DomainError::InfrastructureError(format!("Invalid UUID '{}': {}", s, e))) .map_err(|e| DomainError::InfrastructureError(format!("Invalid UUID '{}': {}", s, e)))

View File

@@ -0,0 +1,172 @@
use async_trait::async_trait;
use chrono::Utc;
use sqlx::SqlitePool;
use domain::{
errors::DomainError,
models::User,
ports::UserRepository,
value_objects::{Email, PasswordHash, UserId},
};
use super::models::UserSummaryRow;
/// SQLite-backed implementation of the domain `UserRepository` port.
pub struct SqliteUserRepository {
    pool: SqlitePool,
}

impl SqliteUserRepository {
    /// Wraps an existing connection pool.
    pub fn new(pool: SqlitePool) -> Self {
        Self { pool }
    }

    /// Logs the full driver error, then returns an opaque infrastructure
    /// error so database details never leak to callers.
    fn map_err(e: sqlx::Error) -> DomainError {
        tracing::error!("Database error: {:?}", e);
        DomainError::InfrastructureError("Database operation failed".into())
    }
}
/// Maps a raw `users` row onto the domain `User`, validating each field.
///
/// Shared by `find_by_email` and `find_by_id` so the persistence-to-domain
/// conversion lives in exactly one place.
fn row_to_user(id: &str, email: String, password_hash: String) -> Result<User, DomainError> {
    let uuid = uuid::Uuid::parse_str(id)
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
    let email = Email::new(email)
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
    let hash = PasswordHash::new(password_hash)
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
    Ok(User::from_persistence(UserId::from_uuid(uuid), email, hash))
}

#[async_trait]
impl UserRepository for SqliteUserRepository {
    /// Looks up a user by e-mail; `Ok(None)` when no row matches.
    async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError> {
        let email_str = email.value();
        let row = sqlx::query!(
            "SELECT id, email, password_hash FROM users WHERE email = ?",
            email_str
        )
        .fetch_optional(&self.pool)
        .await
        .map_err(Self::map_err)?;
        row.map(|r| row_to_user(&r.id, r.email, r.password_hash))
            .transpose()
    }

    /// Persists a new user. INSERT OR IGNORE means a conflicting row is
    /// detected via `rows_affected` rather than surfacing a driver error.
    async fn save(&self, user: &User) -> Result<(), DomainError> {
        let id = user.id().value().to_string();
        let email = user.email().value();
        let hash = user.password_hash().value();
        // Creation timestamp is assigned here, at persistence time.
        let created_at = Utc::now().to_rfc3339();
        let result = sqlx::query!(
            "INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
            id,
            email,
            hash,
            created_at
        )
        .execute(&self.pool)
        .await
        .map_err(Self::map_err)?;
        if result.rows_affected() == 0 {
            // The UNIQUE(email) constraint is the realistic conflict source.
            return Err(DomainError::ValidationError("Email already registered".into()));
        }
        Ok(())
    }

    /// Looks up a user by id; `Ok(None)` when no row matches.
    async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError> {
        let id_str = id.value().to_string();
        let row = sqlx::query!(
            "SELECT id, email, password_hash FROM users WHERE id = ?",
            id_str
        )
        .fetch_optional(&self.pool)
        .await
        .map_err(Self::map_err)?;
        row.map(|r| row_to_user(&r.id, r.email, r.password_hash))
            .transpose()
    }

    /// Lists every user with aggregates: distinct movies reviewed and average
    /// rating. The LEFT JOIN keeps users with no reviews (avg_rating NULL).
    async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> {
        sqlx::query_as!(
            UserSummaryRow,
            r#"SELECT u.id,
            u.email,
            COUNT(DISTINCT r.movie_id) AS "total_movies!: i64",
            AVG(CAST(r.rating AS REAL)) AS avg_rating
            FROM users u
            LEFT JOIN reviews r ON r.user_id = u.id
            GROUP BY u.id, u.email
            ORDER BY u.email ASC"#
        )
        .fetch_all(&self.pool)
        .await
        .map_err(Self::map_err)?
        .into_iter()
        .map(UserSummaryRow::to_domain)
        .collect()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use sqlx::SqlitePool;

    /// Builds an isolated in-memory database plus a repository over it.
    ///
    /// The pool is capped at ONE connection: every pooled connection to a
    /// `:memory:` SQLite database gets its own private database, so with more
    /// than one connection the schema created below could be invisible to a
    /// later query that checks out a fresh connection.
    async fn setup() -> (SqlitePool, SqliteUserRepository) {
        let pool = sqlx::sqlite::SqlitePoolOptions::new()
            .max_connections(1)
            .connect(":memory:")
            .await
            .unwrap();
        sqlx::query(
            "CREATE TABLE users (id TEXT PRIMARY KEY, email TEXT NOT NULL UNIQUE, password_hash TEXT NOT NULL, created_at TEXT NOT NULL)"
        )
        .execute(&pool)
        .await
        .unwrap();
        let repo = SqliteUserRepository::new(pool.clone());
        (pool, repo)
    }

    #[tokio::test]
    async fn find_by_id_returns_none_when_not_found() {
        let (_, repo) = setup().await;
        let result = repo
            .find_by_id(&UserId::from_uuid(uuid::Uuid::new_v4()))
            .await
            .unwrap();
        assert!(result.is_none());
    }

    #[tokio::test]
    async fn find_by_id_returns_user_when_found() {
        let (pool, repo) = setup().await;
        let id = uuid::Uuid::new_v4();
        // Insert directly so the test exercises only the read path.
        sqlx::query(
            "INSERT INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)"
        )
        .bind(id.to_string())
        .bind("test@example.com")
        .bind("$argon2id$v=19$m=65536,t=2,p=1$fakesalt$fakehash")
        .bind("2026-01-01T00:00:00Z")
        .execute(&pool)
        .await
        .unwrap();
        let result = repo
            .find_by_id(&UserId::from_uuid(id))
            .await
            .unwrap();
        assert!(result.is_some());
        assert_eq!(result.unwrap().email().value(), "test@example.com");
    }
}

View File

@@ -7,6 +7,7 @@ edition = "2024"
askama = { version = "0.16.0" } askama = { version = "0.16.0" }
serde = { workspace = true } serde = { workspace = true }
chrono = { workspace = true }
domain = { workspace = true } domain = { workspace = true }
application = { workspace = true } application = { workspace = true }

View File

@@ -1,6 +1,13 @@
use askama::Template; use askama::Template;
use application::ports::HtmlRenderer; use chrono::Datelike;
use domain::models::{DiaryEntry, collections::Paginated}; use application::ports::{
ActivityFeedPageData, HtmlPageContext, HtmlRenderer, LoginPageData,
NewReviewPageData, ProfilePageData, RegisterPageData, UsersPageData,
};
use domain::models::{
DiaryEntry, FeedEntry, MonthActivity, MonthlyRating, UserStats, UserSummary, UserTrends,
collections::Paginated,
};
#[derive(Template)] #[derive(Template)]
#[template(path = "diary.html")] #[template(path = "diary.html")]
@@ -9,6 +16,117 @@ struct DiaryTemplate<'a> {
current_offset: u32, current_offset: u32,
limit: u32, limit: u32,
has_more: bool, has_more: bool,
ctx: &'a HtmlPageContext,
}
/// Login form; `error` carries a failed-attempt message, if any.
#[derive(Template)]
#[template(path = "login.html")]
struct LoginTemplate<'a> {
    error: Option<&'a str>,
    // Shared nav/SEO context rendered by the base layout.
    ctx: &'a HtmlPageContext,
}

/// Registration form; `error` carries a failed-attempt message, if any.
#[derive(Template)]
#[template(path = "register.html")]
struct RegisterTemplate<'a> {
    error: Option<&'a str>,
    ctx: &'a HtmlPageContext,
}

/// "Log a movie" form; `error` carries a validation message, if any.
#[derive(Template)]
#[template(path = "new_review.html")]
struct NewReviewTemplate<'a> {
    error: Option<&'a str>,
    ctx: &'a HtmlPageContext,
}

/// Paginated global activity feed.
#[derive(Template)]
#[template(path = "activity_feed.html")]
struct ActivityFeedTemplate<'a> {
    entries: &'a [FeedEntry],
    current_offset: u32,
    limit: u32,
    has_more: bool,
    ctx: &'a HtmlPageContext,
}

/// Users listing with per-user aggregates.
#[derive(Template)]
#[template(path = "users.html")]
struct UsersTemplate<'a> {
    users: &'a [UserSummary],
    ctx: &'a HtmlPageContext,
}

/// View model pairing a monthly rating with its precomputed chart bar height
/// in pixels, so the template contains no arithmetic.
struct MonthlyRatingRow<'a> {
    rating: &'a MonthlyRating,
    bar_height_px: i64,
}

/// Profile page; `view` selects which optional section (entries / history /
/// trends) is populated and rendered.
#[derive(Template)]
#[template(path = "profile.html")]
struct ProfileTemplate<'a> {
    ctx: &'a HtmlPageContext,
    profile_display_name: String,
    stats: &'a UserStats,
    view: &'a str,
    entries: Option<&'a Paginated<DiaryEntry>>,
    current_offset: u32,
    has_more: bool,
    limit: u32,
    history: Option<&'a Vec<MonthActivity>>,
    trends: Option<&'a UserTrends>,
    monthly_rating_rows: Vec<MonthlyRatingRow<'a>>,
    heatmap: Vec<HeatmapCell>,
}

/// One month cell of the profile heatmap; `alpha` is the precomputed
/// color opacity derived from the review count.
struct HeatmapCell {
    month_label: String,
    count: i64,
    alpha: f64,
}
/// Renders a timestamp as a human-friendly age ("just now", "5 min ago",
/// "yesterday", "N days ago"), falling back to an absolute date once the
/// entry is 30 or more days old.
#[allow(dead_code)]
fn relative_time(dt: chrono::NaiveDateTime) -> String {
    let elapsed = chrono::Utc::now().naive_utc().signed_duration_since(dt);
    // Future-dated or sub-minute timestamps both read as "just now".
    if elapsed.num_seconds() <= 0 || elapsed.num_minutes() < 1 {
        return "just now".to_string();
    }
    let (minutes, hours, days) =
        (elapsed.num_minutes(), elapsed.num_hours(), elapsed.num_days());
    if minutes < 60 {
        format!("{} min ago", minutes)
    } else if hours < 24 {
        format!("{} h ago", hours)
    } else if days == 1 {
        "yesterday".to_string()
    } else if days < 30 {
        format!("{} days ago", days)
    } else {
        dt.format("%b %-d, %Y").to_string()
    }
}
/// Builds the 12 heatmap cells (Jan..Dec of the current year) from monthly
/// activity counts. Opacity is 0.05 for empty months, otherwise scaled into
/// 0.15..=0.90 relative to the busiest month.
fn build_heatmap(history: &[MonthActivity]) -> Vec<HeatmapCell> {
    let year = chrono::Utc::now().year();
    let months = [
        ("01", "Jan"), ("02", "Feb"), ("03", "Mar"), ("04", "Apr"),
        ("05", "May"), ("06", "Jun"), ("07", "Jul"), ("08", "Aug"),
        ("09", "Sep"), ("10", "Oct"), ("11", "Nov"), ("12", "Dec"),
    ];
    // Review count per month of the current year; months absent from the
    // history count as zero.
    let counts: Vec<i64> = months
        .iter()
        .map(|(num, _)| {
            let key = format!("{}-{}", year, num);
            history
                .iter()
                .find(|activity| activity.year_month == key)
                .map_or(0, |activity| activity.count)
        })
        .collect();
    // Clamp to at least 1 so the scaling division is always defined.
    let max_count = counts.iter().copied().max().unwrap_or(0).max(1);
    let mut cells = Vec::with_capacity(months.len());
    for ((_, label), &count) in months.iter().zip(counts.iter()) {
        let alpha = if count == 0 {
            0.05
        } else {
            0.15 + 0.75 * (count as f64 / max_count as f64)
        };
        cells.push(HeatmapCell {
            month_label: label.to_string(),
            count,
            alpha,
        });
    }
    cells
}
fn bar_height_px(avg_rating: f64) -> i64 {
(avg_rating / 5.0 * 60.0) as i64
} }
pub struct AskamaHtmlRenderer; pub struct AskamaHtmlRenderer;
@@ -20,16 +138,94 @@ impl AskamaHtmlRenderer {
} }
impl HtmlRenderer for AskamaHtmlRenderer { impl HtmlRenderer for AskamaHtmlRenderer {
fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String> { fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String> {
let has_more = (data.offset + data.limit) < data.total_count as u32; let has_more = (data.offset + data.limit) < data.total_count as u32;
DiaryTemplate {
let template = DiaryTemplate {
entries: &data.items, entries: &data.items,
current_offset: data.offset, current_offset: data.offset,
limit: data.limit, limit: data.limit,
has_more, has_more,
}; ctx: &ctx,
}
.render()
.map_err(|e| e.to_string())
}
template.render().map_err(|e| e.to_string()) fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String> {
LoginTemplate {
error: data.error,
ctx: &data.ctx,
}
.render()
.map_err(|e| e.to_string())
}
    /// Renders the registration form; askama errors become plain strings.
    fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String> {
        RegisterTemplate {
            error: data.error,
            ctx: &data.ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }

    /// Renders the "log a movie" form.
    fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String> {
        NewReviewTemplate {
            error: data.error,
            ctx: &data.ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }

    /// Renders one page of the global activity feed.
    fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String> {
        ActivityFeedTemplate {
            entries: &data.entries.items,
            current_offset: data.current_offset,
            limit: data.limit,
            has_more: data.has_more,
            ctx: &data.ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }

    /// Renders the users listing.
    fn render_users_page(&self, data: UsersPageData) -> Result<String, String> {
        UsersTemplate {
            users: &data.users,
            ctx: &data.ctx,
        }
        .render()
        .map_err(|e| e.to_string())
    }
fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String> {
let heatmap = data.history.as_deref()
.map(|h| build_heatmap(h))
.unwrap_or_default();
let profile_display_name = data.profile_user_email
.split('@').next().unwrap_or(&data.profile_user_email).to_string();
let monthly_rating_rows: Vec<MonthlyRatingRow<'_>> = data.trends.as_ref()
.map(|t| t.monthly_ratings.iter().map(|r| MonthlyRatingRow {
bar_height_px: bar_height_px(r.avg_rating),
rating: r,
}).collect())
.unwrap_or_default();
ProfileTemplate {
ctx: &data.ctx,
profile_display_name,
stats: &data.stats,
view: &data.view,
entries: data.entries.as_ref(),
current_offset: data.current_offset,
has_more: data.has_more,
limit: data.limit,
history: data.history.as_ref(),
trends: data.trends.as_ref(),
monthly_rating_rows,
heatmap,
}
.render()
.map_err(|e| e.to_string())
} }
} }

View File

@@ -0,0 +1,50 @@
{% extends "base.html" %}
{# Entry listing: one card per entry with poster, star rating, comment,
   author link and — for the entry's author only — a delete button. #}
{% block content %}
<div class="diary">
  {% for entry in entries %}
  <article class="entry">
    {% if let Some(poster) = entry.movie().poster_path() %}
    <div class="poster">
      <img src="/posters/{{ poster.value() }}" alt="">
    </div>
    {% endif %}
    <div class="entry-body">
      <div class="entry-title">
        {{ entry.movie().title().value() }}
        <span class="year">({{ entry.movie().release_year().value() }})</span>
      </div>
      {% if let Some(dir) = entry.movie().director() %}
      <div class="director">{{ dir }}</div>
      {% endif %}
      <div class="rating">
        {% for filled in entry.review().stars() %}
        <span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
        {% endfor %}
      </div>
      {% if let Some(comment) = entry.review().comment() %}
      <div class="comment">{{ comment.value() }}</div>
      {% endif %}
      <div class="feed-meta">
        <a href="/users/{{ entry.review().user_id().value() }}" class="feed-user">{{ entry.user_display_name() }}</a>
        <span class="feed-time">{{ entry.review().watched_at().format("%b %-d, %Y") }}</span>
      </div>
      {# Delete is only offered when the viewer authored the review. #}
      {% if ctx.is_current_user(entry.review().user_id().value()) %}
      <form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
        <button type="submit">Delete</button>
      </form>
      {% endif %}
    </div>
  </article>
  {% else %}
  {# for/else: rendered when `entries` is empty. #}
  <p class="empty">No movies logged yet.</p>
  {% endfor %}
</div>
<nav class="pagination">
  {% if current_offset >= limit %}
  <a href="/?offset={{ current_offset - limit }}">&larr; Prev</a>
  {% endif %}
  {% if has_more %}
  <a href="/?offset={{ current_offset + limit }}">Next &rarr;</a>
  {% endif %}
</nav>
{% endblock %}

View File

@@ -0,0 +1,42 @@
<!DOCTYPE html>
<html lang="en">
<head>
  {# Shared layout head: per-page title, SEO/OpenGraph/Twitter meta and the
     canonical URL all come from HtmlPageContext (`ctx`). #}
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>{{ ctx.page_title }}</title>
  <meta name="description" content="A personal movie diary — track what you watch, rate and review films.">
  <meta property="og:type" content="website">
  <meta property="og:site_name" content="Movies Diary">
  <meta property="og:title" content="{{ ctx.page_title }}">
  <meta property="og:url" content="{{ ctx.canonical_url }}">
  <meta name="twitter:card" content="summary">
  <meta name="twitter:title" content="{{ ctx.page_title }}">
  <link rel="canonical" href="{{ ctx.canonical_url }}">
  <link rel="preconnect" href="https://fonts.googleapis.com">
  <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
  <link href="https://fonts.googleapis.com/css2?family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet">
  <link rel="stylesheet" href="/static/style.css">
</head>
<body>
  <header>
    <a href="/" class="site-title">Movies Diary</a>
    {# Nav switches on authentication: ctx.user_email is Some for a logged-in
       user; registration link is further gated by ctx.register_enabled. #}
    <nav>
      <a href="/">Feed</a>
      <a href="/users">Users</a>
      <a href="{{ ctx.rss_url }}">RSS</a>
      {% if let Some(email) = ctx.user_email %}
      <a href="/reviews/new">Add Review</a>
      <a href="/logout">Logout</a>
      {% else %}
      <a href="/login">Login</a>
      {% if ctx.register_enabled %}
      <a href="/register">Register</a>
      {% endif %}
      {% endif %}
    </nav>
  </header>
  <main>
    {% block content %}{% endblock %}
  </main>
</body>
</html>

View File

@@ -1,76 +1,51 @@
<!-- crates/presentation/templates/diary.html --> {% extends "base.html" %}
<!DOCTYPE html> {% block content %}
<html lang="en"> <div class="diary">
<head> {% for entry in entries %}
<meta charset="UTF-8"> <article class="entry">
<title>My Movie Diary</title> {% if let Some(poster) = entry.movie().poster_path() %}
<style> <div class="poster">
/* Minimalist old-school styling */ <img src="/posters/{{ poster.value() }}" alt="">
body { font-family: monospace; max-width: 800px; margin: 0 auto; padding: 20px; }
.entry { border-bottom: 1px solid #ccc; padding: 10px 0; }
.poster { max-width: 100px; float: left; margin-right: 15px; }
.clear { clear: both; }
.error { color: red; }
</style>
</head>
<body>
<h1>Movie Diary</h1>
<!-- Zero-JS Form Submission -->
<form action="/reviews" method="POST">
<fieldset>
<legend>Log a Movie</legend>
<label for="tmdb_id">TMDB ID (Optional):</label>
<input type="text" name="external_metadata_id" id="tmdb_id"><br><br>
<label for="title">Title (Fallback):</label>
<input type="text" name="manual_title" id="title"><br><br>
<label for="year">Year (Fallback):</label>
<input type="number" name="manual_release_year" id="year" min="1888"><br><br>
<label for="rating">Rating (0-5):</label>
<input type="number" name="rating" id="rating" min="0" max="5" required><br><br>
<button type="submit">Log Movie</button>
</fieldset>
</form>
<hr>
<!-- Rendering the Domain Models -->
<div class="diary-entries">
{% for entry in entries %}
<div class="entry">
{% if let Some(poster) = entry.movie().poster_path() %}
<!-- Assuming you have a route to serve the raw images -->
<img src="/static/posters/{{ poster.value() }}" class="poster" alt="Poster">
{% endif %}
<h3>{{ entry.movie().title().value() }} ({{ entry.movie().release_year().value() }})</h3>
<p><strong>Rating:</strong> {{ entry.review().rating().value() }} / 5</p>
{% if let Some(comment) = entry.review().comment() %}
<p><em>"{{ comment.value() }}"</em></p>
{% endif %}
<p><small>Watched on: {{ entry.review().watched_at().format("%Y-%m-%d") }}</small></p>
<div class="clear"></div>
</div>
{% else %}
<p>No movies logged yet. Go watch something!</p>
{% endfor %}
</div> </div>
{% endif %}
<!-- Simple Pagination --> <div class="entry-body">
<div> <div class="entry-title">
{% if current_offset > 0 %} {{ entry.movie().title().value() }}
<a href="/diary?offset={{ current_offset - limit }}">Previous Page</a> <span class="year">({{ entry.movie().release_year().value() }})</span>
{% endif %} </div>
{% if has_more %} {% if let Some(dir) = entry.movie().director() %}
<a href="/diary?offset={{ current_offset + limit }}">Next Page</a> <div class="director">{{ dir }}</div>
{% endif %}
<div class="rating">
<span class="star {% if entry.review().rating().value() >= 1 %}filled{% else %}empty{% endif %}"></span>
<span class="star {% if entry.review().rating().value() >= 2 %}filled{% else %}empty{% endif %}"></span>
<span class="star {% if entry.review().rating().value() >= 3 %}filled{% else %}empty{% endif %}"></span>
<span class="star {% if entry.review().rating().value() >= 4 %}filled{% else %}empty{% endif %}"></span>
<span class="star {% if entry.review().rating().value() >= 5 %}filled{% else %}empty{% endif %}"></span>
</div>
{% if let Some(comment) = entry.review().comment() %}
<div class="comment">{{ comment.value() }}</div>
{% endif %}
<div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
{% if let Some(uid) = ctx.user_id %}
{% if *uid == entry.review().user_id().value() %}
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
<button type="submit">Delete</button>
</form>
{% endif %} {% endif %}
{% endif %}
</div> </div>
</body> </article>
</html> {% else %}
<p class="empty">No movies logged yet.</p>
{% endfor %}
</div>
<nav class="pagination">
{% if current_offset > 0 %}
<a href="/?offset={{ current_offset - limit }}">&larr; Prev</a>
{% endif %}
{% if has_more %}
<a href="/?offset={{ current_offset + limit }}">Next &rarr;</a>
{% endif %}
</nav>
{% endblock %}

View File

@@ -0,0 +1,18 @@
{# Login page: posts credentials to /login; `error` holds the failure
   message from a previous attempt, if any. #}
{% extends "base.html" %}
{% block content %}
<h1>Login</h1>
{% if let Some(err) = error %}
<p class="error">{{ err }}</p>
{% endif %}
<form method="POST" action="/login">
    <label>
        Email<br>
        <input type="email" name="email" required autocomplete="email">
    </label>
    <label>
        Password<br>
        {# current-password lets password managers fill the stored credential. #}
        <input type="password" name="password" required autocomplete="current-password">
    </label>
    <button type="submit">Login</button>
</form>
{% endblock %}

View File

@@ -0,0 +1,40 @@
{# New-review form: either an OMDB/IMDb id OR the manual title/year/director
   fields identify the movie (see MovieResolver strategies server-side). #}
{% extends "base.html" %}
{% block content %}
<h1>Log a Review</h1>
{% if let Some(err) = error %}
<p class="error">{{ err }}</p>
{% endif %}
<form method="POST" action="/reviews">
    <label>
        OMDB ID <span class="optional">(optional)</span><br>
        <input type="text" name="external_metadata_id" placeholder="tt0166924">
    </label>
    <hr>
    <label>
        Title<br>
        <input type="text" name="manual_title">
    </label>
    <label>
        Year<br>
        <input type="number" name="manual_release_year" min="1888" max="2100">
    </label>
    <label>
        Director<br>
        <input type="text" name="manual_director">
    </label>
    <hr>
    <label>
        {# Fixed garbled label "(05)" — the dash was lost; range matches the
           input's min=0 max=5 constraints. #}
        Rating (0–5)<br>
        <input type="number" name="rating" min="0" max="5" required>
    </label>
    <label>
        Watched<br>
        <input type="datetime-local" name="watched_at" required>
    </label>
    <label>
        Comment<br>
        <textarea name="comment"></textarea>
    </label>
    <button type="submit">Log Review</button>
</form>
{% endblock %}

View File

@@ -0,0 +1,165 @@
{# User profile page with four tab views (recent / ratings / history / trends),
   selected by the `view` string. Stats header is always shown. #}
{% extends "base.html" %}
{% block content %}
<div class="profile">
    <div class="stats-header">
        <div class="profile-name">{{ profile_display_name }}</div>
        <div class="stats-grid">
            <div class="stat-tile">
                <div class="stat-value">{{ stats.total_movies }}</div>
                <div class="stat-label">movies</div>
            </div>
            <div class="stat-tile">
                <div class="stat-value">{{ stats.avg_rating_display() }}★</div>
                <div class="stat-label">avg rating</div>
            </div>
            <div class="stat-tile">
                <div class="stat-value">{{ stats.favorite_director_display() }}</div>
                <div class="stat-label">fav director</div>
            </div>
            <div class="stat-tile">
                <div class="stat-value">{{ stats.most_active_month_display() }}</div>
                <div class="stat-label">most active</div>
            </div>
        </div>
    </div>
    {# Tab links reload the page with ?view=… #}
    <div class="view-tabs">
        <a href="?view=recent" class="view-tab {% if view == "recent" %}active{% endif %}">Recent</a>
        <a href="?view=ratings" class="view-tab {% if view == "ratings" %}active{% endif %}">Top Rated</a>
        <a href="?view=history" class="view-tab {% if view == "history" %}active{% endif %}">History</a>
        <a href="?view=trends" class="view-tab {% if view == "trends" %}active{% endif %}">Trends</a>
    </div>
    {% if view == "history" %}
    {# History view: per-month heatmap plus month-grouped diary entries.
       `history` is Option<Vec<MonthActivity>>. #}
    {% if let Some(hist) = history %}
    <div class="heatmap-section">
        <div class="heatmap-label">Movies watched this year</div>
        <div class="heatmap">
            {% for cell in heatmap %}
            {# --alpha drives the cell's color intensity via CSS. #}
            <div class="heatmap-cell" style="--alpha: {{ cell.alpha }}">
                <div class="heatmap-count">{{ cell.count }}</div>
                <div class="heatmap-month">{{ cell.month_label }}</div>
            </div>
            {% endfor %}
        </div>
    </div>
    {% for month in hist %}
    <div class="history-month">
        <h3 class="month-heading">{{ month.month_label }} <span class="month-count">{{ month.count }}</span></h3>
        <div class="diary">
            {% for entry in month.entries %}
            <article class="entry">
                {% if let Some(poster) = entry.movie().poster_path() %}
                <div class="poster"><img src="/posters/{{ poster.value() }}" alt=""></div>
                {% endif %}
                <div class="entry-body">
                    <div class="entry-title">{{ entry.movie().title().value() }} <span class="year">({{ entry.movie().release_year().value() }})</span></div>
                    {% if let Some(dir) = entry.movie().director() %}<div class="director">{{ dir }}</div>{% endif %}
                    <div class="rating">
                        {% for filled in entry.review().stars() %}
                        <span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
                        {% endfor %}
                    </div>
                    <div class="watched-at">{{ entry.review().watched_at().format("%b %-d") }}</div>
                </div>
            </article>
            {% endfor %}
        </div>
    </div>
    {% else %}
    <p class="empty">No movies logged yet.</p>
    {% endfor %}
    {% endif %}
    {% elif view == "trends" %}
    {# Trends view: monthly average-rating bar chart and top-directors chart. #}
    {% if let Some(t) = trends %}
    <div class="trends-section">
        {% if !monthly_rating_rows.is_empty() %}
        <div class="chart-block">
            <div class="chart-label">Average rating per month</div>
            <div class="bar-chart">
                {% for row in monthly_rating_rows %}
                <div class="bar-col">
                    <div class="bar-value">{{ "{:.1}"|format(row.rating.avg_rating) }}</div>
                    {# Bar heights are precomputed in pixels server-side. #}
                    <div class="bar-fill" style="height: {{ row.bar_height_px }}px"></div>
                    <div class="bar-month">{{ row.rating.month_label }}</div>
                </div>
                {% endfor %}
            </div>
        </div>
        {% endif %}
        {% if !t.top_directors.is_empty() %}
        <div class="chart-block">
            <div class="chart-label">Most watched directors</div>
            <div class="director-chart">
                {% for d in t.top_directors %}
                <div class="director-row">
                    <div class="director-name">{{ d.director }}</div>
                    <div class="director-bar">
                        {# Guard against division by zero when no counts exist. #}
                        {% if t.max_director_count > 0 %}
                        <div class="director-bar-fill" style="width: {{ d.count * 100 / t.max_director_count }}%"></div>
                        {% else %}
                        <div class="director-bar-fill" style="width: 0%"></div>
                        {% endif %}
                    </div>
                    <div class="director-count">{{ d.count }}</div>
                </div>
                {% endfor %}
            </div>
        </div>
        {% endif %}
    </div>
    {% endif %}
    {% else %}
    {# Default (recent / ratings) view: paginated diary entries. #}
    {% if let Some(paged) = entries %}
    <div class="diary">
        {% for entry in paged.items %}
        <article class="entry">
            {% if let Some(poster) = entry.movie().poster_path() %}
            <div class="poster">
                <img src="/posters/{{ poster.value() }}" alt="">
            </div>
            {% endif %}
            <div class="entry-body">
                <div class="entry-title">
                    {{ entry.movie().title().value() }}
                    <span class="year">({{ entry.movie().release_year().value() }})</span>
                </div>
                {% if let Some(dir) = entry.movie().director() %}
                <div class="director">{{ dir }}</div>
                {% endif %}
                <div class="rating">
                    {% for filled in entry.review().stars() %}
                    <span class="star {% if filled %}filled{% else %}empty{% endif %}"></span>
                    {% endfor %}
                </div>
                {% if let Some(comment) = entry.review().comment() %}
                <div class="comment">{{ comment.value() }}</div>
                {% endif %}
                <div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
                {# Delete button only for the review's owner. #}
                {% if ctx.is_current_user(entry.review().user_id().value()) %}
                <form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
                    <button type="submit">Delete</button>
                </form>
                {% endif %}
            </div>
        </article>
        {% else %}
        <p class="empty">No reviews yet.</p>
        {% endfor %}
    </div>
    <nav class="pagination">
        {% if current_offset >= limit %}
        <a href="?view={{ view }}&offset={{ current_offset - limit }}">&larr; Prev</a>
        {% endif %}
        {% if has_more %}
        <a href="?view={{ view }}&offset={{ current_offset + limit }}">Next &rarr;</a>
        {% endif %}
    </nav>
    {% endif %}
    {% endif %}
</div>
{% endblock %}

View File

@@ -0,0 +1,18 @@
{% extends "base.html" %}
{% block content %}
<h1>Register</h1>
{% if let Some(err) = error %}
<p class="error">{{ err }}</p>
{% endif %}
<form method="POST" action="/register">
<label>
Email<br>
<input type="email" name="email" required autocomplete="email">
</label>
<label>
Password<br>
<input type="password" name="password" required autocomplete="new-password">
</label>
<button type="submit">Register</button>
</form>
{% endblock %}

View File

@@ -0,0 +1,18 @@
{% extends "base.html" %}
{% block content %}
<div class="users-list">
<h2 class="page-title">Members</h2>
{% for user in users %}
<div class="user-row">
<div class="user-avatar">{{ user.initial() }}</div>
<div class="user-info">
<div class="user-name">{{ user.display_name() }}</div>
<div class="user-meta">{{ user.total_movies }} movies · avg {{ user.avg_rating_display() }}★</div>
</div>
<a href="/users/{{ user.user_id.value() }}" class="btn-secondary">View profile →</a>
</div>
{% else %}
<p class="empty">No users yet.</p>
{% endfor %}
</div>
{% endblock %}

View File

@@ -4,7 +4,11 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
async-trait = { workspace = true }
domain = { workspace = true } domain = { workspace = true }
uuid = { workspace = true } uuid = { workspace = true }
chrono = { workspace = true } chrono = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }
[dev-dependencies]
tokio = { workspace = true }

View File

@@ -14,7 +14,23 @@ pub struct LogReviewCommand {
pub watched_at: NaiveDateTime, pub watched_at: NaiveDateTime,
} }
#[derive(Clone)]
pub struct SyncPosterCommand { pub struct SyncPosterCommand {
pub movie_id: Uuid, pub movie_id: Uuid,
pub external_metadata_id: String, pub external_metadata_id: String,
} }
/// Credentials submitted by the login form.
pub struct LoginCommand {
    pub email: String,
    pub password: String,
}
/// Credentials submitted by the registration form.
pub struct RegisterCommand {
    pub email: String,
    pub password: String,
}
/// Request to delete a review; `requesting_user_id` is checked against the
/// review's owner by the use case before deletion.
pub struct DeleteReviewCommand {
    pub review_id: Uuid,
    pub requesting_user_id: Uuid,
}

View File

@@ -0,0 +1,16 @@
/// Application-level configuration sourced from environment variables.
#[derive(Clone)]
pub struct AppConfig {
    /// Whether the /register endpoint is exposed (`ALLOW_REGISTRATION`).
    pub allow_registration: bool,
    /// Public base URL used to build absolute links, e.g. the RSS channel
    /// link (`BASE_URL`).
    pub base_url: String,
}

impl AppConfig {
    /// Reads configuration from the process environment.
    ///
    /// `ALLOW_REGISTRATION` accepts "true" or "1", case-insensitively and
    /// ignoring surrounding whitespace (previously only the exact lowercase
    /// forms enabled it, so "TRUE" silently disabled registration). Anything
    /// else — including an unset variable — disables registration.
    /// `BASE_URL` defaults to "http://localhost:3000" when unset.
    pub fn from_env() -> Self {
        let allow_registration = std::env::var("ALLOW_REGISTRATION")
            .map(|v| matches!(v.trim().to_ascii_lowercase().as_str(), "true" | "1"))
            .unwrap_or(false);
        let base_url = std::env::var("BASE_URL")
            .unwrap_or_else(|_| "http://localhost:3000".to_string());
        Self { allow_registration, base_url }
    }
}

View File

@@ -2,9 +2,11 @@ use std::sync::Arc;
use domain::ports::{ use domain::ports::{
AuthService, EventPublisher, MetadataClient, MovieRepository, PasswordHasher, AuthService, EventPublisher, MetadataClient, MovieRepository, PasswordHasher,
PosterFetcherClient, PosterStorage, PosterFetcherClient, PosterStorage, UserRepository,
}; };
use crate::config::AppConfig;
#[derive(Clone)] #[derive(Clone)]
pub struct AppContext { pub struct AppContext {
pub repository: Arc<dyn MovieRepository>, pub repository: Arc<dyn MovieRepository>,
@@ -14,4 +16,6 @@ pub struct AppContext {
pub event_publisher: Arc<dyn EventPublisher>, pub event_publisher: Arc<dyn EventPublisher>,
pub auth_service: Arc<dyn AuthService>, pub auth_service: Arc<dyn AuthService>,
pub password_hasher: Arc<dyn PasswordHasher>, pub password_hasher: Arc<dyn PasswordHasher>,
pub user_repository: Arc<dyn UserRepository>,
pub config: AppConfig,
} }

View File

@@ -1,5 +1,7 @@
pub mod commands; pub mod commands;
pub mod config;
pub mod context; pub mod context;
pub mod movie_resolver;
pub mod ports; pub mod ports;
pub mod queries; pub mod queries;
pub mod use_cases; pub mod use_cases;

View File

@@ -0,0 +1,595 @@
use async_trait::async_trait;
use domain::{
errors::DomainError,
models::Movie,
ports::{MetadataClient, MetadataSearchCriteria, MovieRepository},
value_objects::{ExternalMetadataId, MovieTitle, ReleaseYear},
};
use crate::commands::LogReviewCommand;
/// Borrowed collaborators handed to every resolution strategy: the local
/// movie store and the external metadata provider.
pub struct MovieResolverDeps<'a> {
    pub repository: &'a dyn MovieRepository,
    pub metadata_client: &'a dyn MetadataClient,
}
/// One step of the movie-resolution pipeline.
///
/// `can_handle` is a cheap applicability check on the command;
/// `resolve` returns `Ok(Some((movie, is_new)))` on success (where `is_new`
/// means the movie must still be persisted), `Ok(None)` to fall through to
/// the next strategy, or `Err` to abort resolution entirely.
#[async_trait]
pub trait ResolutionStrategy: Send + Sync {
    fn can_handle(&self, cmd: &LogReviewCommand) -> bool;
    async fn resolve(
        &self,
        cmd: &LogReviewCommand,
        deps: &MovieResolverDeps<'_>,
    ) -> Result<Option<(Movie, bool)>, DomainError>;
}
/// Resolves via an external metadata id: repository cache first, then a
/// remote fetch.
pub struct ExternalIdStrategy;
/// Resolves by querying the metadata provider with a manual title (+ year).
pub struct TitleSearchStrategy;
/// Resolves purely from manually entered fields, reusing an existing
/// repository entry when one matches.
pub struct ManualMovieStrategy;
/// Ordered pipeline of resolution strategies; the first applicable strategy
/// that yields a movie wins.
pub struct MovieResolver {
    strategies: Vec<Box<dyn ResolutionStrategy>>,
}
impl MovieResolver {
pub fn default_pipeline() -> Self {
Self {
strategies: vec![
Box::new(ExternalIdStrategy),
Box::new(TitleSearchStrategy),
Box::new(ManualMovieStrategy),
],
}
}
pub async fn resolve(
&self,
cmd: &LogReviewCommand,
deps: &MovieResolverDeps<'_>,
) -> Result<(Movie, bool), DomainError> {
for strategy in &self.strategies {
if strategy.can_handle(cmd) {
if let Some(result) = strategy.resolve(cmd, deps).await? {
return Ok(result);
}
}
}
Err(DomainError::ValidationError(
"Manual title required if TMDB fetch fails or is omitted".into(),
))
}
}
#[async_trait]
impl ResolutionStrategy for ExternalIdStrategy {
    /// Applicable whenever the command carries an external metadata id.
    fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
        cmd.external_metadata_id.is_some()
    }

    /// Resolves via the external id: the local cache first, then the
    /// metadata provider. Returns `Ok(None)` (fall through) when the
    /// provider is unavailable.
    async fn resolve(
        &self,
        cmd: &LogReviewCommand,
        deps: &MovieResolverDeps<'_>,
    ) -> Result<Option<(Movie, bool)>, DomainError> {
        // Defensive: fall through instead of panicking if this strategy is
        // ever invoked without an id (previously `.unwrap()`).
        let Some(ext_id_str) = cmd.external_metadata_id.as_deref() else {
            return Ok(None);
        };
        let tmdb_id = ExternalMetadataId::new(ext_id_str.to_string())?;
        // Cache hit: reuse the stored movie; `false` = not newly created.
        if let Some(m) = deps.repository.get_movie_by_external_id(&tmdb_id).await? {
            return Ok(Some((m, false)));
        }
        match deps
            .metadata_client
            .fetch_movie_metadata(&MetadataSearchCriteria::ImdbId(tmdb_id))
            .await
        {
            Ok(m) => Ok(Some((m, true))),
            Err(e) => {
                // Provider failure is non-fatal: log and let the next
                // strategy (title search / manual) take over.
                tracing::warn!(
                    "Failed to fetch from TMDB, falling back to manual entry: {:?}",
                    e
                );
                Ok(None)
            }
        }
    }
}
#[async_trait]
impl ResolutionStrategy for TitleSearchStrategy {
    /// Applicable whenever a manual title was supplied.
    fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
        cmd.manual_title.is_some()
    }

    /// Searches the metadata provider by title (and year, when given).
    /// Returns `Ok(None)` to fall through to manual entry on provider
    /// failure.
    async fn resolve(
        &self,
        cmd: &LogReviewCommand,
        deps: &MovieResolverDeps<'_>,
    ) -> Result<Option<(Movie, bool)>, DomainError> {
        // Defensive: fall through instead of panicking if invoked without a
        // title (previously `.unwrap()`), mirroring ExternalIdStrategy.
        let Some(title) = cmd.manual_title.as_deref() else {
            return Ok(None);
        };
        let criteria = MetadataSearchCriteria::Title {
            title: title.to_string(),
            year: cmd.manual_release_year,
        };
        match deps.metadata_client.fetch_movie_metadata(&criteria).await {
            Ok(m) => Ok(Some((m, true))),
            Err(e) => {
                // Non-fatal: ManualMovieStrategy handles the command next.
                tracing::warn!("OMDb title search failed, falling back to manual: {:?}", e);
                Ok(None)
            }
        }
    }
}
#[async_trait]
impl ResolutionStrategy for ManualMovieStrategy {
    /// Applicable whenever a manual title was supplied.
    fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
        cmd.manual_title.is_some()
    }

    /// Builds a movie purely from the manually entered fields. Requires a
    /// release year; reuses an existing repository entry when title/year/
    /// director match, otherwise creates a fresh (unsaved) movie.
    async fn resolve(
        &self,
        cmd: &LogReviewCommand,
        deps: &MovieResolverDeps<'_>,
    ) -> Result<Option<(Movie, bool)>, DomainError> {
        let Some(raw_title) = cmd.manual_title.as_ref() else {
            return Ok(None);
        };
        let Some(raw_year) = cmd.manual_release_year else {
            return Err(DomainError::ValidationError(
                "Manual release year required if TMDB fetch fails or is omitted".into(),
            ));
        };
        let title = MovieTitle::new(raw_title.clone())?;
        let release_year = ReleaseYear::new(raw_year)?;
        // Deduplicate against existing entries with the same title/year.
        let existing = deps
            .repository
            .get_movies_by_title_and_year(&title, &release_year)
            .await?
            .into_iter()
            .find(|m| m.is_manual_match(&title, &release_year, cmd.manual_director.as_deref()));
        match existing {
            Some(movie) => Ok(Some((movie, false))),
            None => {
                let created =
                    Movie::new(None, title, release_year, cmd.manual_director.clone(), None);
                Ok(Some((created, true)))
            }
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::NaiveDate;
use domain::{
errors::DomainError,
events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated},
ports::{MetadataSearchCriteria, MovieRepository},
value_objects::{
ExternalMetadataId, MovieId, MovieTitle, PosterUrl, ReleaseYear, ReviewId,
},
};
    /// Builds a LogReviewCommand varying only the identification fields;
    /// user, rating and date are fixed so tests stay focused.
    fn make_cmd(ext_id: Option<&str>, title: Option<&str>, year: Option<u16>) -> LogReviewCommand {
        LogReviewCommand {
            external_metadata_id: ext_id.map(String::from),
            manual_title: title.map(String::from),
            manual_release_year: year,
            manual_director: None,
            user_id: uuid::Uuid::new_v4(),
            rating: 4,
            comment: None,
            // Fixed timestamp: 2024-01-01 00:00:00.
            watched_at: NaiveDate::from_ymd_opt(2024, 1, 1)
                .unwrap()
                .and_hms_opt(0, 0, 0)
                .unwrap(),
        }
    }
    /// A minimal valid movie ("Inception", 2010) with no director or poster.
    fn make_movie() -> Movie {
        Movie::new(
            None,
            MovieTitle::new("Inception".to_string()).unwrap(),
            ReleaseYear::new(2010).unwrap(),
            None,
            None,
        )
    }
    // Mock repositories: each implements only the methods its tests exercise;
    // every other method panics so an unexpected call fails loudly.

    /// Repository whose external-id lookup always returns the wrapped movie.
    struct RepoWithExternalMovie(Movie);
    /// Repository that finds nothing (empty external-id and title lookups).
    struct RepoEmpty;
    /// Repository whose title/year lookup returns the wrapped movie.
    struct RepoWithTitleMatch(Movie);

    #[async_trait]
    impl MovieRepository for RepoWithExternalMovie {
        async fn get_movie_by_external_id(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<Movie>, DomainError> {
            // Cache hit regardless of the id queried.
            Ok(Some(self.0.clone()))
        }
        async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
            panic!("unexpected")
        }
        async fn get_movies_by_title_and_year(
            &self,
            _: &MovieTitle,
            _: &ReleaseYear,
        ) -> Result<Vec<Movie>, DomainError> {
            panic!("unexpected")
        }
        async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
            panic!("unexpected")
        }
        async fn query_diary(
            &self,
            _: &DiaryFilter,
        ) -> Result<Paginated<DiaryEntry>, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_by_id(
            &self,
            _: &ReviewId,
        ) -> Result<Option<Review>, DomainError> {
            panic!("unexpected")
        }
        async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
        async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
    }

    #[async_trait]
    impl MovieRepository for RepoEmpty {
        async fn get_movie_by_external_id(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<Movie>, DomainError> {
            // Always a cache miss.
            Ok(None)
        }
        async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
            panic!("unexpected")
        }
        async fn get_movies_by_title_and_year(
            &self,
            _: &MovieTitle,
            _: &ReleaseYear,
        ) -> Result<Vec<Movie>, DomainError> {
            // No title/year candidates either.
            Ok(vec![])
        }
        async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
            panic!("unexpected")
        }
        async fn query_diary(
            &self,
            _: &DiaryFilter,
        ) -> Result<Paginated<DiaryEntry>, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_by_id(
            &self,
            _: &ReviewId,
        ) -> Result<Option<Review>, DomainError> {
            panic!("unexpected")
        }
        async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
        async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
    }

    #[async_trait]
    impl MovieRepository for RepoWithTitleMatch {
        async fn get_movie_by_external_id(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<Movie>, DomainError> {
            panic!("unexpected")
        }
        async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
            panic!("unexpected")
        }
        async fn get_movies_by_title_and_year(
            &self,
            _: &MovieTitle,
            _: &ReleaseYear,
        ) -> Result<Vec<Movie>, DomainError> {
            // Single candidate matching any title/year query.
            Ok(vec![self.0.clone()])
        }
        async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
            panic!("unexpected")
        }
        async fn query_diary(
            &self,
            _: &DiaryFilter,
        ) -> Result<Paginated<DiaryEntry>, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_by_id(
            &self,
            _: &ReviewId,
        ) -> Result<Option<Review>, DomainError> {
            panic!("unexpected")
        }
        async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
        async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
    }
    /// Metadata client whose fetch always succeeds with the wrapped movie.
    struct MetaReturnsMovie(Movie);
    /// Metadata client whose fetch always fails (simulated provider outage).
    struct MetaErrors;

    #[async_trait]
    impl MetadataClient for MetaReturnsMovie {
        async fn fetch_movie_metadata(
            &self,
            _: &MetadataSearchCriteria,
        ) -> Result<Movie, DomainError> {
            Ok(self.0.clone())
        }
        async fn get_poster_url(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<PosterUrl>, DomainError> {
            // Poster lookup is out of scope for resolver tests.
            panic!("unexpected")
        }
    }

    #[async_trait]
    impl MetadataClient for MetaErrors {
        async fn fetch_movie_metadata(
            &self,
            _: &MetadataSearchCriteria,
        ) -> Result<Movie, DomainError> {
            Err(DomainError::InfrastructureError("metadata unavailable".into()))
        }
        async fn get_poster_url(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<PosterUrl>, DomainError> {
            panic!("unexpected")
        }
    }
    // --- ExternalIdStrategy ---
    #[test]
    fn external_id_strategy_can_handle_cmd_with_id() {
        let cmd = make_cmd(Some("tt123"), None, None);
        assert!(ExternalIdStrategy.can_handle(&cmd));
    }
    #[test]
    fn external_id_strategy_cannot_handle_cmd_without_id() {
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        assert!(!ExternalIdStrategy.can_handle(&cmd));
    }
    #[tokio::test]
    async fn external_id_strategy_returns_cached_movie() {
        let movie = make_movie();
        let repo = RepoWithExternalMovie(movie.clone());
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(Some("tt123"), None, None);
        let result = ExternalIdStrategy.resolve(&cmd, &deps).await.unwrap();
        // `false` flag = movie came from the cache, not newly created.
        assert!(matches!(result, Some((_, false))));
    }
    #[tokio::test]
    async fn external_id_strategy_fetches_from_metadata_when_not_cached() {
        let movie = make_movie();
        let repo = RepoEmpty;
        let meta = MetaReturnsMovie(movie);
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(Some("tt123"), None, None);
        let result = ExternalIdStrategy.resolve(&cmd, &deps).await.unwrap();
        // `true` flag = freshly fetched, must be persisted by the caller.
        assert!(matches!(result, Some((_, true))));
    }
    #[tokio::test]
    async fn external_id_strategy_falls_through_on_metadata_error() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(Some("tt123"), None, None);
        let result = ExternalIdStrategy.resolve(&cmd, &deps).await.unwrap();
        // Provider failure yields None so the next strategy can run.
        assert!(result.is_none());
    }
    // --- TitleSearchStrategy ---
    #[test]
    fn title_strategy_can_handle_cmd_with_title() {
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        assert!(TitleSearchStrategy.can_handle(&cmd));
    }
    #[test]
    fn title_strategy_cannot_handle_cmd_without_title() {
        let cmd = make_cmd(Some("tt123"), None, None);
        assert!(!TitleSearchStrategy.can_handle(&cmd));
    }
    #[tokio::test]
    async fn title_strategy_fetches_from_metadata() {
        let movie = make_movie();
        let repo = RepoEmpty;
        let meta = MetaReturnsMovie(movie);
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        let result = TitleSearchStrategy.resolve(&cmd, &deps).await.unwrap();
        assert!(matches!(result, Some((_, true))));
    }
    #[tokio::test]
    async fn title_strategy_falls_through_on_metadata_error() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        let result = TitleSearchStrategy.resolve(&cmd, &deps).await.unwrap();
        assert!(result.is_none());
    }
    // --- ManualMovieStrategy ---
    #[test]
    fn manual_strategy_can_handle_cmd_with_title() {
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        assert!(ManualMovieStrategy.can_handle(&cmd));
    }
    #[test]
    fn manual_strategy_cannot_handle_cmd_without_title() {
        let cmd = make_cmd(Some("tt123"), None, None);
        assert!(!ManualMovieStrategy.can_handle(&cmd));
    }
    #[tokio::test]
    async fn manual_strategy_returns_existing_movie() {
        let movie = make_movie();
        let repo = RepoWithTitleMatch(movie.clone());
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        let result = ManualMovieStrategy.resolve(&cmd, &deps).await.unwrap();
        // Matching repository entry is reused rather than duplicated.
        assert!(matches!(result, Some((_, false))));
    }
    #[tokio::test]
    async fn manual_strategy_creates_new_movie_when_no_match() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        let result = ManualMovieStrategy.resolve(&cmd, &deps).await.unwrap();
        assert!(matches!(result, Some((_, true))));
    }
    #[tokio::test]
    async fn manual_strategy_errors_without_year() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        // Manual entry with no year cannot be validated.
        let cmd = make_cmd(None, Some("Inception"), None);
        assert!(ManualMovieStrategy.resolve(&cmd, &deps).await.is_err());
    }
    // --- MovieResolver pipeline ---
    #[tokio::test]
    async fn resolver_returns_error_when_no_strategy_matches() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        // No id and no title: nothing can handle the command.
        let cmd = make_cmd(None, None, None);
        let result = MovieResolver::default_pipeline().resolve(&cmd, &deps).await;
        assert!(result.is_err());
    }
    #[tokio::test]
    async fn resolver_uses_cached_movie_when_external_id_matches() {
        let movie = make_movie();
        let repo = RepoWithExternalMovie(movie.clone());
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(Some("tt123"), None, None);
        let (_, is_new) = MovieResolver::default_pipeline()
            .resolve(&cmd, &deps)
            .await
            .unwrap();
        assert!(!is_new);
    }
    #[tokio::test]
    async fn resolver_falls_through_to_manual_when_external_and_title_both_fail() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        // Both remote paths fail; the manual strategy creates a new movie.
        let cmd = make_cmd(Some("tt123"), Some("Inception"), Some(2010));
        let (_, is_new) = MovieResolver::default_pipeline()
            .resolve(&cmd, &deps)
            .await
            .unwrap();
        assert!(is_new);
    }
}

View File

@@ -1,5 +1,74 @@
use domain::models::{DiaryEntry, collections::Paginated}; use uuid::Uuid;
use domain::models::{DiaryEntry, FeedEntry, MonthActivity, UserStats, UserSummary, UserTrends, collections::Paginated};
/// Per-request context shared by every HTML page: nav state, auth info and
/// SEO fields consumed by the base template.
pub struct HtmlPageContext {
    pub user_email: Option<String>,
    pub user_id: Option<Uuid>,
    // Whether the Register nav link is rendered (ALLOW_REGISTRATION).
    pub register_enabled: bool,
    // RSS link target: user feed on profile pages, global feed elsewhere.
    pub rss_url: String,
    pub page_title: String,
    pub canonical_url: String,
}
impl HtmlPageContext {
    /// True when `id` is the currently logged-in user (used to gate
    /// owner-only UI such as review delete buttons).
    pub fn is_current_user(&self, id: Uuid) -> bool {
        self.user_id == Some(id)
    }
}
/// Data for the login page; `error` is a message from a failed attempt.
pub struct LoginPageData<'a> {
    pub ctx: HtmlPageContext,
    pub error: Option<&'a str>,
}
/// Data for the registration page.
pub struct RegisterPageData<'a> {
    pub ctx: HtmlPageContext,
    pub error: Option<&'a str>,
}
/// Data for the new-review form page.
pub struct NewReviewPageData<'a> {
    pub ctx: HtmlPageContext,
    pub error: Option<&'a str>,
}
/// Data for the paginated activity feed (home page).
pub struct ActivityFeedPageData {
    pub ctx: HtmlPageContext,
    pub entries: Paginated<FeedEntry>,
    pub current_offset: u32,
    pub has_more: bool,
    pub limit: u32,
}
/// Data for the members list page.
pub struct UsersPageData {
    pub ctx: HtmlPageContext,
    pub users: Vec<UserSummary>,
}
/// Data for a user's profile page; `view` selects the tab and only the
/// matching Option field (entries / history / trends) is populated.
pub struct ProfilePageData {
    pub ctx: HtmlPageContext,
    pub profile_user_id: Uuid,
    pub profile_user_email: String,
    pub stats: UserStats,
    pub view: String,
    pub entries: Option<Paginated<DiaryEntry>>,
    pub current_offset: u32,
    pub has_more: bool,
    pub limit: u32,
    pub history: Option<Vec<MonthActivity>>,
    pub trends: Option<UserTrends>,
}
pub trait HtmlRenderer: Send + Sync { pub trait HtmlRenderer: Send + Sync {
fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String>; fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String>;
fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String>;
fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String>;
fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String>;
fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String>;
fn render_users_page(&self, data: UsersPageData) -> Result<String, String>;
fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String>;
}
pub trait RssFeedRenderer: Send + Sync {
fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String>;
} }

View File

@@ -6,8 +6,23 @@ pub struct GetDiaryQuery {
pub offset: Option<u32>, pub offset: Option<u32>,
pub sort_by: Option<SortDirection>, pub sort_by: Option<SortDirection>,
pub movie_id: Option<Uuid>, pub movie_id: Option<Uuid>,
pub user_id: Option<Uuid>,
} }
pub struct GetReviewHistoryQuery { pub struct GetReviewHistoryQuery {
pub movie_id: Uuid, pub movie_id: Uuid,
} }
/// Pagination parameters for the global activity feed.
pub struct GetActivityFeedQuery {
pub limit: Option<u32>,
pub offset: Option<u32>,
}
/// Marker query for listing all users (no parameters yet).
pub struct GetUsersQuery;
/// Parameters for a user profile page.
pub struct GetUserProfileQuery {
pub user_id: Uuid,
// Which profile section to load: "recent", "ratings", "history", or "trends".
pub view: String,
// Pagination, only used by the "recent"/"ratings" views.
pub limit: Option<u32>,
pub offset: Option<u32>,
}

View File

@@ -0,0 +1,27 @@
use domain::{errors::DomainError, value_objects::{ReviewId, UserId}};
use crate::{commands::DeleteReviewCommand, context::AppContext};
/// Deletes a review owned by the requesting user.
///
/// The review must exist and belong to the requester; otherwise `NotFound`
/// or `Unauthorized` is returned. If the deleted review was the movie's last
/// remaining viewing, the now-orphaned movie record is removed as well.
pub async fn execute(ctx: &AppContext, cmd: DeleteReviewCommand) -> Result<(), DomainError> {
    let review_id = ReviewId::from_uuid(cmd.review_id);
    let requester = UserId::from_uuid(cmd.requesting_user_id);

    let review = match ctx.repository.get_review_by_id(&review_id).await? {
        Some(found) => found,
        None => return Err(DomainError::NotFound(format!("review {}", cmd.review_id))),
    };

    // Ownership check: users may only delete their own reviews.
    if review.user_id() != &requester {
        return Err(DomainError::Unauthorized("not your review".into()));
    }

    let movie_id = review.movie_id().clone();
    ctx.repository.delete_review(&review_id).await?;

    // Garbage-collect the movie once no viewings reference it any more.
    let remaining = ctx.repository.get_review_history(&movie_id).await?;
    if remaining.viewings().is_empty() {
        ctx.repository.delete_movie(&movie_id).await?;
    }
    Ok(())
}

View File

@@ -0,0 +1,13 @@
use domain::{
errors::DomainError,
models::{FeedEntry, collections::{PageParams, Paginated}},
};
use crate::{context::AppContext, queries::GetActivityFeedQuery};
/// Returns one page of the global activity feed (all users' reviews).
/// Limit/offset are validated by `PageParams::new`.
pub async fn execute(
    ctx: &AppContext,
    query: GetActivityFeedQuery,
) -> Result<Paginated<FeedEntry>, DomainError> {
    let page_params = PageParams::new(query.limit, query.offset)?;
    ctx.repository.query_activity_feed(&page_params).await
}

View File

@@ -4,7 +4,7 @@ use domain::{
DiaryEntry, DiaryFilter, SortDirection, DiaryEntry, DiaryFilter, SortDirection,
collections::{PageParams, Paginated}, collections::{PageParams, Paginated},
}, },
value_objects::MovieId, value_objects::{MovieId, UserId},
}; };
use crate::{context::AppContext, queries::GetDiaryQuery}; use crate::{context::AppContext, queries::GetDiaryQuery};
@@ -14,16 +14,15 @@ pub async fn execute(
query: GetDiaryQuery, query: GetDiaryQuery,
) -> Result<Paginated<DiaryEntry>, DomainError> { ) -> Result<Paginated<DiaryEntry>, DomainError> {
let page = PageParams::new(query.limit, query.offset)?; let page = PageParams::new(query.limit, query.offset)?;
let movie_id = query.movie_id.map(MovieId::from_uuid); let movie_id = query.movie_id.map(MovieId::from_uuid);
let user_id = query.user_id.map(UserId::from_uuid);
let filter = DiaryFilter { let filter = DiaryFilter {
sort_by: query.sort_by.unwrap_or(SortDirection::Descending), sort_by: query.sort_by.unwrap_or(SortDirection::Descending),
page, page,
movie_id, movie_id,
user_id,
}; };
let paginated_results = ctx.repository.query_diary(&filter).await?; ctx.repository.query_diary(&filter).await
Ok(paginated_results)
} }

View File

@@ -0,0 +1,93 @@
use domain::{
errors::DomainError,
models::{
DiaryEntry, DiaryFilter, MonthActivity, SortDirection, UserStats, UserTrends,
collections::{PageParams, Paginated},
},
value_objects::UserId,
};
use crate::{context::AppContext, queries::GetUserProfileQuery};
/// Result of the profile query: always-present stats plus at most one
/// view-specific section.
pub struct UserProfileData {
pub stats: UserStats,
// Populated for the "recent" and "ratings" views.
pub entries: Option<Paginated<DiaryEntry>>,
// Populated for the "history" view (all entries grouped by month).
pub history: Option<Vec<MonthActivity>>,
// Populated for the "trends" view.
pub trends: Option<UserTrends>,
}
/// Assembles the data for a user's profile page.
///
/// The `view` selector decides which optional section is loaded alongside
/// the always-present stats:
/// - "recent"  — paginated diary entries, newest first
/// - "ratings" — paginated diary entries, highest-rated first
/// - "history" — every entry, grouped by calendar month
/// - "trends"  — monthly rating averages and top directors
///
/// # Errors
/// `ValidationError` for an unknown view; repository errors pass through.
pub async fn execute(
    ctx: &AppContext,
    query: GetUserProfileQuery,
) -> Result<UserProfileData, DomainError> {
    let user_id = UserId::from_uuid(query.user_id);
    let stats = ctx.repository.get_user_stats(&user_id).await?;
    match query.view.as_str() {
        "history" => {
            // V1: loads all entries into memory. Personal diaries are bounded in size;
            // spec calls for showing every movie grouped by month, so full load is intentional.
            let all_entries = ctx.repository.get_user_history(&user_id).await?;
            let history = group_by_month(all_entries);
            Ok(UserProfileData { stats, entries: None, history: Some(history), trends: None })
        }
        "trends" => {
            let trends = ctx.repository.get_user_trends(&user_id).await?;
            Ok(UserProfileData { stats, entries: None, history: None, trends: Some(trends) })
        }
        // "ratings" and "recent" share the same paginated diary query and
        // differ only in sort order, so they are handled by one arm.
        "ratings" | "recent" => {
            let sort_by = if query.view == "ratings" {
                SortDirection::ByRatingDesc
            } else {
                SortDirection::Descending
            };
            let filter = DiaryFilter {
                sort_by,
                page: PageParams::new(query.limit, query.offset)?,
                movie_id: None,
                user_id: Some(user_id),
            };
            let entries = ctx.repository.query_diary(&filter).await?;
            Ok(UserProfileData { stats, entries: Some(entries), history: None, trends: None })
        }
        other => Err(DomainError::ValidationError(format!("unknown view: {}", other))),
    }
}
/// Groups diary entries into calendar months, newest month first.
fn group_by_month(entries: Vec<DiaryEntry>) -> Vec<MonthActivity> {
    use std::collections::BTreeMap;
    // Bucket entries under their "YYYY-MM" key.
    let mut by_month: BTreeMap<String, Vec<DiaryEntry>> = BTreeMap::new();
    for entry in entries {
        let key = entry.review().watched_at().format("%Y-%m").to_string();
        by_month.entry(key).or_default().push(entry);
    }
    // BTreeMap iterates in ascending key order; `.rev()` yields newest first.
    by_month
        .into_iter()
        .rev()
        .map(|(ym, month_entries)| MonthActivity {
            month_label: format_year_month_long(&ym),
            count: month_entries.len() as i64,
            entries: month_entries,
            year_month: ym,
        })
        .collect()
}
/// Expands a "YYYY-MM" key into a long label, e.g. "2024-03" -> "March 2024".
/// Inputs without a '-' pass through unchanged; an unrecognised month part
/// is kept verbatim ("2024-13" -> "13 2024").
fn format_year_month_long(ym: &str) -> String {
    let Some((year, month)) = ym.split_once('-') else {
        return ym.to_string();
    };
    let label = match month {
        "01" => "January",
        "02" => "February",
        "03" => "March",
        "04" => "April",
        "05" => "May",
        "06" => "June",
        "07" => "July",
        "08" => "August",
        "09" => "September",
        "10" => "October",
        "11" => "November",
        "12" => "December",
        other => other,
    };
    format!("{} {}", label, year)
}

View File

@@ -0,0 +1,9 @@
use domain::{errors::DomainError, models::UserSummary};
use crate::{context::AppContext, queries::GetUsersQuery};
/// Lists every user together with aggregate stats (see `UserSummary`:
/// movie count and average rating). The query struct carries no parameters yet.
pub async fn execute(
ctx: &AppContext,
_query: GetUsersQuery,
) -> Result<Vec<UserSummary>, DomainError> {
ctx.user_repository.list_with_stats().await
}

View File

@@ -2,17 +2,25 @@ use domain::{
errors::DomainError, errors::DomainError,
events::DomainEvent, events::DomainEvent,
models::{Movie, Review}, models::{Movie, Review},
value_objects::{Comment, ExternalMetadataId, MovieTitle, Rating, ReleaseYear, UserId}, value_objects::{Comment, Rating, UserId},
}; };
use crate::{commands::LogReviewCommand, context::AppContext}; use crate::{
commands::LogReviewCommand,
context::AppContext,
movie_resolver::{MovieResolver, MovieResolverDeps},
};
pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), DomainError> { pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), DomainError> {
let rating = Rating::new(cmd.rating)?; let rating = Rating::new(cmd.rating)?;
let user_id = UserId::from_uuid(cmd.user_id); let user_id = UserId::from_uuid(cmd.user_id);
let comment = cmd.comment.clone().map(Comment::new).transpose()?; let comment = cmd.comment.clone().map(Comment::new).transpose()?;
let (movie, is_new_movie) = resolve_movie(ctx, &cmd).await?; let deps = MovieResolverDeps {
repository: ctx.repository.as_ref(),
metadata_client: ctx.metadata_client.as_ref(),
};
let (movie, is_new_movie) = MovieResolver::default_pipeline().resolve(&cmd, &deps).await?;
ctx.repository.upsert_movie(&movie).await?; ctx.repository.upsert_movie(&movie).await?;
@@ -24,76 +32,6 @@ pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), Doma
Ok(()) Ok(())
} }
async fn resolve_movie(
ctx: &AppContext,
cmd: &LogReviewCommand,
) -> Result<(Movie, bool), DomainError> {
if let Some(ext_id_str) = &cmd.external_metadata_id {
if let Some(resolved) = resolve_external_movie(ctx, ext_id_str).await? {
return Ok(resolved);
}
}
resolve_manual_movie(ctx, cmd).await
}
async fn resolve_external_movie(
ctx: &AppContext,
ext_id_str: &str,
) -> Result<Option<(Movie, bool)>, DomainError> {
let tmdb_id = ExternalMetadataId::new(ext_id_str.to_string())?;
if let Some(m) = ctx.repository.get_movie_by_external_id(&tmdb_id).await? {
return Ok(Some((m, false)));
}
match ctx.metadata_client.fetch_movie_metadata(&tmdb_id).await {
Ok(m) => Ok(Some((m, true))),
Err(e) => {
tracing::warn!(
"Failed to fetch from TMDB, falling back to manual entry: {:?}",
e
);
Ok(None)
}
}
}
async fn resolve_manual_movie(
ctx: &AppContext,
cmd: &LogReviewCommand,
) -> Result<(Movie, bool), DomainError> {
let title_str = cmd.manual_title.as_ref().ok_or_else(|| {
DomainError::ValidationError(
"Manual title required if TMDB fetch fails or is omitted".into(),
)
})?;
let year_val = cmd.manual_release_year.ok_or_else(|| {
DomainError::ValidationError(
"Manual release year required if TMDB fetch fails or is omitted".into(),
)
})?;
let title = MovieTitle::new(title_str.clone())?;
let release_year = ReleaseYear::new(year_val)?;
let candidates = ctx
.repository
.get_movies_by_title_and_year(&title, &release_year)
.await?;
let matched_movie = candidates
.into_iter()
.find(|m| m.is_manual_match(&title, &release_year, cmd.manual_director.as_deref()));
if let Some(existing_movie) = matched_movie {
Ok((existing_movie, false))
} else {
let new_movie = Movie::new(None, title, release_year, cmd.manual_director.clone(), None);
Ok((new_movie, true))
}
}
async fn publish_events( async fn publish_events(
ctx: &AppContext, ctx: &AppContext,
movie: &Movie, movie: &Movie,

View File

@@ -0,0 +1,39 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;
use domain::{errors::DomainError, value_objects::Email};
use crate::{commands::LoginCommand, context::AppContext};
/// Successful login payload: session token plus user identity.
pub struct LoginResult {
pub token: String,
pub user_id: Uuid,
pub email: String,
// Token expiry; presumably used for the session cookie's lifetime — confirm in the HTTP handler.
pub expires_at: DateTime<Utc>,
}
/// Authenticates by email + password and issues a session token.
///
/// Both the unknown-email and wrong-password paths return the same
/// `Unauthorized("Invalid credentials")` so the response body does not
/// reveal whether an account exists.
///
/// NOTE(review): the unknown-email path returns before any hash verification
/// runs, so response *timing* can still distinguish the two cases; consider
/// verifying against a dummy hash to equalize timing.
pub async fn execute(ctx: &AppContext, cmd: LoginCommand) -> Result<LoginResult, DomainError> {
// Email validation happens first; a malformed address fails before any lookup.
let email = Email::new(cmd.email)?;
let user = ctx
.user_repository
.find_by_email(&email)
.await?
.ok_or_else(|| DomainError::Unauthorized("Invalid credentials".into()))?;
let valid = ctx
.password_hasher
.verify(&cmd.password, user.password_hash())
.await?;
if !valid {
return Err(DomainError::Unauthorized("Invalid credentials".into()));
}
// Only reached with a verified password: mint the session token.
let generated = ctx.auth_service.generate_token(user.id()).await?;
Ok(LoginResult {
token: generated.token,
user_id: user.id().value(),
email: user.email().value().to_string(),
expires_at: generated.expires_at,
})
}

View File

@@ -1,4 +1,10 @@
pub mod delete_review;
pub mod get_activity_feed;
pub mod get_diary; pub mod get_diary;
pub mod get_review_history; pub mod get_review_history;
pub mod get_user_profile;
pub mod get_users;
pub mod log_review; pub mod log_review;
pub mod login;
pub mod register;
pub mod sync_poster; pub mod sync_poster;

View File

@@ -0,0 +1,28 @@
use domain::{errors::DomainError, models::User, value_objects::Email};
use crate::{commands::RegisterCommand, context::AppContext};
const MIN_PASSWORD_LENGTH: usize = 8;
/// Registers a new account, guarded by the `allow_registration` config flag.
///
/// # Errors
/// - `Unauthorized` when registration is disabled.
/// - `ValidationError` for a too-short password, a malformed email, or a
///   duplicate email.
pub async fn execute(ctx: &AppContext, cmd: RegisterCommand) -> Result<(), DomainError> {
    if !ctx.config.allow_registration {
        return Err(DomainError::Unauthorized("Registration is disabled".into()));
    }
    // NOTE: `len()` measures UTF-8 bytes, not characters.
    if cmd.password.len() < MIN_PASSWORD_LENGTH {
        // Derive the message from the constant so the two can never drift apart
        // (the previous message hardcoded "8").
        return Err(DomainError::ValidationError(format!(
            "Password must be at least {} characters",
            MIN_PASSWORD_LENGTH
        )));
    }
    let email = Email::new(cmd.email)?;
    if ctx.user_repository.find_by_email(&email).await?.is_some() {
        return Err(DomainError::ValidationError(
            "Email already registered".into(),
        ));
    }
    let hash = ctx.password_hasher.hash(&cmd.password).await?;
    ctx.user_repository.save(&User::new(email, hash)).await
}

View File

@@ -1,7 +0,0 @@
[package]
name = "common"
version = "0.1.0"
edition = "2024"
[dependencies]
thiserror = { workspace = true }

View File

@@ -1 +0,0 @@
pub mod errors;

View File

@@ -10,5 +10,4 @@ async-trait = { workspace = true }
anyhow = { workspace = true } anyhow = { workspace = true }
thiserror = { workspace = true } thiserror = { workspace = true }
common = { workspace = true }
email_address = "0.2.9" email_address = "0.2.9"

View File

@@ -13,4 +13,7 @@ pub enum DomainError {
#[error("Infrastructure failure: {0}")] #[error("Infrastructure failure: {0}")]
InfrastructureError(String), InfrastructureError(String),
#[error("Unauthorized: {0}")]
Unauthorized(String),
} }

View File

@@ -16,7 +16,7 @@ pub struct PageParams {
impl PageParams { impl PageParams {
const MAX_LIMIT: u32 = 100; const MAX_LIMIT: u32 = 100;
const DEFAULT_LIMIT: u32 = 20; const DEFAULT_LIMIT: u32 = 5;
pub fn new(limit: Option<u32>, offset: Option<u32>) -> Result<Self, DomainError> { pub fn new(limit: Option<u32>, offset: Option<u32>) -> Result<Self, DomainError> {
let l = limit.unwrap_or(Self::DEFAULT_LIMIT); let l = limit.unwrap_or(Self::DEFAULT_LIMIT);

View File

@@ -15,6 +15,7 @@ pub enum SortDirection {
#[default] #[default]
Descending, Descending,
Ascending, Ascending,
ByRatingDesc,
} }
#[derive(Clone, Debug, Default)] #[derive(Clone, Debug, Default)]
@@ -22,6 +23,7 @@ pub struct DiaryFilter {
pub sort_by: SortDirection, pub sort_by: SortDirection,
pub page: PageParams, pub page: PageParams,
pub movie_id: Option<MovieId>, pub movie_id: Option<MovieId>,
pub user_id: Option<UserId>,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -131,14 +133,6 @@ impl Review {
comment: Option<Comment>, comment: Option<Comment>,
watched_at: NaiveDateTime, watched_at: NaiveDateTime,
) -> Result<Self, DomainError> { ) -> Result<Self, DomainError> {
let now = Utc::now().naive_utc();
if watched_at > now {
return Err(DomainError::ValidationError(
"watched_at cannot be in the future".into(),
));
}
Ok(Self { Ok(Self {
id: ReviewId::generate(), id: ReviewId::generate(),
movie_id, movie_id,
@@ -146,7 +140,7 @@ impl Review {
rating, rating,
comment, comment,
watched_at, watched_at,
created_at: now, created_at: Utc::now().naive_utc(),
}) })
} }
@@ -191,6 +185,11 @@ impl Review {
pub fn created_at(&self) -> &NaiveDateTime { pub fn created_at(&self) -> &NaiveDateTime {
&self.created_at &self.created_at
} }
/// Returns [star1_filled, star2_filled, ..., star5_filled]
pub fn stars(&self) -> [bool; 5] {
let r = self.rating.value();
[r >= 1, r >= 2, r >= 3, r >= 4, r >= 5]
}
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -250,6 +249,10 @@ impl User {
} }
} }
pub fn from_persistence(id: UserId, email: Email, password_hash: PasswordHash) -> Self {
Self { id, email, password_hash }
}
pub fn update_password(&mut self, new_hash: PasswordHash) { pub fn update_password(&mut self, new_hash: PasswordHash) {
self.password_hash = new_hash; self.password_hash = new_hash;
} }
@@ -266,3 +269,90 @@ impl User {
&self.password_hash &self.password_hash
} }
} }
/// A diary entry enriched with its author's email, for the global activity feed.
#[derive(Clone, Debug)]
pub struct FeedEntry {
// Underlying movie + review pair.
entry: DiaryEntry,
// Author's login email; display code derives a short name from it.
user_email: String,
}
impl FeedEntry {
pub fn new(entry: DiaryEntry, user_email: String) -> Self {
Self { entry, user_email }
}
pub fn movie(&self) -> &Movie { self.entry.movie() }
pub fn review(&self) -> &Review { self.entry.review() }
pub fn user_email(&self) -> &str { &self.user_email }
/// Local part of the email ("alice" for "alice@example.com");
/// falls back to the full email if there is no '@'.
pub fn user_display_name(&self) -> &str {
self.user_email.split('@').next().unwrap_or(&self.user_email)
}
}
/// One row in the users directory: identity plus aggregate review stats.
#[derive(Clone, Debug)]
pub struct UserSummary {
    pub user_id: UserId,
    /// Full login email; the UI shows only the local part (see `display_name`).
    pub email: String,
    /// Number of movies the user has logged.
    pub total_movies: i64,
    /// Average rating across the user's reviews; `None` when there are none.
    pub avg_rating: Option<f64>,
}
impl UserSummary {
    /// Local part of the email ("alice" for "alice@example.com").
    pub fn display_name(&self) -> &str {
        self.email.split('@').next().unwrap_or(&self.email)
    }
    /// Average rating rendered to one decimal place, or "" when absent.
    pub fn avg_rating_display(&self) -> String {
        // `unwrap_or_default` avoids the needless closure + allocation call chain.
        self.avg_rating.map(|r| format!("{:.1}", r)).unwrap_or_default()
    }
    /// Uppercased first character of the display name (avatar badge);
    /// '?' for an empty name. ASCII-only uppercasing, matching prior behavior.
    pub fn initial(&self) -> char {
        self.display_name().chars().next().unwrap_or('?').to_ascii_uppercase()
    }
}
/// Aggregate statistics shown in a profile header.
#[derive(Clone, Debug)]
pub struct UserStats {
    pub total_movies: i64,
    pub avg_rating: Option<f64>,
    pub favorite_director: Option<String>,
    pub most_active_month: Option<String>,
}
impl UserStats {
    /// Average rating to one decimal place; empty string when absent.
    pub fn avg_rating_display(&self) -> String {
        match self.avg_rating {
            Some(rating) => format!("{:.1}", rating),
            None => String::new(),
        }
    }
    /// Favorite director's name, or "" when unknown.
    pub fn favorite_director_display(&self) -> &str {
        self.favorite_director.as_deref().unwrap_or_default()
    }
    /// Most active month label, or "" when unknown.
    pub fn most_active_month_display(&self) -> &str {
        self.most_active_month.as_deref().unwrap_or_default()
    }
}
/// All diary entries for one calendar month, used by the "history" view.
#[derive(Clone, Debug)]
pub struct MonthActivity {
// "YYYY-MM" sort/group key.
pub year_month: String,
// Human-readable label, e.g. "March 2024".
pub month_label: String,
// Number of entries in this month.
pub count: i64,
pub entries: Vec<DiaryEntry>,
}
/// Average rating for one month, used by the "trends" chart.
#[derive(Clone, Debug)]
pub struct MonthlyRating {
pub year_month: String,
pub month_label: String,
pub avg_rating: f64,
pub count: i64,
}
/// How many reviewed movies credit a given director.
#[derive(Clone, Debug)]
pub struct DirectorStat {
pub director: String,
pub count: i64,
}
/// Trend data for a user profile: per-month averages and top directors.
#[derive(Clone, Debug)]
pub struct UserTrends {
pub monthly_ratings: Vec<MonthlyRating>,
pub top_directors: Vec<DirectorStat>,
// Presumably the largest count in `top_directors`, letting templates scale
// bar widths — confirm against the repository query that fills it.
pub max_director_count: i64,
}

View File

@@ -1,12 +1,17 @@
use async_trait::async_trait; use async_trait::async_trait;
use chrono::{DateTime, Utc};
use crate::{ use crate::{
errors::DomainError, errors::DomainError,
events::DomainEvent, events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated}, models::{
DiaryEntry, DiaryFilter, FeedEntry, Movie, Review, ReviewHistory, User, UserStats,
UserTrends, UserSummary,
collections::{PageParams, Paginated},
},
value_objects::{ value_objects::{
ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl, ReleaseYear, Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
UserId, ReleaseYear, ReviewId, UserId,
}, },
}; };
@@ -31,13 +36,35 @@ pub trait MovieRepository: Send + Sync {
-> Result<Paginated<DiaryEntry>, DomainError>; -> Result<Paginated<DiaryEntry>, DomainError>;
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError>; async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError>;
async fn get_review_by_id(&self, review_id: &ReviewId) -> Result<Option<Review>, DomainError>;
async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError>;
async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError>;
async fn query_activity_feed(
&self,
page: &PageParams,
) -> Result<Paginated<FeedEntry>, DomainError>;
async fn get_user_stats(&self, user_id: &UserId) -> Result<UserStats, DomainError>;
async fn get_user_history(&self, user_id: &UserId) -> Result<Vec<DiaryEntry>, DomainError>;
async fn get_user_trends(&self, user_id: &UserId) -> Result<UserTrends, DomainError>;
}
pub enum MetadataSearchCriteria {
ImdbId(ExternalMetadataId),
Title { title: String, year: Option<u16> },
} }
#[async_trait] #[async_trait]
pub trait MetadataClient: Send + Sync { pub trait MetadataClient: Send + Sync {
async fn fetch_movie_metadata( async fn fetch_movie_metadata(
&self, &self,
external_metadata_id: &ExternalMetadataId, criteria: &MetadataSearchCriteria,
) -> Result<Movie, DomainError>; ) -> Result<Movie, DomainError>;
async fn get_poster_url( async fn get_poster_url(
&self, &self,
@@ -61,11 +88,26 @@ pub trait PosterStorage: Send + Sync {
async fn get_poster(&self, poster_path: &PosterPath) -> Result<Vec<u8>, DomainError>; async fn get_poster(&self, poster_path: &PosterPath) -> Result<Vec<u8>, DomainError>;
} }
pub struct GeneratedToken {
pub token: String,
pub expires_at: DateTime<Utc>,
}
#[async_trait] #[async_trait]
pub trait AuthService: Send + Sync { pub trait AuthService: Send + Sync {
async fn generate_token(&self, user_id: &UserId) -> Result<GeneratedToken, DomainError>;
async fn validate_token(&self, token: &str) -> Result<UserId, DomainError>; async fn validate_token(&self, token: &str) -> Result<UserId, DomainError>;
} }
#[async_trait]
pub trait UserRepository: Send + Sync {
async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError>;
async fn save(&self, user: &User) -> Result<(), DomainError>;
async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError>;
async fn list_with_stats(&self) -> Result<Vec<UserSummary>, DomainError>;
}
#[async_trait] #[async_trait]
pub trait EventPublisher: Send + Sync { pub trait EventPublisher: Send + Sync {
async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError>; async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError>;

View File

@@ -14,6 +14,7 @@ thiserror = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }
tracing-subscriber = { workspace = true } tracing-subscriber = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }
dotenvy = { workspace = true }
uuid = { workspace = true } uuid = { workspace = true }
chrono = { workspace = true } chrono = { workspace = true }
async-trait = { workspace = true } async-trait = { workspace = true }
@@ -21,9 +22,15 @@ async-trait = { workspace = true }
domain = { workspace = true } domain = { workspace = true }
application = { workspace = true } application = { workspace = true }
auth = { workspace = true } auth = { workspace = true }
metadata = { workspace = true }
poster-fetcher = { workspace = true }
poster-storage = { workspace = true }
sqlite = { workspace = true } sqlite = { workspace = true }
sqlx = { workspace = true } sqlx = { workspace = true }
template-askama = { workspace = true } template-askama = { workspace = true }
event-publisher = { workspace = true }
rss = { workspace = true }
infer = "0.19.0"
[dev-dependencies] [dev-dependencies]
tower = { version = "0.5", features = ["util"] } tower = { version = "0.5", features = ["util"] }

View File

@@ -1,6 +1,23 @@
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use application::{commands::LogReviewCommand, queries::GetDiaryQuery};
use domain::{errors::DomainError, models::SortDirection};
fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
where
D: serde::Deserializer<'de>,
T: std::str::FromStr,
T::Err: std::fmt::Display,
{
let s = Option::<String>::deserialize(de)?;
match s.as_deref() {
None | Some("") => Ok(None),
Some(s) => s.parse::<T>().map(Some).map_err(serde::de::Error::custom),
}
}
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct DiaryQueryParams { pub struct DiaryQueryParams {
pub limit: Option<u32>, pub limit: Option<u32>,
@@ -11,15 +28,37 @@ pub struct DiaryQueryParams {
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct LogReviewForm { pub struct LogReviewForm {
#[serde(default, deserialize_with = "empty_string_as_none")]
pub external_metadata_id: Option<String>, pub external_metadata_id: Option<String>,
#[serde(default, deserialize_with = "empty_string_as_none")]
pub manual_title: Option<String>, pub manual_title: Option<String>,
#[serde(default, deserialize_with = "empty_string_as_none")]
pub manual_release_year: Option<u16>, pub manual_release_year: Option<u16>,
#[serde(default, deserialize_with = "empty_string_as_none")]
pub manual_director: Option<String>, pub manual_director: Option<String>,
pub rating: u8, pub rating: u8,
#[serde(default, deserialize_with = "empty_string_as_none")]
pub comment: Option<String>, pub comment: Option<String>,
pub watched_at: String, pub watched_at: String,
} }
/// Login form body (application/x-www-form-urlencoded).
#[derive(Deserialize)]
pub struct LoginForm {
pub email: String,
pub password: String,
}
/// Registration form body.
#[derive(Deserialize)]
pub struct RegisterForm {
pub email: String,
pub password: String,
}
/// Query string used to carry a flash error message back to a form page.
#[derive(Deserialize)]
pub struct ErrorQuery {
pub error: Option<String>,
}
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct LogReviewRequest { pub struct LogReviewRequest {
pub external_metadata_id: Option<String>, pub external_metadata_id: Option<String>,
@@ -78,12 +117,229 @@ pub struct LoginRequest {
#[derive(Serialize)] #[derive(Serialize)]
pub struct LoginResponse { pub struct LoginResponse {
pub token: String, pub token: String,
pub user_id: Uuid,
pub email: String,
pub expires_at: String,
}
/// JSON body for the registration API endpoint.
#[derive(Deserialize)]
pub struct RegisterRequest {
pub email: String,
pub password: String,
}
/// Validated review input, normalized from either the HTML form or the JSON
/// API request via the `TryFrom` impls.
pub struct LogReviewData {
pub external_metadata_id: Option<String>,
pub manual_title: Option<String>,
pub manual_release_year: Option<u16>,
pub manual_director: Option<String>,
pub rating: u8,
pub comment: Option<String>,
// Parsed and validated; the raw transport types carry this as a string.
pub watched_at: NaiveDateTime,
}
/// Field-level validation error produced while parsing `LogReviewForm`.
#[derive(Debug)]
pub struct ParseReviewError {
// Name of the offending field (static so it can double as a form anchor).
pub field: &'static str,
pub message: String,
}
impl TryFrom<LogReviewForm> for LogReviewData {
    type Error = ParseReviewError;

    /// Parses the HTML form into validated review data. Browser
    /// `datetime-local` inputs may omit seconds, so both "%H:%M:%S" and
    /// "%H:%M" endings are accepted.
    fn try_from(form: LogReviewForm) -> Result<Self, Self::Error> {
        const WITH_SECONDS: &str = "%Y-%m-%dT%H:%M:%S";
        const WITHOUT_SECONDS: &str = "%Y-%m-%dT%H:%M";
        let parsed = NaiveDateTime::parse_from_str(&form.watched_at, WITH_SECONDS)
            .or_else(|_| NaiveDateTime::parse_from_str(&form.watched_at, WITHOUT_SECONDS));
        let watched_at = match parsed {
            Ok(dt) => dt,
            Err(_) => {
                return Err(ParseReviewError {
                    field: "watched_at",
                    message: format!(
                        "invalid date '{}'; expected YYYY-MM-DDTHH:MM[:SS]",
                        form.watched_at
                    ),
                });
            }
        };
        Ok(Self {
            // Whitespace-only external ids count as absent.
            external_metadata_id: form.external_metadata_id.filter(|id| !id.trim().is_empty()),
            manual_title: form.manual_title,
            manual_release_year: form.manual_release_year,
            manual_director: form.manual_director,
            rating: form.rating,
            comment: form.comment,
            watched_at,
        })
    }
}
impl TryFrom<LogReviewRequest> for LogReviewData {
type Error = DomainError;
/// Parses the JSON API request. Unlike the HTML form, the API requires full
/// seconds precision (no "%H:%M" fallback) — this is intentional, see the
/// `api_rejects_datetime_without_seconds` test.
fn try_from(req: LogReviewRequest) -> Result<Self, Self::Error> {
let watched_at = NaiveDateTime::parse_from_str(&req.watched_at, "%Y-%m-%dT%H:%M:%S")
.map_err(|_| {
DomainError::ValidationError(
"invalid watched_at; expected YYYY-MM-DDTHH:MM:SS".into(),
)
})?;
Ok(Self {
// Whitespace-only external ids count as absent.
external_metadata_id: req.external_metadata_id.filter(|s| !s.trim().is_empty()),
manual_title: req.manual_title,
manual_release_year: req.manual_release_year,
manual_director: req.manual_director,
rating: req.rating,
comment: req.comment,
watched_at,
})
}
}
impl LogReviewData {
/// Attaches the authenticated user's id, producing the application-layer
/// command; all other fields carry over unchanged.
pub fn into_command(self, user_id: Uuid) -> LogReviewCommand {
LogReviewCommand {
external_metadata_id: self.external_metadata_id,
manual_title: self.manual_title,
manual_release_year: self.manual_release_year,
manual_director: self.manual_director,
rating: self.rating,
comment: self.comment,
watched_at: self.watched_at,
user_id,
}
}
}
impl From<DiaryQueryParams> for GetDiaryQuery {
    /// Maps raw HTTP query params onto the application-layer diary query.
    /// Any `sort_by` value other than "asc" is treated as descending;
    /// an absent `sort_by` is passed through as `None`.
    fn from(p: DiaryQueryParams) -> Self {
        let sort_by = p.sort_by.as_deref().map(|direction| match direction {
            "asc" => SortDirection::Ascending,
            _ => SortDirection::Descending,
        });
        GetDiaryQuery {
            limit: p.limit,
            offset: p.offset,
            sort_by,
            movie_id: p.movie_id,
            user_id: None,
        }
    }
}
#[derive(serde::Deserialize, Default)]
pub struct ProfileQueryParams {
pub view: Option<String>,
pub limit: Option<u32>,
pub offset: Option<u32>,
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
// Builds a minimal HTML-form payload; only `watched_at` varies per test.
fn make_form(watched_at: &str) -> LogReviewForm {
LogReviewForm {
external_metadata_id: None,
manual_title: None,
manual_release_year: None,
manual_director: None,
rating: 4,
comment: None,
watched_at: watched_at.to_string(),
}
}
// Builds a minimal JSON-API payload; only `watched_at` varies per test.
fn make_request(watched_at: &str) -> LogReviewRequest {
LogReviewRequest {
external_metadata_id: None,
manual_title: None,
manual_release_year: None,
manual_director: None,
rating: 4,
comment: None,
watched_at: watched_at.to_string(),
}
}
// --- HTML form: seconds are optional in watched_at ---
#[test]
fn form_accepts_datetime_with_seconds() {
let data = LogReviewData::try_from(make_form("2024-03-15T20:30:00")).unwrap();
assert_eq!(data.watched_at.format("%H:%M:%S").to_string(), "20:30:00");
}
#[test]
fn form_accepts_datetime_without_seconds() {
let data = LogReviewData::try_from(make_form("2024-03-15T20:30")).unwrap();
assert_eq!(data.watched_at.format("%H:%M").to_string(), "20:30");
}
#[test]
fn form_rejects_invalid_datetime() {
assert!(LogReviewData::try_from(make_form("not-a-date")).is_err());
}
// --- JSON API: seconds are mandatory in watched_at ---
#[test]
fn api_accepts_datetime_with_seconds() {
let data = LogReviewData::try_from(make_request("2024-03-15T20:30:00")).unwrap();
assert_eq!(data.watched_at.format("%H:%M:%S").to_string(), "20:30:00");
}
#[test]
fn api_rejects_datetime_without_seconds() {
assert!(LogReviewData::try_from(make_request("2024-03-15T20:30")).is_err());
}
#[test]
fn api_rejects_invalid_datetime() {
assert!(LogReviewData::try_from(make_request("garbage")).is_err());
}
// --- whitespace-only external ids normalize to None on both paths ---
#[test]
fn whitespace_external_id_becomes_none_in_form() {
let mut form = make_form("2024-03-15T20:30:00");
form.external_metadata_id = Some(" ".to_string());
let data = LogReviewData::try_from(form).unwrap();
assert!(data.external_metadata_id.is_none());
}
#[test]
fn whitespace_external_id_becomes_none_in_request() {
let mut req = make_request("2024-03-15T20:30:00");
req.external_metadata_id = Some(" ".to_string());
let data = LogReviewData::try_from(req).unwrap();
assert!(data.external_metadata_id.is_none());
}
#[test]
fn into_command_sets_user_id() {
let data = LogReviewData::try_from(make_form("2024-03-15T20:30:00")).unwrap();
let user_id = Uuid::new_v4();
let cmd = data.into_command(user_id);
assert_eq!(cmd.user_id, user_id);
}
// --- sort_by mapping: "asc" is the only ascending spelling ---
#[test]
fn sort_by_asc_string_becomes_ascending() {
let params = DiaryQueryParams {
sort_by: Some("asc".to_string()),
limit: None,
offset: None,
movie_id: None,
};
let query = GetDiaryQuery::from(params);
assert!(matches!(query.sort_by, Some(domain::models::SortDirection::Ascending)));
}
#[test]
fn sort_by_other_string_becomes_descending() {
let params = DiaryQueryParams {
sort_by: Some("desc".to_string()),
limit: None,
offset: None,
movie_id: None,
};
let query = GetDiaryQuery::from(params);
assert!(matches!(query.sort_by, Some(domain::models::SortDirection::Descending)));
}
#[test] #[test]
fn diary_response_serializes_correctly() { fn diary_response_serializes_correctly() {
let resp = DiaryResponse { let resp = DiaryResponse {

View File

@@ -18,6 +18,7 @@ impl IntoResponse for ApiError {
DomainError::InvalidRating { .. } => (StatusCode::BAD_REQUEST, self.0.to_string()), DomainError::InvalidRating { .. } => (StatusCode::BAD_REQUEST, self.0.to_string()),
DomainError::ValidationError(msg) => (StatusCode::BAD_REQUEST, msg), DomainError::ValidationError(msg) => (StatusCode::BAD_REQUEST, msg),
DomainError::NotFound(msg) => (StatusCode::NOT_FOUND, msg), DomainError::NotFound(msg) => (StatusCode::NOT_FOUND, msg),
DomainError::Unauthorized(msg) => (StatusCode::UNAUTHORIZED, msg),
DomainError::InfrastructureError(_) => { DomainError::InfrastructureError(_) => {
tracing::error!("Internal Infrastructure Error: {:?}", self.0); tracing::error!("Internal Infrastructure Error: {:?}", self.0);
( (

View File

@@ -0,0 +1,179 @@
use std::time::Duration;
use application::{commands::SyncPosterCommand, context::AppContext, use_cases::sync_poster};
use async_trait::async_trait;
use domain::{errors::DomainError, events::DomainEvent};
use event_publisher::EventHandler;
/// Event-bus subscriber that runs the poster-sync use case when a
/// `MovieDiscovered` event fires, retrying with exponential backoff.
pub struct PosterSyncHandler {
ctx: AppContext,
// Number of retries after the first attempt (max_retries + 1 attempts total).
max_retries: u32,
}
impl PosterSyncHandler {
pub fn new(ctx: AppContext, max_retries: u32) -> Self {
Self { ctx, max_retries }
}
}
#[async_trait]
impl EventHandler for PosterSyncHandler {
/// Handles `MovieDiscovered` by syncing the movie's poster, retrying on
/// failure with exponential backoff (2^attempt seconds: 1s, 2s, 4s, …).
/// All other event types are ignored and return `Ok(())`.
///
/// Returns the last error when every attempt fails.
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
let (movie_id, external_metadata_id) = match event {
DomainEvent::MovieDiscovered {
movie_id,
external_metadata_id,
} => (movie_id.value(), external_metadata_id.value().to_owned()),
// Not a poster-relevant event; nothing to do.
_ => return Ok(()),
};
let mut last_err: Option<DomainError> = None;
// 0..=max_retries: the first iteration is the initial attempt, the rest are retries.
for attempt in 0..=self.max_retries {
let cmd = SyncPosterCommand {
movie_id,
external_metadata_id: external_metadata_id.clone(),
};
match sync_poster::execute(&self.ctx, cmd).await {
Ok(()) => return Ok(()),
Err(e) => {
// Sleep only between attempts, not after the final failure.
if attempt < self.max_retries {
// NOTE(review): 2u64.pow(attempt) can overflow for very large
// max_retries values; fine for realistic retry counts.
let delay = Duration::from_secs(2u64.pow(attempt));
tracing::warn!(
attempt = attempt + 1,
max_attempts = self.max_retries + 1,
delay_secs = delay.as_secs(),
"poster sync failed, retrying: {e}"
);
tokio::time::sleep(delay).await;
}
last_err = Some(e);
}
}
}
// Reached only via the Err arm, which always sets last_err.
let err = last_err.expect("loop runs at least once and always sets last_err on Err");
tracing::error!(
attempts = self.max_retries + 1,
"poster sync failed after all attempts: {err}"
);
Err(err)
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::sync::Arc;
use application::config::AppConfig;
use domain::{
errors::DomainError,
events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
ports::{
AuthService, EventPublisher, GeneratedToken, MetadataClient, MetadataSearchCriteria,
MovieRepository, PasswordHasher, PosterFetcherClient, PosterStorage, UserRepository,
},
value_objects::{
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
Rating, ReleaseYear, ReviewId, UserId,
},
};
// Panic-stub ports: each method panics so any accidental dispatch into a service
// fails the test loudly rather than silently succeeding.
struct PanicRepo;
struct PanicMetadata;
struct PanicFetcher;
struct PanicStorage;
struct PanicAuth;
struct PanicHasher;
struct PanicUserRepo;
struct NoopPublisher;
#[async_trait]
impl MovieRepository for PanicRepo {
async fn get_movie_by_external_id(&self, _: &ExternalMetadataId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
async fn get_movies_by_title_and_year(&self, _: &MovieTitle, _: &ReleaseYear) -> Result<Vec<Movie>, DomainError> { panic!("unexpected") }
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
async fn get_review_by_id(&self, _: &ReviewId) -> Result<Option<Review>, DomainError> { panic!("unexpected") }
async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> { panic!("unexpected") }
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { panic!("unexpected") }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
async fn get_user_stats(&self, _: &UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
async fn get_user_history(&self, _: &UserId) -> Result<Vec<DiaryEntry>, DomainError> { panic!("unexpected") }
async fn get_user_trends(&self, _: &UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl MetadataClient for PanicMetadata {
async fn fetch_movie_metadata(&self, _: &MetadataSearchCriteria) -> Result<Movie, DomainError> { panic!("unexpected") }
async fn get_poster_url(&self, _: &ExternalMetadataId) -> Result<Option<PosterUrl>, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl PosterFetcherClient for PanicFetcher {
async fn fetch_poster_bytes(&self, _: &PosterUrl) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl PosterStorage for PanicStorage {
async fn store_poster(&self, _: &MovieId, _: &[u8]) -> Result<PosterPath, DomainError> { panic!("unexpected") }
async fn get_poster(&self, _: &PosterPath) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl AuthService for PanicAuth {
async fn generate_token(&self, _: &UserId) -> Result<GeneratedToken, DomainError> { panic!("unexpected") }
async fn validate_token(&self, _: &str) -> Result<UserId, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl PasswordHasher for PanicHasher {
async fn hash(&self, _: &str) -> Result<PasswordHash, DomainError> { panic!("unexpected") }
async fn verify(&self, _: &str, _: &PasswordHash) -> Result<bool, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl UserRepository for PanicUserRepo {
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") }
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") }
async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<User>, DomainError> { panic!("unexpected") }
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> { panic!("unexpected") }
}
#[async_trait]
impl EventPublisher for NoopPublisher {
async fn publish(&self, _: &DomainEvent) -> Result<(), DomainError> { Ok(()) }
}
fn panic_ctx() -> AppContext {
AppContext {
repository: Arc::new(PanicRepo),
metadata_client: Arc::new(PanicMetadata),
poster_fetcher: Arc::new(PanicFetcher),
poster_storage: Arc::new(PanicStorage),
event_publisher: Arc::new(NoopPublisher),
auth_service: Arc::new(PanicAuth),
password_hasher: Arc::new(PanicHasher),
user_repository: Arc::new(PanicUserRepo),
config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
}
}
#[tokio::test]
async fn review_logged_is_ignored() {
let handler = PosterSyncHandler::new(panic_ctx(), 3);
let event = DomainEvent::ReviewLogged {
review_id: ReviewId::generate(),
movie_id: MovieId::generate(),
user_id: UserId::generate(),
rating: Rating::new(4).unwrap(),
watched_at: chrono::NaiveDate::from_ymd_opt(2024, 1, 1).unwrap().and_hms_opt(0, 0, 0).unwrap(),
};
assert!(handler.handle(&event).await.is_ok());
}
}

View File

@@ -1,6 +1,7 @@
use axum::{ use axum::{
extract::{FromRef, FromRequestParts}, extract::{FromRef, FromRequestParts},
http::{header::AUTHORIZATION, request::Parts}, http::{header, header::AUTHORIZATION, request::Parts},
response::{IntoResponse, Redirect},
}; };
use domain::{errors::DomainError, value_objects::UserId}; use domain::{errors::DomainError, value_objects::UserId};
@@ -23,8 +24,8 @@ where
.and_then(|v| v.to_str().ok()) .and_then(|v| v.to_str().ok())
.and_then(|v| v.strip_prefix("Bearer ")) .and_then(|v| v.strip_prefix("Bearer "))
.ok_or_else(|| { .ok_or_else(|| {
ApiError(DomainError::ValidationError( ApiError(DomainError::Unauthorized(
"Missing auth token".into(), "Missing or invalid auth token".into(),
)) ))
})?; })?;
let user_id = app_state let user_id = app_state
@@ -36,6 +37,64 @@ where
} }
} }
/// Extractor yielding `Some(UserId)` when the request carries a valid session
/// cookie, `None` otherwise; it never rejects the request.
pub struct OptionalCookieUser(pub Option<UserId>);
/// Extractor that requires a valid session cookie; on a missing or invalid
/// token the request is rejected with a redirect to `/login`.
pub struct RequiredCookieUser(pub UserId);
/// Pulls the value of the `token` cookie out of the request's `Cookie`
/// header. Returns `None` when the header is absent, is not valid UTF-8, or
/// contains no `token=` pair.
fn extract_token_from_cookie(parts: &Parts) -> Option<String> {
    let raw = parts.headers.get(header::COOKIE)?.to_str().ok()?;
    // Cookie pairs are separated by ';' with optional surrounding whitespace;
    // the first pair named exactly `token` wins.
    for pair in raw.split(';') {
        if let Some(value) = pair.trim().strip_prefix("token=") {
            return Some(value.to_string());
        }
    }
    None
}
impl<S> FromRequestParts<S> for OptionalCookieUser
where
    AppState: FromRef<S>,
    S: Send + Sync,
{
    type Rejection = std::convert::Infallible;
    /// Resolves the current user from the session cookie when possible.
    /// Infallible by design: a missing cookie or a token the auth service
    /// rejects both yield `OptionalCookieUser(None)` rather than an error.
    async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        let app_state = AppState::from_ref(state);
        let maybe_user = match extract_token_from_cookie(parts) {
            None => None,
            Some(token) => app_state
                .app_ctx
                .auth_service
                .validate_token(&token)
                .await
                .ok(),
        };
        Ok(OptionalCookieUser(maybe_user))
    }
}
impl<S> FromRequestParts<S> for RequiredCookieUser
where
    AppState: FromRef<S>,
    S: Send + Sync,
{
    type Rejection = axum::response::Response;
    /// Resolves the current user from the session cookie, rejecting with a
    /// redirect to `/login` when the cookie is missing or the token fails
    /// validation against the auth service.
    async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        // Both failure modes share the same rejection: a login redirect.
        let to_login = || Redirect::to("/login").into_response();
        let app_state = AppState::from_ref(state);
        let Some(token) = extract_token_from_cookie(parts) else {
            return Err(to_login());
        };
        match app_state.app_ctx.auth_service.validate_token(&token).await {
            Ok(user_id) => Ok(RequiredCookieUser(user_id)),
            Err(_) => Err(to_login()),
        }
    }
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@@ -58,10 +117,9 @@ mod tests {
} }
#[tokio::test] #[tokio::test]
async fn missing_auth_header_returns_400() { async fn missing_auth_header_returns_401() {
use std::sync::Arc; use std::sync::Arc;
use application::context::AppContext; use application::context::AppContext;
use auth::StubAuthService;
struct PanicRepo; struct PanicRepo;
#[async_trait::async_trait] #[async_trait::async_trait]
@@ -73,19 +131,39 @@ mod tests {
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() } async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() } async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() } async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
} }
struct PanicRenderer; struct PanicRenderer;
impl crate::ports::HtmlRenderer for PanicRenderer { impl crate::ports::HtmlRenderer for PanicRenderer {
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>) -> Result<String, String> { panic!() } fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
} }
struct PanicMeta; struct PanicFetcher; struct PanicStorage; struct PanicEvent; struct PanicHasher; struct PanicRssRenderer;
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta { async fn fetch_movie_metadata(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } } impl crate::ports::RssFeedRenderer for PanicRssRenderer {
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
}
struct PanicMeta; struct PanicFetcher; struct PanicStorage; struct PanicEvent; struct PanicHasher; struct PanicAuth; struct PanicUserRepo;
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } } #[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
let state = crate::state::AppState { let state = crate::state::AppState {
app_ctx: AppContext { app_ctx: AppContext {
@@ -94,10 +172,13 @@ mod tests {
poster_fetcher: Arc::new(PanicFetcher), poster_fetcher: Arc::new(PanicFetcher),
poster_storage: Arc::new(PanicStorage), poster_storage: Arc::new(PanicStorage),
event_publisher: Arc::new(PanicEvent), event_publisher: Arc::new(PanicEvent),
auth_service: Arc::new(StubAuthService), auth_service: Arc::new(PanicAuth),
password_hasher: Arc::new(PanicHasher), password_hasher: Arc::new(PanicHasher),
user_repository: Arc::new(PanicUserRepo),
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
}, },
html_renderer: Arc::new(PanicRenderer), html_renderer: Arc::new(PanicRenderer),
rss_renderer: Arc::new(PanicRssRenderer),
}; };
let app = test_router(state); let app = test_router(state);
@@ -111,6 +192,217 @@ mod tests {
.await .await
.unwrap(); .unwrap();
assert_eq!(response.status(), StatusCode::BAD_REQUEST); assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
// Reusable helpers for cookie extractor tests
async fn optional_cookie_handler(user: OptionalCookieUser) -> String {
match user.0 {
Some(id) => id.value().to_string(),
None => "none".to_string(),
}
}
async fn required_cookie_handler(user: RequiredCookieUser) -> String {
user.0.value().to_string()
}
fn test_router_optional(state: crate::state::AppState) -> Router {
Router::new()
.route("/optional", get(optional_cookie_handler))
.with_state(state)
}
fn test_router_required(state: crate::state::AppState) -> Router {
Router::new()
.route("/required", get(required_cookie_handler))
.with_state(state)
}
struct RejectingAuth;
#[async_trait::async_trait]
impl domain::ports::AuthService for RejectingAuth {
async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() }
async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> {
Err(domain::errors::DomainError::Unauthorized("bad token".into()))
}
}
fn panic_state() -> crate::state::AppState {
use std::sync::Arc;
use application::context::AppContext;
struct PanicRepo2;
#[async_trait::async_trait]
impl domain::ports::MovieRepository for PanicRepo2 {
async fn get_movie_by_external_id(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
async fn get_movie_by_id(&self, _: &domain::value_objects::MovieId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
async fn get_movies_by_title_and_year(&self, _: &domain::value_objects::MovieTitle, _: &domain::value_objects::ReleaseYear) -> Result<Vec<domain::models::Movie>, domain::errors::DomainError> { panic!() }
async fn upsert_movie(&self, _: &domain::models::Movie) -> Result<(), domain::errors::DomainError> { panic!() }
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
}
struct PanicMeta2; struct PanicFetcher2; struct PanicStorage2; struct PanicEvent2; struct PanicHasher2; struct PanicUserRepo2;
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta2 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher2 { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage2 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent2 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher2 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth2 { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo2 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
struct PanicRenderer2;
impl crate::ports::HtmlRenderer for PanicRenderer2 {
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
}
struct PanicRssRenderer2;
impl crate::ports::RssFeedRenderer for PanicRssRenderer2 {
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
}
struct PanicAuth2;
crate::state::AppState {
app_ctx: AppContext {
repository: Arc::new(PanicRepo2),
metadata_client: Arc::new(PanicMeta2),
poster_fetcher: Arc::new(PanicFetcher2),
poster_storage: Arc::new(PanicStorage2),
event_publisher: Arc::new(PanicEvent2),
auth_service: Arc::new(PanicAuth2),
password_hasher: Arc::new(PanicHasher2),
user_repository: Arc::new(PanicUserRepo2),
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
},
html_renderer: Arc::new(PanicRenderer2),
rss_renderer: Arc::new(PanicRssRenderer2),
}
}
fn rejecting_state() -> crate::state::AppState {
use std::sync::Arc;
use application::context::AppContext;
struct PanicRepo3;
#[async_trait::async_trait]
impl domain::ports::MovieRepository for PanicRepo3 {
async fn get_movie_by_external_id(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
async fn get_movie_by_id(&self, _: &domain::value_objects::MovieId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
async fn get_movies_by_title_and_year(&self, _: &domain::value_objects::MovieTitle, _: &domain::value_objects::ReleaseYear) -> Result<Vec<domain::models::Movie>, domain::errors::DomainError> { panic!() }
async fn upsert_movie(&self, _: &domain::models::Movie) -> Result<(), domain::errors::DomainError> { panic!() }
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
}
struct PanicMeta3; struct PanicFetcher3; struct PanicStorage3; struct PanicEvent3; struct PanicHasher3; struct PanicUserRepo3;
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta3 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher3 { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage3 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent3 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher3 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo3 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
struct PanicRenderer3;
impl crate::ports::HtmlRenderer for PanicRenderer3 {
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
}
struct PanicRssRenderer3;
impl crate::ports::RssFeedRenderer for PanicRssRenderer3 {
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
}
crate::state::AppState {
app_ctx: AppContext {
repository: Arc::new(PanicRepo3),
metadata_client: Arc::new(PanicMeta3),
poster_fetcher: Arc::new(PanicFetcher3),
poster_storage: Arc::new(PanicStorage3),
event_publisher: Arc::new(PanicEvent3),
auth_service: Arc::new(RejectingAuth),
password_hasher: Arc::new(PanicHasher3),
user_repository: Arc::new(PanicUserRepo3),
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
},
html_renderer: Arc::new(PanicRenderer3),
rss_renderer: Arc::new(PanicRssRenderer3),
}
}
#[tokio::test]
async fn optional_cookie_user_returns_none_without_cookie() {
let app = test_router_optional(panic_state());
let response = app
.oneshot(Request::builder().uri("/optional").body(Body::empty()).unwrap())
.await
.unwrap();
assert_eq!(response.status(), StatusCode::OK);
let body = axum::body::to_bytes(response.into_body(), usize::MAX).await.unwrap();
assert_eq!(&body[..], b"none");
}
#[tokio::test]
async fn optional_cookie_user_returns_none_with_invalid_token() {
let app = test_router_optional(rejecting_state());
let response = app
.oneshot(
Request::builder()
.uri("/optional")
.header("cookie", "token=bad.token.here")
.body(Body::empty())
.unwrap(),
)
.await
.unwrap();
assert_eq!(response.status(), StatusCode::OK);
let body = axum::body::to_bytes(response.into_body(), usize::MAX).await.unwrap();
assert_eq!(&body[..], b"none");
}
#[tokio::test]
async fn required_cookie_user_redirects_without_cookie() {
let app = test_router_required(panic_state());
let response = app
.oneshot(Request::builder().uri("/required").body(Body::empty()).unwrap())
.await
.unwrap();
assert_eq!(response.status(), StatusCode::SEE_OTHER);
assert_eq!(response.headers().get("location").unwrap(), "/login");
}
#[tokio::test]
async fn required_cookie_user_redirects_with_invalid_token() {
let app = test_router_required(rejecting_state());
let response = app
.oneshot(
Request::builder()
.uri("/required")
.header("cookie", "token=bad.token.here")
.body(Body::empty())
.unwrap(),
)
.await
.unwrap();
assert_eq!(response.status(), StatusCode::SEE_OTHER);
assert_eq!(response.headers().get("location").unwrap(), "/login");
} }
} }

Some files were not shown because too many files have changed in this diff. Show More