Compare commits
80 Commits
f790fa2a0f
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| 7a66661932 | |||
| b30a6a102b | |||
| 38a3aa6bbf | |||
| 3135a15cb3 | |||
| d083f8ae3d | |||
| 874c406d4a | |||
| 78e1f4ef72 | |||
| cf74b06b4a | |||
| 317898d51b | |||
| 790bb6fbb5 | |||
| 658df38788 | |||
| cff0f854fa | |||
| 66ade70273 | |||
| cbd2ac5b3e | |||
| 0433cd4d9b | |||
| b5a8ea2395 | |||
| 49b79799c1 | |||
| f4aba551a2 | |||
| 91df35dbd3 | |||
| 623f90e43f | |||
| e28f628c80 | |||
| 60c25d4c24 | |||
| 22aafe99be | |||
| 0ff22cca5f | |||
| ccc39e27e4 | |||
| 76319756f4 | |||
| 7703227970 | |||
| b9933bb48d | |||
| 0c48708ce6 | |||
| a2a889bced | |||
| a4846f3bea | |||
| 27be840faa | |||
| 965fc0eda8 | |||
| d700b85337 | |||
| ffbab75910 | |||
| dda7c40f7f | |||
| 1b827b1bdd | |||
| 1ee6873a60 | |||
| 7352b533ff | |||
| 85e254fee2 | |||
| fa8221322d | |||
| 38da37de55 | |||
| f3dedbad8a | |||
| d468ce131f | |||
| d034af9e9c | |||
| 59d308f41b | |||
| bbb2ee00d6 | |||
| 5dd9aac68d | |||
| 6dcc4c8317 | |||
| e31d99a240 | |||
| 41fec1efa5 | |||
| 160c08d1c4 | |||
| 7aa6d7bf4d | |||
| 144f2f8e0c | |||
| cff64f7a6b | |||
| 5baff54cb9 | |||
| f94d2db8b1 | |||
| 48875a6e86 | |||
| 9387ae705b | |||
| 9871e21bc0 | |||
| fa8efbaa23 | |||
| d769a5b55c | |||
| 8e1fb1a974 | |||
| 6145b873f5 | |||
| cc668ae44d | |||
| e5097c22dd | |||
| 450468ef3d | |||
| 6e7c6467a7 | |||
| 7f815f8207 | |||
| 5df89200d4 | |||
| eb273dc277 | |||
| 5689db0ad7 | |||
| 5c70b8b8be | |||
| 4c547df04e | |||
| 602df8df22 | |||
| 5b69a3a7c0 | |||
| a38f78d261 | |||
| 17f90726e8 | |||
| 563f33212e | |||
| 8e5ac9f433 |
10
.dockerignore
Normal file
10
.dockerignore
Normal file
@@ -0,0 +1,10 @@
|
||||
target/
|
||||
.git/
|
||||
.env
|
||||
*.db
|
||||
*.db-shm
|
||||
*.db-wal
|
||||
.cargo/
|
||||
.sqlx/
|
||||
docs/
|
||||
dev.db
|
||||
@@ -1,5 +1,7 @@
|
||||
DATABASE_URL=sqlite:./dev.db
|
||||
BASE_URL=http://localhost:3000
|
||||
PORT=3000
|
||||
SECURE_COOKIES=false
|
||||
JWT_SECRET=
|
||||
JWT_TTL_SECONDS=
|
||||
ALLOW_REGISTRATION=true
|
||||
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -8,5 +8,9 @@
|
||||
.env.prod
|
||||
|
||||
*.db
|
||||
*db-shm
|
||||
*db-wal
|
||||
|
||||
.worktrees/
|
||||
.superpowers/
|
||||
docs/
|
||||
|
||||
20
.sqlx/query-0cd1a7b7255a0ee753deffab7cbb48027d22900a570b98a636c780cb3e2efd23.json
generated
Normal file
20
.sqlx/query-0cd1a7b7255a0ee753deffab7cbb48027d22900a570b98a636c780cb3e2efd23.json
generated
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT COUNT(*) FROM reviews WHERE user_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "COUNT(*)",
|
||||
"ordinal": 0,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "0cd1a7b7255a0ee753deffab7cbb48027d22900a570b98a636c780cb3e2efd23"
|
||||
}
|
||||
32
.sqlx/query-1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399.json
generated
Normal file
32
.sqlx/query-1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399.json
generated
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "password_hash",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
|
||||
}
|
||||
98
.sqlx/query-217854179b4f77897178e6cfae51fb743e5be49ffc59826509be37a7cc81b6ee.json
generated
Normal file
98
.sqlx/query-217854179b4f77897178e6cfae51fb743e5be49ffc59826509be37a7cc81b6ee.json
generated
Normal file
@@ -0,0 +1,98 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,\n u.email AS user_email\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n INNER JOIN users u ON u.id = r.user_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_email",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "217854179b4f77897178e6cfae51fb743e5be49ffc59826509be37a7cc81b6ee"
|
||||
}
|
||||
20
.sqlx/query-4d85f0ff9732576bba77dc84d3885a0002c2b600c34ba4d99f1e1c5e99f35e75.json
generated
Normal file
20
.sqlx/query-4d85f0ff9732576bba77dc84d3885a0002c2b600c34ba4d99f1e1c5e99f35e75.json
generated
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT strftime('%Y-%m', watched_at) AS month\n FROM reviews\n WHERE user_id = ?\n GROUP BY month\n ORDER BY COUNT(*) DESC\n LIMIT 1",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "month",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "4d85f0ff9732576bba77dc84d3885a0002c2b600c34ba4d99f1e1c5e99f35e75"
|
||||
}
|
||||
92
.sqlx/query-5a861b5a934c9831ff17d896fa48feb95e6dab051c5ac55a66f9793482522199.json
generated
Normal file
92
.sqlx/query-5a861b5a934c9831ff17d896fa48feb95e6dab051c5ac55a66f9793482522199.json
generated
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "5a861b5a934c9831ff17d896fa48feb95e6dab051c5ac55a66f9793482522199"
|
||||
}
|
||||
56
.sqlx/query-70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2.json
generated
Normal file
56
.sqlx/query-70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2.json
generated
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2"
|
||||
}
|
||||
92
.sqlx/query-8d144859b397a842118c2dc4ab30e74015a814ed8185b6f86fbe39e641ab804e.json
generated
Normal file
92
.sqlx/query-8d144859b397a842118c2dc4ab30e74015a814ed8185b6f86fbe39e641ab804e.json
generated
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "8d144859b397a842118c2dc4ab30e74015a814ed8185b6f86fbe39e641ab804e"
|
||||
}
|
||||
26
.sqlx/query-a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22.json
generated
Normal file
26
.sqlx/query-a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22.json
generated
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT COUNT(DISTINCT movie_id) AS \"total!: i64\",\n AVG(CAST(rating AS REAL)) AS avg_rating\n FROM reviews WHERE user_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "total!: i64",
|
||||
"ordinal": 0,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "avg_rating",
|
||||
"ordinal": 1,
|
||||
"type_info": "Float"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22"
|
||||
}
|
||||
92
.sqlx/query-a3f4385bac7f78a9959648fb325d37096c87859ded1762137ce745955f46830c.json
generated
Normal file
92
.sqlx/query-a3f4385bac7f78a9959648fb325d37096c87859ded1762137ce745955f46830c.json
generated
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.rating DESC, r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "a3f4385bac7f78a9959648fb325d37096c87859ded1762137ce745955f46830c"
|
||||
}
|
||||
26
.sqlx/query-aca9e7aaa32c23b4de3f5048d60340e978d31a36be9121da3c59378f2fc1ed8e.json
generated
Normal file
26
.sqlx/query-aca9e7aaa32c23b4de3f5048d60340e978d31a36be9121da3c59378f2fc1ed8e.json
generated
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.director AS \"director!\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 5",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "director!",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "count!: i64",
|
||||
"ordinal": 1,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
true,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "aca9e7aaa32c23b4de3f5048d60340e978d31a36be9121da3c59378f2fc1ed8e"
|
||||
}
|
||||
20
.sqlx/query-d5d2a81306488a8cee5654cea7e14d76d76ecc7d2190ffb73d12bec2874111d2.json
generated
Normal file
20
.sqlx/query-d5d2a81306488a8cee5654cea7e14d76d76ecc7d2190ffb73d12bec2874111d2.json
generated
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.director\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 1",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "d5d2a81306488a8cee5654cea7e14d76d76ecc7d2190ffb73d12bec2874111d2"
|
||||
}
|
||||
12
.sqlx/query-e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883.json
generated
Normal file
12
.sqlx/query-e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883.json
generated
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "DELETE FROM movies WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883"
|
||||
}
|
||||
38
.sqlx/query-f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318.json
generated
Normal file
38
.sqlx/query-f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318.json
generated
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT u.id,\n u.email,\n COUNT(DISTINCT r.movie_id) AS \"total_movies!: i64\",\n AVG(CAST(r.rating AS REAL)) AS avg_rating\n FROM users u\n LEFT JOIN reviews r ON r.user_id = u.id\n GROUP BY u.id, u.email\n ORDER BY u.email ASC",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "total_movies!: i64",
|
||||
"ordinal": 2,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "avg_rating",
|
||||
"ordinal": 3,
|
||||
"type_info": "Float"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318"
|
||||
}
|
||||
12
.sqlx/query-f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc.json
generated
Normal file
12
.sqlx/query-f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc.json
generated
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "DELETE FROM reviews WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc"
|
||||
}
|
||||
32
.sqlx/query-fdd5b522f26b5e0ce62f76c774fbb606fd9ee9884f4457831f693a0df3609317.json
generated
Normal file
32
.sqlx/query-fdd5b522f26b5e0ce62f76c774fbb606fd9ee9884f4457831f693a0df3609317.json
generated
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT strftime('%Y-%m', watched_at) AS \"month!\",\n AVG(CAST(rating AS REAL)) AS \"avg_rating!: f64\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews\n WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')\n GROUP BY \"month!\"\n ORDER BY \"month!\" ASC",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "month!",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "avg_rating!: f64",
|
||||
"ordinal": 1,
|
||||
"type_info": "Float"
|
||||
},
|
||||
{
|
||||
"name": "count!: i64",
|
||||
"ordinal": 2,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "fdd5b522f26b5e0ce62f76c774fbb606fd9ee9884f4457831f693a0df3609317"
|
||||
}
|
||||
44
Cargo.lock
generated
44
Cargo.lock
generated
@@ -36,8 +36,10 @@ checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
|
||||
name = "application"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"chrono",
|
||||
"domain",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"uuid",
|
||||
]
|
||||
@@ -331,6 +333,17 @@ dependencies = [
|
||||
"shlex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfb"
|
||||
version = "0.7.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d38f2da7a0a2c4ccf0065be06397cc26a81f4e528be095826eee9d4adbb8c60f"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"fnv",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.4"
|
||||
@@ -376,13 +389,6 @@ dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "common"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "concurrent-queue"
|
||||
version = "2.5.0"
|
||||
@@ -598,7 +604,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"chrono",
|
||||
"common",
|
||||
"email_address",
|
||||
"thiserror",
|
||||
"uuid",
|
||||
@@ -681,6 +686,16 @@ dependencies = [
|
||||
"pin-project-lite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "event-publisher"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"domain",
|
||||
"tokio",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "find-msvc-tools"
|
||||
version = "0.1.9"
|
||||
@@ -1235,6 +1250,15 @@ dependencies = [
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "infer"
|
||||
version = "0.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a588916bfdfd92e71cacef98a63d9b1f0d74d6599980d11894290e7ddefffcf7"
|
||||
dependencies = [
|
||||
"cfb",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.12.0"
|
||||
@@ -1737,6 +1761,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"domain",
|
||||
"infer",
|
||||
"object_store",
|
||||
"tokio",
|
||||
"tracing",
|
||||
@@ -1779,7 +1804,9 @@ dependencies = [
|
||||
"chrono",
|
||||
"domain",
|
||||
"dotenvy",
|
||||
"event-publisher",
|
||||
"http-body-util",
|
||||
"infer",
|
||||
"metadata",
|
||||
"poster-fetcher",
|
||||
"poster-storage",
|
||||
@@ -2804,6 +2831,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"application",
|
||||
"askama",
|
||||
"chrono",
|
||||
"domain",
|
||||
"serde",
|
||||
]
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
[workspace]
|
||||
members = [
|
||||
"crates/adapters/auth",
|
||||
"crates/adapters/metadata", "crates/adapters/poster-fetcher", "crates/adapters/poster-storage",
|
||||
"crates/adapters/event-publisher",
|
||||
"crates/adapters/metadata",
|
||||
"crates/adapters/poster-fetcher",
|
||||
"crates/adapters/poster-storage",
|
||||
"crates/adapters/rss",
|
||||
"crates/adapters/sqlite",
|
||||
"crates/adapters/template-askama",
|
||||
"crates/application",
|
||||
"crates/common",
|
||||
"crates/domain",
|
||||
"crates/presentation",
|
||||
]
|
||||
@@ -34,13 +36,13 @@ reqwest = { version = "0.13", features = ["json", "query"] }
|
||||
object_store = { version = "0.11", features = ["aws"] }
|
||||
|
||||
domain = { path = "crates/domain" }
|
||||
common = { path = "crates/common" }
|
||||
application = { path = "crates/application" }
|
||||
presentation = { path = "crates/presentation" }
|
||||
auth = { path = "crates/adapters/auth" }
|
||||
metadata = { path = "crates/adapters/metadata" }
|
||||
poster-fetcher = { path = "crates/adapters/poster-fetcher" }
|
||||
poster-storage = { path = "crates/adapters/poster-storage" }
|
||||
event-publisher = { path = "crates/adapters/event-publisher" }
|
||||
rss = { path = "crates/adapters/rss" }
|
||||
sqlite = { path = "crates/adapters/sqlite" }
|
||||
template-askama = { path = "crates/adapters/template-askama" }
|
||||
|
||||
57
Dockerfile
Normal file
57
Dockerfile
Normal file
@@ -0,0 +1,57 @@
|
||||
# ----- build -----
|
||||
FROM rust:slim-bookworm AS builder
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends sqlite3 && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /build
|
||||
|
||||
# Cache dependency compilation separately from source
|
||||
COPY Cargo.toml Cargo.lock ./
|
||||
COPY crates/adapters/auth/Cargo.toml crates/adapters/auth/Cargo.toml
|
||||
COPY crates/adapters/event-publisher/Cargo.toml crates/adapters/event-publisher/Cargo.toml
|
||||
COPY crates/adapters/metadata/Cargo.toml crates/adapters/metadata/Cargo.toml
|
||||
COPY crates/adapters/poster-fetcher/Cargo.toml crates/adapters/poster-fetcher/Cargo.toml
|
||||
COPY crates/adapters/poster-storage/Cargo.toml crates/adapters/poster-storage/Cargo.toml
|
||||
COPY crates/adapters/rss/Cargo.toml crates/adapters/rss/Cargo.toml
|
||||
COPY crates/adapters/sqlite/Cargo.toml crates/adapters/sqlite/Cargo.toml
|
||||
COPY crates/adapters/template-askama/Cargo.toml crates/adapters/template-askama/Cargo.toml
|
||||
COPY crates/application/Cargo.toml crates/application/Cargo.toml
|
||||
COPY crates/domain/Cargo.toml crates/domain/Cargo.toml
|
||||
COPY crates/presentation/Cargo.toml crates/presentation/Cargo.toml
|
||||
|
||||
# Stub every crate so cargo can resolve and fetch deps
|
||||
RUN find crates -name "Cargo.toml" | sed 's|/Cargo.toml||' | \
|
||||
xargs -I{} sh -c 'mkdir -p {}/src && echo "fn main(){}" > {}/src/main.rs && echo "" > {}/src/lib.rs'
|
||||
|
||||
RUN cargo fetch
|
||||
|
||||
# Now copy real sources (invalidates cache only on source changes)
|
||||
COPY crates ./crates
|
||||
|
||||
# sqlx macros verify queries at compile time; create a real DB from migrations
|
||||
RUN sqlite3 /build/dev.db \
|
||||
< crates/adapters/sqlite/migrations/0001_initial.sql && \
|
||||
sqlite3 /build/dev.db \
|
||||
< crates/adapters/sqlite/migrations/0002_users.sql
|
||||
|
||||
ENV DATABASE_URL=sqlite:///build/dev.db
|
||||
|
||||
RUN cargo build --release -p presentation
|
||||
|
||||
# ----- runtime -----
|
||||
FROM debian:bookworm-slim
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /build/target/release/presentation ./presentation
|
||||
COPY static ./static
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
ENV RUST_LOG=presentation=info,tower_http=info
|
||||
|
||||
CMD ["./presentation"]
|
||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2026 Gabriel Kaszewski
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
85
README.md
Normal file
85
README.md
Normal file
@@ -0,0 +1,85 @@
|
||||
# Movies Diary
|
||||
|
||||
A self-hosted, server-side rendered movie logging system. Built in Rust — no JavaScript, no SPA, just HTML forms and an RSS feed. Designed to run as a lightweight widget embedded on a personal site.
|
||||
|
||||
## Features
|
||||
|
||||
- Log movies with a TMDB/OMDb ID and a 0–5 rating
|
||||
- Immutable append-only viewing ledger (tracks re-watches)
|
||||
- Background poster fetching and storage (S3-compatible)
|
||||
- RSS/Atom feed for public subscription
|
||||
- JWT authentication via cookie (HTML) or Bearer token (REST API)
|
||||
- Zero JavaScript
|
||||
|
||||
## Architecture
|
||||
|
||||
Hexagonal (Ports & Adapters) with Domain-Driven Design:
|
||||
|
||||
```
|
||||
domain — pure types and trait definitions, no external deps
|
||||
application — use cases / business logic orchestration
|
||||
presentation — Axum HTTP router, wires all adapters together
|
||||
adapters/
|
||||
auth — JWT issuance and validation (Argon2 passwords)
|
||||
sqlite — SQLite repository via sqlx
|
||||
metadata — OMDb HTTP client
|
||||
poster-fetcher — downloads poster images
|
||||
poster-storage — uploads posters to S3-compatible storage
|
||||
template-askama — Askama HTML rendering
|
||||
rss — RSS/Atom feed generation
|
||||
event-publisher — async event channel for background poster sync
|
||||
```
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Rust (stable, 2024 edition)
|
||||
- SQLite
|
||||
- An S3-compatible object store (e.g. MinIO) for poster storage
|
||||
- An [OMDb API key](https://www.omdbapi.com/apikey.aspx)
|
||||
|
||||
## Environment Variables
|
||||
|
||||
Copy and fill in the following (e.g. in a `.env` file):
|
||||
|
||||
```env
|
||||
# Database
|
||||
DATABASE_URL=sqlite://movies.db
|
||||
|
||||
# Authentication
|
||||
JWT_SECRET=change-me
|
||||
JWT_TTL_SECONDS=86400
|
||||
|
||||
# OMDb metadata
|
||||
OMDB_API_KEY=your-key
|
||||
|
||||
# Poster storage (S3-compatible)
|
||||
MINIO_ENDPOINT=http://localhost:9000
|
||||
MINIO_BUCKET=posters
|
||||
MINIO_REGION=us-east-1
|
||||
MINIO_ACCESS_KEY_ID=minioadmin
|
||||
MINIO_SECRET_ACCESS_KEY=minioadmin
|
||||
|
||||
# Optional
|
||||
ALLOW_REGISTRATION=false
|
||||
POSTER_FETCH_TIMEOUT_SECONDS=10
|
||||
EVENT_CHANNEL_BUFFER=32
|
||||
RUST_LOG=presentation=debug,tower_http=debug
|
||||
```
|
||||
|
||||
## Run
|
||||
|
||||
```bash
|
||||
cargo run -p presentation
|
||||
```
|
||||
|
||||
Server listens on `0.0.0.0:3000`.
|
||||
|
||||
## Test
|
||||
|
||||
```bash
|
||||
cargo test
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT License. See [LICENSE](LICENSE).
|
||||
10
crates/adapters/event-publisher/Cargo.toml
Normal file
10
crates/adapters/event-publisher/Cargo.toml
Normal file
@@ -0,0 +1,10 @@
|
||||
[package]
|
||||
name = "event-publisher"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
domain = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
209
crates/adapters/event-publisher/src/lib.rs
Normal file
209
crates/adapters/event-publisher/src/lib.rs
Normal file
@@ -0,0 +1,209 @@
|
||||
use async_trait::async_trait;
|
||||
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
pub struct EventPublisherConfig {
|
||||
pub channel_buffer: usize,
|
||||
}
|
||||
|
||||
impl EventPublisherConfig {
|
||||
pub fn from_env() -> Self {
|
||||
let channel_buffer = std::env::var("EVENT_CHANNEL_BUFFER")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(128);
|
||||
Self { channel_buffer }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait EventHandler: Send + Sync {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
||||
}
|
||||
|
||||
pub struct ChannelEventPublisher {
|
||||
sender: mpsc::Sender<DomainEvent>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventPublisher for ChannelEventPublisher {
|
||||
async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||
self.sender
|
||||
.send(event.clone())
|
||||
.await
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EventWorker {
|
||||
receiver: mpsc::Receiver<DomainEvent>,
|
||||
handlers: Vec<Box<dyn EventHandler>>,
|
||||
}
|
||||
|
||||
impl EventWorker {
|
||||
pub async fn run(mut self) {
|
||||
while let Some(event) = self.receiver.recv().await {
|
||||
match &event {
|
||||
DomainEvent::ReviewLogged {
|
||||
review_id,
|
||||
movie_id,
|
||||
user_id,
|
||||
rating,
|
||||
watched_at,
|
||||
} => {
|
||||
tracing::info!(
|
||||
review_id = %review_id.value(),
|
||||
movie_id = %movie_id.value(),
|
||||
user_id = %user_id.value(),
|
||||
rating = rating.value(),
|
||||
watched_at = %watched_at,
|
||||
"event: review_logged"
|
||||
);
|
||||
}
|
||||
DomainEvent::MovieDiscovered {
|
||||
movie_id,
|
||||
external_metadata_id,
|
||||
} => {
|
||||
tracing::info!(
|
||||
movie_id = %movie_id.value(),
|
||||
external_id = external_metadata_id.value(),
|
||||
"event: movie_discovered"
|
||||
);
|
||||
}
|
||||
}
|
||||
for handler in &self.handlers {
|
||||
if let Err(e) = handler.handle(&event).await {
|
||||
tracing::error!("event handler error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
tracing::info!("event worker shut down");
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NoopEventPublisher;
|
||||
|
||||
#[async_trait]
|
||||
impl EventPublisher for NoopEventPublisher {
|
||||
async fn publish(&self, _event: &DomainEvent) -> Result<(), DomainError> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_event_channel(
|
||||
config: EventPublisherConfig,
|
||||
handlers: Vec<Box<dyn EventHandler>>,
|
||||
) -> (ChannelEventPublisher, EventWorker) {
|
||||
let (tx, rx) = mpsc::channel(config.channel_buffer);
|
||||
(
|
||||
ChannelEventPublisher { sender: tx },
|
||||
EventWorker {
|
||||
receiver: rx,
|
||||
handlers,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
value_objects::{ExternalMetadataId, MovieId},
|
||||
};
|
||||
|
||||
struct RecordingHandler {
|
||||
calls: Arc<Mutex<Vec<String>>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventHandler for RecordingHandler {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||
let label = match event {
|
||||
DomainEvent::MovieDiscovered { .. } => "movie_discovered",
|
||||
DomainEvent::ReviewLogged { .. } => "review_logged",
|
||||
};
|
||||
self.calls.lock().unwrap().push(label.to_string());
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn single_handler_receives_event() {
|
||||
let calls = Arc::new(Mutex::new(vec![]));
|
||||
let handler = RecordingHandler { calls: Arc::clone(&calls) };
|
||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||
let (publisher, worker) = create_event_channel(config, vec![Box::new(handler)]);
|
||||
|
||||
let handle = tokio::spawn(worker.run());
|
||||
|
||||
let event = DomainEvent::MovieDiscovered {
|
||||
movie_id: MovieId::generate(),
|
||||
external_metadata_id: ExternalMetadataId::new("tt1234567".into()).unwrap(),
|
||||
};
|
||||
publisher.publish(&event).await.unwrap();
|
||||
drop(publisher);
|
||||
handle.await.unwrap();
|
||||
|
||||
assert_eq!(*calls.lock().unwrap(), vec!["movie_discovered"]);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn multiple_handlers_all_receive_event() {
|
||||
let calls1 = Arc::new(Mutex::new(vec![]));
|
||||
let calls2 = Arc::new(Mutex::new(vec![]));
|
||||
let handler1 = RecordingHandler { calls: Arc::clone(&calls1) };
|
||||
let handler2 = RecordingHandler { calls: Arc::clone(&calls2) };
|
||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||
let (publisher, worker) = create_event_channel(
|
||||
config,
|
||||
vec![Box::new(handler1), Box::new(handler2)],
|
||||
);
|
||||
|
||||
let handle = tokio::spawn(worker.run());
|
||||
|
||||
let event = DomainEvent::MovieDiscovered {
|
||||
movie_id: MovieId::generate(),
|
||||
external_metadata_id: ExternalMetadataId::new("tt9999999".into()).unwrap(),
|
||||
};
|
||||
publisher.publish(&event).await.unwrap();
|
||||
drop(publisher);
|
||||
handle.await.unwrap();
|
||||
|
||||
assert_eq!(calls1.lock().unwrap().len(), 1);
|
||||
assert_eq!(calls2.lock().unwrap().len(), 1);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn handler_error_does_not_stop_worker() {
|
||||
struct FailingHandler;
|
||||
#[async_trait]
|
||||
impl EventHandler for FailingHandler {
|
||||
async fn handle(&self, _: &DomainEvent) -> Result<(), DomainError> {
|
||||
Err(DomainError::InfrastructureError("boom".into()))
|
||||
}
|
||||
}
|
||||
|
||||
let calls = Arc::new(Mutex::new(vec![]));
|
||||
let good = RecordingHandler { calls: Arc::clone(&calls) };
|
||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||
let (publisher, worker) = create_event_channel(
|
||||
config,
|
||||
vec![Box::new(FailingHandler), Box::new(good)],
|
||||
);
|
||||
|
||||
let handle = tokio::spawn(worker.run());
|
||||
|
||||
let event = DomainEvent::MovieDiscovered {
|
||||
movie_id: MovieId::generate(),
|
||||
external_metadata_id: ExternalMetadataId::new("tt0000001".into()).unwrap(),
|
||||
};
|
||||
publisher.publish(&event).await.unwrap();
|
||||
drop(publisher);
|
||||
handle.await.unwrap();
|
||||
|
||||
assert_eq!(calls.lock().unwrap().len(), 1);
|
||||
}
|
||||
}
|
||||
@@ -9,6 +9,7 @@ anyhow = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
object_store = { workspace = true }
|
||||
infer = "0.19.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { workspace = true }
|
||||
|
||||
@@ -7,9 +7,15 @@ use domain::{
|
||||
ports::PosterStorage,
|
||||
value_objects::{MovieId, PosterPath},
|
||||
};
|
||||
use object_store::{path::Path, ObjectStore};
|
||||
use object_store::{Attribute, Attributes, PutOptions, path::Path, ObjectStore};
|
||||
use std::sync::Arc;
|
||||
|
||||
fn detect_mime(bytes: &[u8]) -> &'static str {
|
||||
infer::get(bytes)
|
||||
.map(|t| t.mime_type())
|
||||
.unwrap_or("application/octet-stream")
|
||||
}
|
||||
|
||||
pub struct PosterStorageAdapter {
|
||||
store: Arc<dyn ObjectStore>,
|
||||
}
|
||||
@@ -32,8 +38,12 @@ impl PosterStorage for PosterStorageAdapter {
|
||||
image_bytes: &[u8],
|
||||
) -> Result<PosterPath, DomainError> {
|
||||
let path = Path::from(movie_id.value().to_string());
|
||||
let mime = detect_mime(image_bytes);
|
||||
let mut attributes = Attributes::new();
|
||||
attributes.insert(Attribute::ContentType, mime.into());
|
||||
let opts = PutOptions { attributes, ..Default::default() };
|
||||
self.store
|
||||
.put(&path, image_bytes.to_vec().into())
|
||||
.put_opts(&path, image_bytes.to_vec().into(), opts)
|
||||
.await
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
PosterPath::new(path.to_string())
|
||||
|
||||
@@ -3,22 +3,21 @@ use domain::models::DiaryEntry;
|
||||
use rss_feed::{ChannelBuilder, GuidBuilder, ItemBuilder};
|
||||
|
||||
pub struct RssAdapter {
|
||||
feed_title: String,
|
||||
feed_link: String,
|
||||
}
|
||||
|
||||
impl RssAdapter {
|
||||
pub fn new(feed_title: String, feed_link: String) -> Self {
|
||||
Self { feed_title, feed_link }
|
||||
pub fn new(feed_link: String) -> Self {
|
||||
Self { feed_link }
|
||||
}
|
||||
}
|
||||
|
||||
impl RssFeedRenderer for RssAdapter {
|
||||
fn render_feed(&self, entries: &[DiaryEntry]) -> Result<String, String> {
|
||||
fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String> {
|
||||
let items = entries
|
||||
.iter()
|
||||
.map(|e| {
|
||||
let title = format!(
|
||||
let item_title = format!(
|
||||
"{} ({})",
|
||||
e.movie().title().value(),
|
||||
e.movie().release_year().value()
|
||||
@@ -38,7 +37,7 @@ impl RssFeedRenderer for RssAdapter {
|
||||
.permalink(false)
|
||||
.build();
|
||||
ItemBuilder::default()
|
||||
.title(Some(title))
|
||||
.title(Some(item_title))
|
||||
.description(Some(description))
|
||||
.pub_date(Some(pub_date))
|
||||
.guid(Some(guid))
|
||||
@@ -47,12 +46,31 @@ impl RssFeedRenderer for RssAdapter {
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let channel = ChannelBuilder::default()
|
||||
.title(self.feed_title.clone())
|
||||
.title(title.to_string())
|
||||
.link(self.feed_link.clone())
|
||||
.description(self.feed_title.clone())
|
||||
.description(title.to_string())
|
||||
.items(items)
|
||||
.build();
|
||||
|
||||
Ok(channel.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn render_feed_uses_provided_title() {
|
||||
let adapter = RssAdapter::new("http://example.com".into());
|
||||
let xml = adapter.render_feed(&[], "Custom Title").unwrap();
|
||||
assert!(xml.contains("<title>Custom Title</title>"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn render_feed_empty_entries_produces_valid_xml() {
|
||||
let adapter = RssAdapter::new("http://example.com".into());
|
||||
let xml = adapter.render_feed(&[], "My Feed").unwrap();
|
||||
assert!(xml.starts_with("<?xml") || xml.starts_with("<rss"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "01a08873b7fa815ad98a56a0902b60414cfcdc2c7a8570351320c4bc425347c6"
|
||||
}
|
||||
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "026e2afeb573707cb360fcdab8f6137aabfaf603b5ed57b98ac2888b4a0389ff"
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT COUNT(*) FROM reviews",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "COUNT(*)",
|
||||
"ordinal": 0,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "0963b9661182e139cd760bbabb0d6ea3a301a2a3adbdfdda4a88f333a1144c77"
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, email, password_hash FROM users WHERE email = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "password_hash",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171"
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82"
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "password_hash",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE title = ? AND release_year = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "3047579c6ed13ce87aad9b9ce6300c02f0df3516979518976e13f9d9abc6a403"
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "33d0dae7d16b0635c1c7eb5afd10824bb55af7cc7a854f590d326622863759d1"
|
||||
}
|
||||
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "47f7cf95ce3450635b643ab710cadba96f40319140834d510bc5207b2552e055"
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT COUNT(*) FROM reviews WHERE movie_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "COUNT(*)",
|
||||
"ordinal": 0,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "4b3074b532342c6356ee0e8e4d8c4a830f016234bb690e1f6240f02824d6d84f"
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "INSERT INTO reviews (id, movie_id, user_id, rating, comment, watched_at, created_at)\n VALUES (?, ?, ?, ?, ?, ?, ?)",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 7
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "630e092fcd33bc312befef352a98225e6e18e6079644b949258a39bf4b0fe3e5"
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE external_metadata_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "7bc4aebcb94547976d3d7e063e4e908fc22b977b3cbf063ee93ffe4648c42011"
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "INSERT INTO movies (id, external_metadata_id, title, release_year, director, poster_path)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(id) DO UPDATE SET\n external_metadata_id = excluded.external_metadata_id,\n title = excluded.title,\n release_year = excluded.release_year,\n director = excluded.director,\n poster_path = excluded.poster_path",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 6
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "7d7e23355ee0e442f2aa27e898dcfa40bdc4b09391afe04325f076157d9d84aa"
|
||||
}
|
||||
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE movie_id = ? ORDER BY watched_at ASC",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "af883f8b78f185077e2d3dcfaa0a6e62fbdfbf00c97c9b33b699dc631476181d"
|
||||
}
|
||||
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "affe1eb261283c09d4b1ce6e684681755f079a044ffec8ff2bd79cfd8efe16b8"
|
||||
}
|
||||
@@ -3,11 +3,12 @@ use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
models::{
|
||||
DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, SortDirection,
|
||||
collections::Paginated,
|
||||
DiaryEntry, DiaryFilter, DirectorStat, FeedEntry, Movie, MonthlyRating,
|
||||
Review, ReviewHistory, SortDirection, UserStats, UserTrends,
|
||||
collections::{PageParams, Paginated},
|
||||
},
|
||||
ports::MovieRepository,
|
||||
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear},
|
||||
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear, ReviewId, UserId},
|
||||
};
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
@@ -15,10 +16,26 @@ mod migrations;
|
||||
mod models;
|
||||
mod users;
|
||||
|
||||
use models::{DiaryRow, MovieRow, ReviewRow, datetime_to_str};
|
||||
use models::{
|
||||
DiaryRow, DirectorCountRow, FeedRow, MonthlyRatingRow, MovieRow, ReviewRow,
|
||||
UserTotalsRow, datetime_to_str,
|
||||
};
|
||||
|
||||
pub use users::SqliteUserRepository;
|
||||
|
||||
fn format_year_month(ym: &str) -> String {
|
||||
let parts: Vec<&str> = ym.splitn(2, '-').collect();
|
||||
if parts.len() != 2 { return ym.to_string(); }
|
||||
let year = parts[0].get(2..).unwrap_or(parts[0]);
|
||||
let month = match parts[1] {
|
||||
"01" => "Jan", "02" => "Feb", "03" => "Mar", "04" => "Apr",
|
||||
"05" => "May", "06" => "Jun", "07" => "Jul", "08" => "Aug",
|
||||
"09" => "Sep", "10" => "Oct", "11" => "Nov", "12" => "Dec",
|
||||
_ => parts[1],
|
||||
};
|
||||
format!("{} '{}", month, year)
|
||||
}
|
||||
|
||||
pub struct SqliteMovieRepository {
|
||||
pool: SqlitePool,
|
||||
}
|
||||
@@ -52,17 +69,15 @@ impl SqliteMovieRepository {
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_diary_rows(
|
||||
async fn fetch_all_diary_rows(
|
||||
&self,
|
||||
movie_id: Option<&str>,
|
||||
sort: &SortDirection,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<DiaryRow>, DomainError> {
|
||||
// sqlx macros require literal ORDER BY values; separate branches also let the
|
||||
// query planner use the movie_id index instead of falling back to a filtered scan.
|
||||
match (movie_id, sort) {
|
||||
(None, SortDirection::Descending) => sqlx::query_as!(
|
||||
match sort {
|
||||
// ByRatingDesc only applies to user-scoped queries; falls back to date sort here
|
||||
SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
@@ -77,7 +92,7 @@ impl SqliteMovieRepository {
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
|
||||
(None, SortDirection::Ascending) => sqlx::query_as!(
|
||||
SortDirection::Ascending => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
@@ -91,42 +106,188 @@ impl SqliteMovieRepository {
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
|
||||
(Some(id), SortDirection::Descending) => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.movie_id = ?
|
||||
ORDER BY r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
id,
|
||||
limit,
|
||||
offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
|
||||
(Some(id), SortDirection::Ascending) => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.movie_id = ?
|
||||
ORDER BY r.watched_at ASC
|
||||
LIMIT ? OFFSET ?",
|
||||
id,
|
||||
limit,
|
||||
offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_movie_diary_rows(
|
||||
&self,
|
||||
movie_id: &str,
|
||||
sort: &SortDirection,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<DiaryRow>, DomainError> {
|
||||
match sort {
|
||||
// ByRatingDesc only applies to user-scoped queries; falls back to date sort here
|
||||
SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.movie_id = ?
|
||||
ORDER BY r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
movie_id,
|
||||
limit,
|
||||
offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
|
||||
SortDirection::Ascending => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.movie_id = ?
|
||||
ORDER BY r.watched_at ASC
|
||||
LIMIT ? OFFSET ?",
|
||||
movie_id,
|
||||
limit,
|
||||
offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
}
|
||||
}
|
||||
|
||||
async fn count_user_diary_entries(&self, user_id: &str) -> Result<i64, DomainError> {
|
||||
sqlx::query_scalar!(
|
||||
"SELECT COUNT(*) FROM reviews WHERE user_id = ?",
|
||||
user_id
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_user_diary_rows_by_watched(
|
||||
&self,
|
||||
user_id: &str,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<DiaryRow>, DomainError> {
|
||||
sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ?
|
||||
ORDER BY r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
user_id, limit, offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_user_diary_rows_by_rating(
|
||||
&self,
|
||||
user_id: &str,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<DiaryRow>, DomainError> {
|
||||
sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ?
|
||||
ORDER BY r.rating DESC, r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
user_id, limit, offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn count_feed_entries(&self) -> Result<i64, DomainError> {
|
||||
sqlx::query_scalar!("SELECT COUNT(*) FROM reviews")
|
||||
.fetch_one(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_feed_rows(
|
||||
&self,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<FeedRow>, DomainError> {
|
||||
sqlx::query_as!(
|
||||
FeedRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,
|
||||
u.email AS user_email
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
INNER JOIN users u ON u.id = r.user_id
|
||||
ORDER BY r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
limit, offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_user_totals(&self, user_id: &str) -> Result<UserTotalsRow, DomainError> {
|
||||
sqlx::query_as!(
|
||||
UserTotalsRow,
|
||||
r#"SELECT COUNT(DISTINCT movie_id) AS "total!: i64",
|
||||
AVG(CAST(rating AS REAL)) AS avg_rating
|
||||
FROM reviews WHERE user_id = ?"#,
|
||||
user_id
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_user_favorite_director(
|
||||
&self,
|
||||
user_id: &str,
|
||||
) -> Result<Option<String>, DomainError> {
|
||||
let row = sqlx::query_scalar!(
|
||||
"SELECT m.director
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ? AND m.director IS NOT NULL
|
||||
GROUP BY m.director
|
||||
ORDER BY COUNT(*) DESC
|
||||
LIMIT 1",
|
||||
user_id
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
Ok(row.flatten())
|
||||
}
|
||||
|
||||
async fn fetch_user_most_active_month(
|
||||
&self,
|
||||
user_id: &str,
|
||||
) -> Result<Option<String>, DomainError> {
|
||||
let result: Option<Option<String>> = sqlx::query_scalar!(
|
||||
"SELECT strftime('%Y-%m', watched_at) AS month
|
||||
FROM reviews
|
||||
WHERE user_id = ?
|
||||
GROUP BY month
|
||||
ORDER BY COUNT(*) DESC
|
||||
LIMIT 1",
|
||||
user_id
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
Ok(result.flatten())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
@@ -251,14 +412,40 @@ impl MovieRepository for SqliteMovieRepository {
|
||||
}
|
||||
|
||||
async fn query_diary(&self, filter: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> {
|
||||
let movie_id: Option<String> = filter.movie_id.as_ref().map(|id| id.value().to_string());
|
||||
let limit = filter.page.limit as i64;
|
||||
let offset = filter.page.offset as i64;
|
||||
|
||||
let (total, rows) = tokio::try_join!(
|
||||
self.count_diary_entries(movie_id.as_deref()),
|
||||
self.fetch_diary_rows(movie_id.as_deref(), &filter.sort_by, limit, offset)
|
||||
)?;
|
||||
let (total, rows) = match (&filter.movie_id, &filter.user_id) {
|
||||
(None, None) => tokio::try_join!(
|
||||
self.count_diary_entries(None),
|
||||
self.fetch_all_diary_rows(&filter.sort_by, limit, offset)
|
||||
)?,
|
||||
(Some(id), None) => {
|
||||
let id_str = id.value().to_string();
|
||||
tokio::try_join!(
|
||||
self.count_diary_entries(Some(id_str.as_str())),
|
||||
self.fetch_movie_diary_rows(&id_str, &filter.sort_by, limit, offset)
|
||||
)?
|
||||
}
|
||||
(None, Some(uid)) => {
|
||||
let uid_str = uid.value().to_string();
|
||||
match &filter.sort_by {
|
||||
SortDirection::ByRatingDesc => tokio::try_join!(
|
||||
self.count_user_diary_entries(&uid_str),
|
||||
self.fetch_user_diary_rows_by_rating(&uid_str, limit, offset)
|
||||
)?,
|
||||
_ => tokio::try_join!(
|
||||
self.count_user_diary_entries(&uid_str),
|
||||
self.fetch_user_diary_rows_by_watched(&uid_str, limit, offset)
|
||||
)?,
|
||||
}
|
||||
}
|
||||
(Some(_), Some(_)) => {
|
||||
return Err(DomainError::ValidationError(
|
||||
"Combined movie_id + user_id filter not supported".into(),
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let items = rows
|
||||
.into_iter()
|
||||
@@ -273,6 +460,39 @@ impl MovieRepository for SqliteMovieRepository {
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_review_by_id(&self, review_id: &ReviewId) -> Result<Option<Review>, DomainError> {
|
||||
let id = review_id.value().to_string();
|
||||
sqlx::query_as!(
|
||||
ReviewRow,
|
||||
"SELECT id, movie_id, user_id, rating, comment, watched_at, created_at
|
||||
FROM reviews WHERE id = ?",
|
||||
id
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?
|
||||
.map(ReviewRow::to_domain)
|
||||
.transpose()
|
||||
}
|
||||
|
||||
async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError> {
|
||||
let id = review_id.value().to_string();
|
||||
sqlx::query!("DELETE FROM reviews WHERE id = ?", id)
|
||||
.execute(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError> {
|
||||
let id = movie_id.value().to_string();
|
||||
sqlx::query!("DELETE FROM movies WHERE id = ?", id)
|
||||
.execute(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError> {
|
||||
let id_str = movie_id.value().to_string();
|
||||
|
||||
@@ -303,4 +523,119 @@ impl MovieRepository for SqliteMovieRepository {
|
||||
|
||||
Ok(ReviewHistory::new(movie, viewings))
|
||||
}
|
||||
|
||||
async fn query_activity_feed(
|
||||
&self,
|
||||
page: &PageParams,
|
||||
) -> Result<Paginated<FeedEntry>, DomainError> {
|
||||
let limit = page.limit as i64;
|
||||
let offset = page.offset as i64;
|
||||
|
||||
let (total, rows) = tokio::try_join!(
|
||||
self.count_feed_entries(),
|
||||
self.fetch_feed_rows(limit, offset)
|
||||
)?;
|
||||
|
||||
let items = rows
|
||||
.into_iter()
|
||||
.map(FeedRow::to_domain)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
Ok(Paginated {
|
||||
items,
|
||||
total_count: total as u64,
|
||||
limit: page.limit,
|
||||
offset: page.offset,
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_user_stats(&self, user_id: &UserId) -> Result<UserStats, DomainError> {
|
||||
let uid = user_id.value().to_string();
|
||||
|
||||
let (totals, fav_director, most_active) = tokio::try_join!(
|
||||
self.fetch_user_totals(&uid),
|
||||
self.fetch_user_favorite_director(&uid),
|
||||
self.fetch_user_most_active_month(&uid)
|
||||
)?;
|
||||
|
||||
let most_active_month = most_active.map(|ym| format_year_month(&ym));
|
||||
|
||||
Ok(UserStats {
|
||||
total_movies: totals.total,
|
||||
avg_rating: totals.avg_rating,
|
||||
favorite_director: fav_director,
|
||||
most_active_month,
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_user_history(&self, user_id: &UserId) -> Result<Vec<DiaryEntry>, DomainError> {
|
||||
let uid = user_id.value().to_string();
|
||||
let rows = sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ?
|
||||
ORDER BY r.watched_at DESC",
|
||||
uid
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
|
||||
rows.into_iter().map(DiaryRow::to_domain).collect()
|
||||
}
|
||||
|
||||
async fn get_user_trends(&self, user_id: &UserId) -> Result<UserTrends, DomainError> {
|
||||
let uid = user_id.value().to_string();
|
||||
|
||||
let (rating_rows, director_rows) = tokio::try_join!(
|
||||
sqlx::query_as!(
|
||||
MonthlyRatingRow,
|
||||
r#"SELECT strftime('%Y-%m', watched_at) AS "month!",
|
||||
AVG(CAST(rating AS REAL)) AS "avg_rating!: f64",
|
||||
COUNT(*) AS "count!: i64"
|
||||
FROM reviews
|
||||
WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')
|
||||
GROUP BY "month!"
|
||||
ORDER BY "month!" ASC"#,
|
||||
uid
|
||||
)
|
||||
.fetch_all(&self.pool),
|
||||
sqlx::query_as!(
|
||||
DirectorCountRow,
|
||||
r#"SELECT m.director AS "director!",
|
||||
COUNT(*) AS "count!: i64"
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ? AND m.director IS NOT NULL
|
||||
GROUP BY m.director
|
||||
ORDER BY COUNT(*) DESC
|
||||
LIMIT 5"#,
|
||||
uid
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
)
|
||||
.map_err(Self::map_err)?;
|
||||
|
||||
let max_director_count = director_rows.iter().map(|d| d.count).max().unwrap_or(1);
|
||||
|
||||
let monthly_ratings = rating_rows
|
||||
.into_iter()
|
||||
.map(|r| MonthlyRating {
|
||||
month_label: format_year_month(&r.month),
|
||||
year_month: r.month,
|
||||
avg_rating: r.avg_rating,
|
||||
count: r.count,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let top_directors = director_rows
|
||||
.into_iter()
|
||||
.map(|d| DirectorStat { director: d.director, count: d.count })
|
||||
.collect();
|
||||
|
||||
Ok(UserTrends { monthly_ratings, top_directors, max_director_count })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use chrono::NaiveDateTime;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::{DiaryEntry, Movie, Review},
|
||||
models::{DiaryEntry, FeedEntry, Movie, Review, UserSummary},
|
||||
value_objects::{
|
||||
Comment, ExternalMetadataId, MovieId, MovieTitle, PosterPath, Rating, ReleaseYear,
|
||||
ReviewId, UserId,
|
||||
@@ -111,6 +111,85 @@ impl DiaryRow {
|
||||
}
|
||||
}
|
||||
|
||||
// Like DiaryRow but includes user_email from JOIN with users table
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct FeedRow {
|
||||
pub id: String,
|
||||
pub external_metadata_id: Option<String>,
|
||||
pub title: String,
|
||||
pub release_year: i64,
|
||||
pub director: Option<String>,
|
||||
pub poster_path: Option<String>,
|
||||
pub review_id: String,
|
||||
pub movie_id: String,
|
||||
pub user_id: String,
|
||||
pub rating: i64,
|
||||
pub comment: Option<String>,
|
||||
pub watched_at: String,
|
||||
pub created_at: String,
|
||||
pub user_email: String,
|
||||
}
|
||||
|
||||
impl FeedRow {
|
||||
pub fn to_domain(self) -> Result<FeedEntry, DomainError> {
|
||||
let diary = DiaryRow {
|
||||
id: self.id,
|
||||
external_metadata_id: self.external_metadata_id,
|
||||
title: self.title,
|
||||
release_year: self.release_year,
|
||||
director: self.director,
|
||||
poster_path: self.poster_path,
|
||||
review_id: self.review_id,
|
||||
movie_id: self.movie_id,
|
||||
user_id: self.user_id,
|
||||
rating: self.rating,
|
||||
comment: self.comment,
|
||||
watched_at: self.watched_at,
|
||||
created_at: self.created_at,
|
||||
}
|
||||
.to_domain()?;
|
||||
Ok(FeedEntry::new(diary, self.user_email))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct UserSummaryRow {
|
||||
pub id: String,
|
||||
pub email: String,
|
||||
pub total_movies: i64,
|
||||
pub avg_rating: Option<f64>,
|
||||
}
|
||||
|
||||
impl UserSummaryRow {
|
||||
pub fn to_domain(self) -> Result<UserSummary, DomainError> {
|
||||
Ok(UserSummary {
|
||||
user_id: UserId::from_uuid(parse_uuid(&self.id)?),
|
||||
email: self.email,
|
||||
total_movies: self.total_movies,
|
||||
avg_rating: self.avg_rating,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct UserTotalsRow {
|
||||
pub total: i64,
|
||||
pub avg_rating: Option<f64>,
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct DirectorCountRow {
|
||||
pub director: String,
|
||||
pub count: i64,
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct MonthlyRatingRow {
|
||||
pub month: String,
|
||||
pub avg_rating: f64,
|
||||
pub count: i64,
|
||||
}
|
||||
|
||||
pub(crate) fn parse_uuid(s: &str) -> Result<Uuid, DomainError> {
|
||||
Uuid::parse_str(s)
|
||||
.map_err(|e| DomainError::InfrastructureError(format!("Invalid UUID '{}': {}", s, e)))
|
||||
|
||||
@@ -8,6 +8,7 @@ use domain::{
|
||||
ports::UserRepository,
|
||||
value_objects::{Email, PasswordHash, UserId},
|
||||
};
|
||||
use super::models::UserSummaryRow;
|
||||
|
||||
pub struct SqliteUserRepository {
|
||||
pool: SqlitePool,
|
||||
@@ -73,4 +74,99 @@ impl UserRepository for SqliteUserRepository {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError> {
|
||||
let id_str = id.value().to_string();
|
||||
let row = sqlx::query!(
|
||||
"SELECT id, email, password_hash FROM users WHERE id = ?",
|
||||
id_str
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
|
||||
match row {
|
||||
None => Ok(None),
|
||||
Some(r) => {
|
||||
let uuid = uuid::Uuid::parse_str(&r.id)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
let email = Email::new(r.email)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
let hash = PasswordHash::new(r.password_hash)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
Ok(Some(User::from_persistence(UserId::from_uuid(uuid), email, hash)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> {
|
||||
sqlx::query_as!(
|
||||
UserSummaryRow,
|
||||
r#"SELECT u.id,
|
||||
u.email,
|
||||
COUNT(DISTINCT r.movie_id) AS "total_movies!: i64",
|
||||
AVG(CAST(r.rating AS REAL)) AS avg_rating
|
||||
FROM users u
|
||||
LEFT JOIN reviews r ON r.user_id = u.id
|
||||
GROUP BY u.id, u.email
|
||||
ORDER BY u.email ASC"#
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?
|
||||
.into_iter()
|
||||
.map(UserSummaryRow::to_domain)
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
async fn setup() -> (SqlitePool, SqliteUserRepository) {
|
||||
let pool = SqlitePool::connect(":memory:").await.unwrap();
|
||||
sqlx::query(
|
||||
"CREATE TABLE users (id TEXT PRIMARY KEY, email TEXT NOT NULL UNIQUE, password_hash TEXT NOT NULL, created_at TEXT NOT NULL)"
|
||||
)
|
||||
.execute(&pool)
|
||||
.await
|
||||
.unwrap();
|
||||
let repo = SqliteUserRepository::new(pool.clone());
|
||||
(pool, repo)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn find_by_id_returns_none_when_not_found() {
|
||||
let (_, repo) = setup().await;
|
||||
let result = repo
|
||||
.find_by_id(&UserId::from_uuid(uuid::Uuid::new_v4()))
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(result.is_none());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn find_by_id_returns_user_when_found() {
|
||||
let (pool, repo) = setup().await;
|
||||
let id = uuid::Uuid::new_v4();
|
||||
sqlx::query(
|
||||
"INSERT INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)"
|
||||
)
|
||||
.bind(id.to_string())
|
||||
.bind("test@example.com")
|
||||
.bind("$argon2id$v=19$m=65536,t=2,p=1$fakesalt$fakehash")
|
||||
.bind("2026-01-01T00:00:00Z")
|
||||
.execute(&pool)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let result = repo
|
||||
.find_by_id(&UserId::from_uuid(id))
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(result.is_some());
|
||||
assert_eq!(result.unwrap().email().value(), "test@example.com");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ edition = "2024"
|
||||
askama = { version = "0.16.0" }
|
||||
|
||||
serde = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
|
||||
domain = { workspace = true }
|
||||
application = { workspace = true }
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
use askama::Template;
|
||||
use application::ports::HtmlRenderer;
|
||||
use domain::models::{DiaryEntry, collections::Paginated};
|
||||
use chrono::Datelike;
|
||||
use application::ports::{
|
||||
ActivityFeedPageData, HtmlPageContext, HtmlRenderer, LoginPageData,
|
||||
NewReviewPageData, ProfilePageData, RegisterPageData, UsersPageData,
|
||||
};
|
||||
use domain::models::{
|
||||
DiaryEntry, FeedEntry, MonthActivity, MonthlyRating, UserStats, UserSummary, UserTrends,
|
||||
collections::Paginated,
|
||||
};
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "diary.html")]
|
||||
@@ -9,6 +16,117 @@ struct DiaryTemplate<'a> {
|
||||
current_offset: u32,
|
||||
limit: u32,
|
||||
has_more: bool,
|
||||
ctx: &'a HtmlPageContext,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "login.html")]
|
||||
struct LoginTemplate<'a> {
|
||||
error: Option<&'a str>,
|
||||
ctx: &'a HtmlPageContext,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "register.html")]
|
||||
struct RegisterTemplate<'a> {
|
||||
error: Option<&'a str>,
|
||||
ctx: &'a HtmlPageContext,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "new_review.html")]
|
||||
struct NewReviewTemplate<'a> {
|
||||
error: Option<&'a str>,
|
||||
ctx: &'a HtmlPageContext,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "activity_feed.html")]
|
||||
struct ActivityFeedTemplate<'a> {
|
||||
entries: &'a [FeedEntry],
|
||||
current_offset: u32,
|
||||
limit: u32,
|
||||
has_more: bool,
|
||||
ctx: &'a HtmlPageContext,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "users.html")]
|
||||
struct UsersTemplate<'a> {
|
||||
users: &'a [UserSummary],
|
||||
ctx: &'a HtmlPageContext,
|
||||
}
|
||||
|
||||
struct MonthlyRatingRow<'a> {
|
||||
rating: &'a MonthlyRating,
|
||||
bar_height_px: i64,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "profile.html")]
|
||||
struct ProfileTemplate<'a> {
|
||||
ctx: &'a HtmlPageContext,
|
||||
profile_display_name: String,
|
||||
stats: &'a UserStats,
|
||||
view: &'a str,
|
||||
entries: Option<&'a Paginated<DiaryEntry>>,
|
||||
current_offset: u32,
|
||||
has_more: bool,
|
||||
limit: u32,
|
||||
history: Option<&'a Vec<MonthActivity>>,
|
||||
trends: Option<&'a UserTrends>,
|
||||
monthly_rating_rows: Vec<MonthlyRatingRow<'a>>,
|
||||
heatmap: Vec<HeatmapCell>,
|
||||
}
|
||||
|
||||
struct HeatmapCell {
|
||||
month_label: String,
|
||||
count: i64,
|
||||
alpha: f64,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn relative_time(dt: chrono::NaiveDateTime) -> String {
|
||||
let now = chrono::Utc::now().naive_utc();
|
||||
let diff = now.signed_duration_since(dt);
|
||||
if diff.num_seconds() <= 0 { return "just now".to_string(); }
|
||||
let minutes = diff.num_minutes();
|
||||
let hours = diff.num_hours();
|
||||
let days = diff.num_days();
|
||||
if minutes < 1 { return "just now".to_string(); }
|
||||
if minutes < 60 { return format!("{} min ago", minutes); }
|
||||
if hours < 24 { return format!("{} h ago", hours); }
|
||||
if days == 1 { return "yesterday".to_string(); }
|
||||
if days < 30 { return format!("{} days ago", days); }
|
||||
dt.format("%b %-d, %Y").to_string()
|
||||
}
|
||||
|
||||
fn build_heatmap(history: &[MonthActivity]) -> Vec<HeatmapCell> {
|
||||
let current_year = chrono::Utc::now().year();
|
||||
let count_for = |m: &str| -> i64 {
|
||||
history.iter().find(|a| a.year_month == format!("{}-{}", current_year, m))
|
||||
.map(|a| a.count)
|
||||
.unwrap_or(0)
|
||||
};
|
||||
let months = [
|
||||
("01", "Jan"), ("02", "Feb"), ("03", "Mar"), ("04", "Apr"),
|
||||
("05", "May"), ("06", "Jun"), ("07", "Jul"), ("08", "Aug"),
|
||||
("09", "Sep"), ("10", "Oct"), ("11", "Nov"), ("12", "Dec"),
|
||||
];
|
||||
let counts: Vec<i64> = months.iter().map(|(m, _)| count_for(m)).collect();
|
||||
let max = counts.iter().copied().max().unwrap_or(0).max(1);
|
||||
months.iter().zip(counts.iter()).map(|((_, label), &count)| {
|
||||
let alpha = if count == 0 { 0.05 } else { 0.15 + 0.75 * (count as f64 / max as f64) };
|
||||
HeatmapCell {
|
||||
month_label: label.to_string(),
|
||||
count,
|
||||
alpha,
|
||||
}
|
||||
}).collect()
|
||||
}
|
||||
|
||||
fn bar_height_px(avg_rating: f64) -> i64 {
|
||||
(avg_rating / 5.0 * 60.0) as i64
|
||||
}
|
||||
|
||||
pub struct AskamaHtmlRenderer;
|
||||
@@ -20,16 +138,94 @@ impl AskamaHtmlRenderer {
|
||||
}
|
||||
|
||||
impl HtmlRenderer for AskamaHtmlRenderer {
|
||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String> {
|
||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String> {
|
||||
let has_more = (data.offset + data.limit) < data.total_count as u32;
|
||||
|
||||
let template = DiaryTemplate {
|
||||
DiaryTemplate {
|
||||
entries: &data.items,
|
||||
current_offset: data.offset,
|
||||
limit: data.limit,
|
||||
has_more,
|
||||
};
|
||||
ctx: &ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
template.render().map_err(|e| e.to_string())
|
||||
fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String> {
|
||||
LoginTemplate {
|
||||
error: data.error,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String> {
|
||||
RegisterTemplate {
|
||||
error: data.error,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String> {
|
||||
NewReviewTemplate {
|
||||
error: data.error,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String> {
|
||||
ActivityFeedTemplate {
|
||||
entries: &data.entries.items,
|
||||
current_offset: data.current_offset,
|
||||
limit: data.limit,
|
||||
has_more: data.has_more,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
fn render_users_page(&self, data: UsersPageData) -> Result<String, String> {
|
||||
UsersTemplate {
|
||||
users: &data.users,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String> {
|
||||
let heatmap = data.history.as_deref()
|
||||
.map(|h| build_heatmap(h))
|
||||
.unwrap_or_default();
|
||||
let profile_display_name = data.profile_user_email
|
||||
.split('@').next().unwrap_or(&data.profile_user_email).to_string();
|
||||
let monthly_rating_rows: Vec<MonthlyRatingRow<'_>> = data.trends.as_ref()
|
||||
.map(|t| t.monthly_ratings.iter().map(|r| MonthlyRatingRow {
|
||||
bar_height_px: bar_height_px(r.avg_rating),
|
||||
rating: r,
|
||||
}).collect())
|
||||
.unwrap_or_default();
|
||||
ProfileTemplate {
|
||||
ctx: &data.ctx,
|
||||
profile_display_name,
|
||||
stats: &data.stats,
|
||||
view: &data.view,
|
||||
entries: data.entries.as_ref(),
|
||||
current_offset: data.current_offset,
|
||||
has_more: data.has_more,
|
||||
limit: data.limit,
|
||||
history: data.history.as_ref(),
|
||||
trends: data.trends.as_ref(),
|
||||
monthly_rating_rows,
|
||||
heatmap,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
50
crates/adapters/template-askama/templates/activity_feed.html
Normal file
50
crates/adapters/template-askama/templates/activity_feed.html
Normal file
@@ -0,0 +1,50 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<div class="diary">
|
||||
{% for entry in entries %}
|
||||
<article class="entry">
|
||||
{% if let Some(poster) = entry.movie().poster_path() %}
|
||||
<div class="poster">
|
||||
<img src="/posters/{{ poster.value() }}" alt="">
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="entry-body">
|
||||
<div class="entry-title">
|
||||
{{ entry.movie().title().value() }}
|
||||
<span class="year">({{ entry.movie().release_year().value() }})</span>
|
||||
</div>
|
||||
{% if let Some(dir) = entry.movie().director() %}
|
||||
<div class="director">{{ dir }}</div>
|
||||
{% endif %}
|
||||
<div class="rating">
|
||||
{% for filled in entry.review().stars() %}
|
||||
<span class="star {% if filled %}filled{% else %}empty{% endif %}">★</span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% if let Some(comment) = entry.review().comment() %}
|
||||
<div class="comment">{{ comment.value() }}</div>
|
||||
{% endif %}
|
||||
<div class="feed-meta">
|
||||
<a href="/users/{{ entry.review().user_id().value() }}" class="feed-user">{{ entry.user_display_name() }}</a>
|
||||
<span class="feed-time">{{ entry.review().watched_at().format("%b %-d, %Y") }}</span>
|
||||
</div>
|
||||
{% if ctx.is_current_user(entry.review().user_id().value()) %}
|
||||
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
|
||||
<button type="submit">Delete</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
</article>
|
||||
{% else %}
|
||||
<p class="empty">No movies logged yet.</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<nav class="pagination">
|
||||
{% if current_offset >= limit %}
|
||||
<a href="/?offset={{ current_offset - limit }}">← Prev</a>
|
||||
{% endif %}
|
||||
{% if has_more %}
|
||||
<a href="/?offset={{ current_offset + limit }}">Next →</a>
|
||||
{% endif %}
|
||||
</nav>
|
||||
{% endblock %}
|
||||
42
crates/adapters/template-askama/templates/base.html
Normal file
42
crates/adapters/template-askama/templates/base.html
Normal file
@@ -0,0 +1,42 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>{{ ctx.page_title }}</title>
|
||||
<meta name="description" content="A personal movie diary — track what you watch, rate and review films.">
|
||||
<meta property="og:type" content="website">
|
||||
<meta property="og:site_name" content="Movies Diary">
|
||||
<meta property="og:title" content="{{ ctx.page_title }}">
|
||||
<meta property="og:url" content="{{ ctx.canonical_url }}">
|
||||
<meta name="twitter:card" content="summary">
|
||||
<meta name="twitter:title" content="{{ ctx.page_title }}">
|
||||
<link rel="canonical" href="{{ ctx.canonical_url }}">
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||
<link href="https://fonts.googleapis.com/css2?family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet">
|
||||
<link rel="stylesheet" href="/static/style.css">
|
||||
</head>
|
||||
<body>
|
||||
<header>
|
||||
<a href="/" class="site-title">Movies Diary</a>
|
||||
<nav>
|
||||
<a href="/">Feed</a>
|
||||
<a href="/users">Users</a>
|
||||
<a href="{{ ctx.rss_url }}">RSS</a>
|
||||
{% if let Some(email) = ctx.user_email %}
|
||||
<a href="/reviews/new">Add Review</a>
|
||||
<a href="/logout">Logout</a>
|
||||
{% else %}
|
||||
<a href="/login">Login</a>
|
||||
{% if ctx.register_enabled %}
|
||||
<a href="/register">Register</a>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</nav>
|
||||
</header>
|
||||
<main>
|
||||
{% block content %}{% endblock %}
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,76 +1,51 @@
|
||||
<!-- crates/presentation/templates/diary.html -->
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>My Movie Diary</title>
|
||||
<style>
|
||||
/* Minimalist old-school styling */
|
||||
body { font-family: monospace; max-width: 800px; margin: 0 auto; padding: 20px; }
|
||||
.entry { border-bottom: 1px solid #ccc; padding: 10px 0; }
|
||||
.poster { max-width: 100px; float: left; margin-right: 15px; }
|
||||
.clear { clear: both; }
|
||||
.error { color: red; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Movie Diary</h1>
|
||||
|
||||
<!-- Zero-JS Form Submission -->
|
||||
<form action="/reviews" method="POST">
|
||||
<fieldset>
|
||||
<legend>Log a Movie</legend>
|
||||
|
||||
<label for="tmdb_id">TMDB ID (Optional):</label>
|
||||
<input type="text" name="external_metadata_id" id="tmdb_id"><br><br>
|
||||
|
||||
<label for="title">Title (Fallback):</label>
|
||||
<input type="text" name="manual_title" id="title"><br><br>
|
||||
|
||||
<label for="year">Year (Fallback):</label>
|
||||
<input type="number" name="manual_release_year" id="year" min="1888"><br><br>
|
||||
|
||||
<label for="rating">Rating (0-5):</label>
|
||||
<input type="number" name="rating" id="rating" min="0" max="5" required><br><br>
|
||||
|
||||
<button type="submit">Log Movie</button>
|
||||
</fieldset>
|
||||
</form>
|
||||
|
||||
<hr>
|
||||
|
||||
<!-- Rendering the Domain Models -->
|
||||
<div class="diary-entries">
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<div class="diary">
|
||||
{% for entry in entries %}
|
||||
<div class="entry">
|
||||
<article class="entry">
|
||||
{% if let Some(poster) = entry.movie().poster_path() %}
|
||||
<!-- Assuming you have a route to serve the raw images -->
|
||||
<img src="/static/posters/{{ poster.value() }}" class="poster" alt="Poster">
|
||||
{% endif %}
|
||||
|
||||
<h3>{{ entry.movie().title().value() }} ({{ entry.movie().release_year().value() }})</h3>
|
||||
<p><strong>Rating:</strong> {{ entry.review().rating().value() }} / 5</p>
|
||||
|
||||
{% if let Some(comment) = entry.review().comment() %}
|
||||
<p><em>"{{ comment.value() }}"</em></p>
|
||||
{% endif %}
|
||||
|
||||
<p><small>Watched on: {{ entry.review().watched_at().format("%Y-%m-%d") }}</small></p>
|
||||
<div class="clear"></div>
|
||||
<div class="poster">
|
||||
<img src="/posters/{{ poster.value() }}" alt="">
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="entry-body">
|
||||
<div class="entry-title">
|
||||
{{ entry.movie().title().value() }}
|
||||
<span class="year">({{ entry.movie().release_year().value() }})</span>
|
||||
</div>
|
||||
{% if let Some(dir) = entry.movie().director() %}
|
||||
<div class="director">{{ dir }}</div>
|
||||
{% endif %}
|
||||
<div class="rating">
|
||||
<span class="star {% if entry.review().rating().value() >= 1 %}filled{% else %}empty{% endif %}">★</span>
|
||||
<span class="star {% if entry.review().rating().value() >= 2 %}filled{% else %}empty{% endif %}">★</span>
|
||||
<span class="star {% if entry.review().rating().value() >= 3 %}filled{% else %}empty{% endif %}">★</span>
|
||||
<span class="star {% if entry.review().rating().value() >= 4 %}filled{% else %}empty{% endif %}">★</span>
|
||||
<span class="star {% if entry.review().rating().value() >= 5 %}filled{% else %}empty{% endif %}">★</span>
|
||||
</div>
|
||||
{% if let Some(comment) = entry.review().comment() %}
|
||||
<div class="comment">{{ comment.value() }}</div>
|
||||
{% endif %}
|
||||
<div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
|
||||
{% if let Some(uid) = ctx.user_id %}
|
||||
{% if *uid == entry.review().user_id().value() %}
|
||||
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
|
||||
<button type="submit">Delete</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
</article>
|
||||
{% else %}
|
||||
<p>No movies logged yet. Go watch something!</p>
|
||||
<p class="empty">No movies logged yet.</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
<!-- Simple Pagination -->
|
||||
<div>
|
||||
<nav class="pagination">
|
||||
{% if current_offset > 0 %}
|
||||
<a href="/diary?offset={{ current_offset - limit }}">Previous Page</a>
|
||||
<a href="/?offset={{ current_offset - limit }}">← Prev</a>
|
||||
{% endif %}
|
||||
{% if has_more %}
|
||||
<a href="/diary?offset={{ current_offset + limit }}">Next Page</a>
|
||||
<a href="/?offset={{ current_offset + limit }}">Next →</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
</nav>
|
||||
{% endblock %}
|
||||
|
||||
18
crates/adapters/template-askama/templates/login.html
Normal file
18
crates/adapters/template-askama/templates/login.html
Normal file
@@ -0,0 +1,18 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<h1>Login</h1>
|
||||
{% if let Some(err) = error %}
|
||||
<p class="error">{{ err }}</p>
|
||||
{% endif %}
|
||||
<form method="POST" action="/login">
|
||||
<label>
|
||||
Email<br>
|
||||
<input type="email" name="email" required autocomplete="email">
|
||||
</label>
|
||||
<label>
|
||||
Password<br>
|
||||
<input type="password" name="password" required autocomplete="current-password">
|
||||
</label>
|
||||
<button type="submit">Login</button>
|
||||
</form>
|
||||
{% endblock %}
|
||||
40
crates/adapters/template-askama/templates/new_review.html
Normal file
40
crates/adapters/template-askama/templates/new_review.html
Normal file
@@ -0,0 +1,40 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<h1>Log a Review</h1>
|
||||
{% if let Some(err) = error %}
|
||||
<p class="error">{{ err }}</p>
|
||||
{% endif %}
|
||||
<form method="POST" action="/reviews">
|
||||
<label>
|
||||
OMDB ID <span class="optional">(optional)</span><br>
|
||||
<input type="text" name="external_metadata_id" placeholder="tt0166924">
|
||||
</label>
|
||||
<hr>
|
||||
<label>
|
||||
Title<br>
|
||||
<input type="text" name="manual_title">
|
||||
</label>
|
||||
<label>
|
||||
Year<br>
|
||||
<input type="number" name="manual_release_year" min="1888" max="2100">
|
||||
</label>
|
||||
<label>
|
||||
Director<br>
|
||||
<input type="text" name="manual_director">
|
||||
</label>
|
||||
<hr>
|
||||
<label>
|
||||
Rating (0–5)<br>
|
||||
<input type="number" name="rating" min="0" max="5" required>
|
||||
</label>
|
||||
<label>
|
||||
Watched<br>
|
||||
<input type="datetime-local" name="watched_at" required>
|
||||
</label>
|
||||
<label>
|
||||
Comment<br>
|
||||
<textarea name="comment"></textarea>
|
||||
</label>
|
||||
<button type="submit">Log Review</button>
|
||||
</form>
|
||||
{% endblock %}
|
||||
165
crates/adapters/template-askama/templates/profile.html
Normal file
165
crates/adapters/template-askama/templates/profile.html
Normal file
@@ -0,0 +1,165 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<div class="profile">
|
||||
|
||||
<div class="stats-header">
|
||||
<div class="profile-name">{{ profile_display_name }}</div>
|
||||
<div class="stats-grid">
|
||||
<div class="stat-tile">
|
||||
<div class="stat-value">{{ stats.total_movies }}</div>
|
||||
<div class="stat-label">movies</div>
|
||||
</div>
|
||||
<div class="stat-tile">
|
||||
<div class="stat-value">{{ stats.avg_rating_display() }}★</div>
|
||||
<div class="stat-label">avg rating</div>
|
||||
</div>
|
||||
<div class="stat-tile">
|
||||
<div class="stat-value">{{ stats.favorite_director_display() }}</div>
|
||||
<div class="stat-label">fav director</div>
|
||||
</div>
|
||||
<div class="stat-tile">
|
||||
<div class="stat-value">{{ stats.most_active_month_display() }}</div>
|
||||
<div class="stat-label">most active</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="view-tabs">
|
||||
<a href="?view=recent" class="view-tab {% if view == "recent" %}active{% endif %}">Recent</a>
|
||||
<a href="?view=ratings" class="view-tab {% if view == "ratings" %}active{% endif %}">Top Rated</a>
|
||||
<a href="?view=history" class="view-tab {% if view == "history" %}active{% endif %}">History</a>
|
||||
<a href="?view=trends" class="view-tab {% if view == "trends" %}active{% endif %}">Trends</a>
|
||||
</div>
|
||||
|
||||
{% if view == "history" %}
|
||||
{% if let Some(hist) = history %}
|
||||
<div class="heatmap-section">
|
||||
<div class="heatmap-label">Movies watched this year</div>
|
||||
<div class="heatmap">
|
||||
{% for cell in heatmap %}
|
||||
<div class="heatmap-cell" style="--alpha: {{ cell.alpha }}">
|
||||
<div class="heatmap-count">{{ cell.count }}</div>
|
||||
<div class="heatmap-month">{{ cell.month_label }}</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
{% for month in hist %}
|
||||
<div class="history-month">
|
||||
<h3 class="month-heading">{{ month.month_label }} <span class="month-count">{{ month.count }}</span></h3>
|
||||
<div class="diary">
|
||||
{% for entry in month.entries %}
|
||||
<article class="entry">
|
||||
{% if let Some(poster) = entry.movie().poster_path() %}
|
||||
<div class="poster"><img src="/posters/{{ poster.value() }}" alt=""></div>
|
||||
{% endif %}
|
||||
<div class="entry-body">
|
||||
<div class="entry-title">{{ entry.movie().title().value() }} <span class="year">({{ entry.movie().release_year().value() }})</span></div>
|
||||
{% if let Some(dir) = entry.movie().director() %}<div class="director">{{ dir }}</div>{% endif %}
|
||||
<div class="rating">
|
||||
{% for filled in entry.review().stars() %}
|
||||
<span class="star {% if filled %}filled{% else %}empty{% endif %}">★</span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<div class="watched-at">{{ entry.review().watched_at().format("%b %-d") }}</div>
|
||||
</div>
|
||||
</article>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="empty">No movies logged yet.</p>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{% elif view == "trends" %}
|
||||
{% if let Some(t) = trends %}
|
||||
<div class="trends-section">
|
||||
{% if !monthly_rating_rows.is_empty() %}
|
||||
<div class="chart-block">
|
||||
<div class="chart-label">Average rating per month</div>
|
||||
<div class="bar-chart">
|
||||
{% for row in monthly_rating_rows %}
|
||||
<div class="bar-col">
|
||||
<div class="bar-value">{{ "{:.1}"|format(row.rating.avg_rating) }}</div>
|
||||
<div class="bar-fill" style="height: {{ row.bar_height_px }}px"></div>
|
||||
<div class="bar-month">{{ row.rating.month_label }}</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if !t.top_directors.is_empty() %}
|
||||
<div class="chart-block">
|
||||
<div class="chart-label">Most watched directors</div>
|
||||
<div class="director-chart">
|
||||
{% for d in t.top_directors %}
|
||||
<div class="director-row">
|
||||
<div class="director-name">{{ d.director }}</div>
|
||||
<div class="director-bar">
|
||||
{% if t.max_director_count > 0 %}
|
||||
<div class="director-bar-fill" style="width: {{ d.count * 100 / t.max_director_count }}%"></div>
|
||||
{% else %}
|
||||
<div class="director-bar-fill" style="width: 0%"></div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="director-count">{{ d.count }}</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% else %}
|
||||
{% if let Some(paged) = entries %}
|
||||
<div class="diary">
|
||||
{% for entry in paged.items %}
|
||||
<article class="entry">
|
||||
{% if let Some(poster) = entry.movie().poster_path() %}
|
||||
<div class="poster">
|
||||
<img src="/posters/{{ poster.value() }}" alt="">
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="entry-body">
|
||||
<div class="entry-title">
|
||||
{{ entry.movie().title().value() }}
|
||||
<span class="year">({{ entry.movie().release_year().value() }})</span>
|
||||
</div>
|
||||
{% if let Some(dir) = entry.movie().director() %}
|
||||
<div class="director">{{ dir }}</div>
|
||||
{% endif %}
|
||||
<div class="rating">
|
||||
{% for filled in entry.review().stars() %}
|
||||
<span class="star {% if filled %}filled{% else %}empty{% endif %}">★</span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% if let Some(comment) = entry.review().comment() %}
|
||||
<div class="comment">{{ comment.value() }}</div>
|
||||
{% endif %}
|
||||
<div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
|
||||
{% if ctx.is_current_user(entry.review().user_id().value()) %}
|
||||
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
|
||||
<button type="submit">Delete</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
</article>
|
||||
{% else %}
|
||||
<p class="empty">No reviews yet.</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<nav class="pagination">
|
||||
{% if current_offset >= limit %}
|
||||
<a href="?view={{ view }}&offset={{ current_offset - limit }}">← Prev</a>
|
||||
{% endif %}
|
||||
{% if has_more %}
|
||||
<a href="?view={{ view }}&offset={{ current_offset + limit }}">Next →</a>
|
||||
{% endif %}
|
||||
</nav>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
{% endblock %}
|
||||
18
crates/adapters/template-askama/templates/register.html
Normal file
18
crates/adapters/template-askama/templates/register.html
Normal file
@@ -0,0 +1,18 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<h1>Register</h1>
|
||||
{% if let Some(err) = error %}
|
||||
<p class="error">{{ err }}</p>
|
||||
{% endif %}
|
||||
<form method="POST" action="/register">
|
||||
<label>
|
||||
Email<br>
|
||||
<input type="email" name="email" required autocomplete="email">
|
||||
</label>
|
||||
<label>
|
||||
Password<br>
|
||||
<input type="password" name="password" required autocomplete="new-password">
|
||||
</label>
|
||||
<button type="submit">Register</button>
|
||||
</form>
|
||||
{% endblock %}
|
||||
18
crates/adapters/template-askama/templates/users.html
Normal file
18
crates/adapters/template-askama/templates/users.html
Normal file
@@ -0,0 +1,18 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<div class="users-list">
|
||||
<h2 class="page-title">Members</h2>
|
||||
{% for user in users %}
|
||||
<div class="user-row">
|
||||
<div class="user-avatar">{{ user.initial() }}</div>
|
||||
<div class="user-info">
|
||||
<div class="user-name">{{ user.display_name() }}</div>
|
||||
<div class="user-meta">{{ user.total_movies }} movies · avg {{ user.avg_rating_display() }}★</div>
|
||||
</div>
|
||||
<a href="/users/{{ user.user_id.value() }}" class="btn-secondary">View profile →</a>
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="empty">No users yet.</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -4,7 +4,11 @@ version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
async-trait = { workspace = true }
|
||||
domain = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { workspace = true }
|
||||
|
||||
@@ -14,6 +14,7 @@ pub struct LogReviewCommand {
|
||||
pub watched_at: NaiveDateTime,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SyncPosterCommand {
|
||||
pub movie_id: Uuid,
|
||||
pub external_metadata_id: String,
|
||||
@@ -28,3 +29,8 @@ pub struct RegisterCommand {
|
||||
pub email: String,
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
pub struct DeleteReviewCommand {
|
||||
pub review_id: Uuid,
|
||||
pub requesting_user_id: Uuid,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
#[derive(Clone)]
|
||||
pub struct AppConfig {
|
||||
pub allow_registration: bool,
|
||||
pub base_url: String,
|
||||
}
|
||||
|
||||
impl AppConfig {
|
||||
@@ -8,6 +9,8 @@ impl AppConfig {
|
||||
let allow_registration = std::env::var("ALLOW_REGISTRATION")
|
||||
.map(|v| v == "true" || v == "1")
|
||||
.unwrap_or(false);
|
||||
Self { allow_registration }
|
||||
let base_url = std::env::var("BASE_URL")
|
||||
.unwrap_or_else(|_| "http://localhost:3000".to_string());
|
||||
Self { allow_registration, base_url }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
pub mod commands;
|
||||
pub mod config;
|
||||
pub mod context;
|
||||
pub mod movie_resolver;
|
||||
pub mod ports;
|
||||
pub mod queries;
|
||||
pub mod use_cases;
|
||||
|
||||
595
crates/application/src/movie_resolver.rs
Normal file
595
crates/application/src/movie_resolver.rs
Normal file
@@ -0,0 +1,595 @@
|
||||
use async_trait::async_trait;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::Movie,
|
||||
ports::{MetadataClient, MetadataSearchCriteria, MovieRepository},
|
||||
value_objects::{ExternalMetadataId, MovieTitle, ReleaseYear},
|
||||
};
|
||||
|
||||
use crate::commands::LogReviewCommand;
|
||||
|
||||
pub struct MovieResolverDeps<'a> {
|
||||
pub repository: &'a dyn MovieRepository,
|
||||
pub metadata_client: &'a dyn MetadataClient,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait ResolutionStrategy: Send + Sync {
|
||||
fn can_handle(&self, cmd: &LogReviewCommand) -> bool;
|
||||
async fn resolve(
|
||||
&self,
|
||||
cmd: &LogReviewCommand,
|
||||
deps: &MovieResolverDeps<'_>,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError>;
|
||||
}
|
||||
|
||||
pub struct ExternalIdStrategy;
|
||||
pub struct TitleSearchStrategy;
|
||||
pub struct ManualMovieStrategy;
|
||||
|
||||
pub struct MovieResolver {
|
||||
strategies: Vec<Box<dyn ResolutionStrategy>>,
|
||||
}
|
||||
|
||||
impl MovieResolver {
|
||||
pub fn default_pipeline() -> Self {
|
||||
Self {
|
||||
strategies: vec![
|
||||
Box::new(ExternalIdStrategy),
|
||||
Box::new(TitleSearchStrategy),
|
||||
Box::new(ManualMovieStrategy),
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn resolve(
|
||||
&self,
|
||||
cmd: &LogReviewCommand,
|
||||
deps: &MovieResolverDeps<'_>,
|
||||
) -> Result<(Movie, bool), DomainError> {
|
||||
for strategy in &self.strategies {
|
||||
if strategy.can_handle(cmd) {
|
||||
if let Some(result) = strategy.resolve(cmd, deps).await? {
|
||||
return Ok(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(DomainError::ValidationError(
|
||||
"Manual title required if TMDB fetch fails or is omitted".into(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ResolutionStrategy for ExternalIdStrategy {
|
||||
fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
|
||||
cmd.external_metadata_id.is_some()
|
||||
}
|
||||
|
||||
async fn resolve(
|
||||
&self,
|
||||
cmd: &LogReviewCommand,
|
||||
deps: &MovieResolverDeps<'_>,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError> {
|
||||
let ext_id_str = cmd.external_metadata_id.as_deref().unwrap();
|
||||
let tmdb_id = ExternalMetadataId::new(ext_id_str.to_string())?;
|
||||
|
||||
if let Some(m) = deps.repository.get_movie_by_external_id(&tmdb_id).await? {
|
||||
return Ok(Some((m, false)));
|
||||
}
|
||||
|
||||
match deps
|
||||
.metadata_client
|
||||
.fetch_movie_metadata(&MetadataSearchCriteria::ImdbId(tmdb_id))
|
||||
.await
|
||||
{
|
||||
Ok(m) => Ok(Some((m, true))),
|
||||
Err(e) => {
|
||||
tracing::warn!(
|
||||
"Failed to fetch from TMDB, falling back to manual entry: {:?}",
|
||||
e
|
||||
);
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ResolutionStrategy for TitleSearchStrategy {
|
||||
fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
|
||||
cmd.manual_title.is_some()
|
||||
}
|
||||
|
||||
async fn resolve(
|
||||
&self,
|
||||
cmd: &LogReviewCommand,
|
||||
deps: &MovieResolverDeps<'_>,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError> {
|
||||
let title = cmd.manual_title.as_deref().unwrap();
|
||||
let criteria = MetadataSearchCriteria::Title {
|
||||
title: title.to_string(),
|
||||
year: cmd.manual_release_year,
|
||||
};
|
||||
match deps.metadata_client.fetch_movie_metadata(&criteria).await {
|
||||
Ok(m) => Ok(Some((m, true))),
|
||||
Err(e) => {
|
||||
tracing::warn!("OMDb title search failed, falling back to manual: {:?}", e);
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ResolutionStrategy for ManualMovieStrategy {
|
||||
fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
|
||||
cmd.manual_title.is_some()
|
||||
}
|
||||
|
||||
async fn resolve(
|
||||
&self,
|
||||
cmd: &LogReviewCommand,
|
||||
deps: &MovieResolverDeps<'_>,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError> {
|
||||
let title_str = match &cmd.manual_title {
|
||||
Some(t) => t,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let year_val = cmd.manual_release_year.ok_or_else(|| {
|
||||
DomainError::ValidationError(
|
||||
"Manual release year required if TMDB fetch fails or is omitted".into(),
|
||||
)
|
||||
})?;
|
||||
|
||||
let title = MovieTitle::new(title_str.clone())?;
|
||||
let release_year = ReleaseYear::new(year_val)?;
|
||||
|
||||
let candidates = deps
|
||||
.repository
|
||||
.get_movies_by_title_and_year(&title, &release_year)
|
||||
.await?;
|
||||
|
||||
let matched = candidates
|
||||
.into_iter()
|
||||
.find(|m| m.is_manual_match(&title, &release_year, cmd.manual_director.as_deref()));
|
||||
|
||||
if let Some(existing) = matched {
|
||||
Ok(Some((existing, false)))
|
||||
} else {
|
||||
let new_movie =
|
||||
Movie::new(None, title, release_year, cmd.manual_director.clone(), None);
|
||||
Ok(Some((new_movie, true)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use chrono::NaiveDate;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated},
|
||||
ports::{MetadataSearchCriteria, MovieRepository},
|
||||
value_objects::{
|
||||
ExternalMetadataId, MovieId, MovieTitle, PosterUrl, ReleaseYear, ReviewId,
|
||||
},
|
||||
};
|
||||
|
||||
fn make_cmd(ext_id: Option<&str>, title: Option<&str>, year: Option<u16>) -> LogReviewCommand {
|
||||
LogReviewCommand {
|
||||
external_metadata_id: ext_id.map(String::from),
|
||||
manual_title: title.map(String::from),
|
||||
manual_release_year: year,
|
||||
manual_director: None,
|
||||
user_id: uuid::Uuid::new_v4(),
|
||||
rating: 4,
|
||||
comment: None,
|
||||
watched_at: NaiveDate::from_ymd_opt(2024, 1, 1)
|
||||
.unwrap()
|
||||
.and_hms_opt(0, 0, 0)
|
||||
.unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
fn make_movie() -> Movie {
|
||||
Movie::new(
|
||||
None,
|
||||
MovieTitle::new("Inception".to_string()).unwrap(),
|
||||
ReleaseYear::new(2010).unwrap(),
|
||||
None,
|
||||
None,
|
||||
)
|
||||
}
|
||||
|
||||
struct RepoWithExternalMovie(Movie);
|
||||
struct RepoEmpty;
|
||||
struct RepoWithTitleMatch(Movie);
|
||||
|
||||
#[async_trait]
|
||||
impl MovieRepository for RepoWithExternalMovie {
|
||||
async fn get_movie_by_external_id(
|
||||
&self,
|
||||
_: &ExternalMetadataId,
|
||||
) -> Result<Option<Movie>, DomainError> {
|
||||
Ok(Some(self.0.clone()))
|
||||
}
|
||||
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_movies_by_title_and_year(
|
||||
&self,
|
||||
_: &MovieTitle,
|
||||
_: &ReleaseYear,
|
||||
) -> Result<Vec<Movie>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn query_diary(
|
||||
&self,
|
||||
_: &DiaryFilter,
|
||||
) -> Result<Paginated<DiaryEntry>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_review_by_id(
|
||||
&self,
|
||||
_: &ReviewId,
|
||||
) -> Result<Option<Review>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl MovieRepository for RepoEmpty {
|
||||
async fn get_movie_by_external_id(
|
||||
&self,
|
||||
_: &ExternalMetadataId,
|
||||
) -> Result<Option<Movie>, DomainError> {
|
||||
Ok(None)
|
||||
}
|
||||
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_movies_by_title_and_year(
|
||||
&self,
|
||||
_: &MovieTitle,
|
||||
_: &ReleaseYear,
|
||||
) -> Result<Vec<Movie>, DomainError> {
|
||||
Ok(vec![])
|
||||
}
|
||||
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn query_diary(
|
||||
&self,
|
||||
_: &DiaryFilter,
|
||||
) -> Result<Paginated<DiaryEntry>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_review_by_id(
|
||||
&self,
|
||||
_: &ReviewId,
|
||||
) -> Result<Option<Review>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl MovieRepository for RepoWithTitleMatch {
|
||||
async fn get_movie_by_external_id(
|
||||
&self,
|
||||
_: &ExternalMetadataId,
|
||||
) -> Result<Option<Movie>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_movies_by_title_and_year(
|
||||
&self,
|
||||
_: &MovieTitle,
|
||||
_: &ReleaseYear,
|
||||
) -> Result<Vec<Movie>, DomainError> {
|
||||
Ok(vec![self.0.clone()])
|
||||
}
|
||||
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn query_diary(
|
||||
&self,
|
||||
_: &DiaryFilter,
|
||||
) -> Result<Paginated<DiaryEntry>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn get_review_by_id(
|
||||
&self,
|
||||
_: &ReviewId,
|
||||
) -> Result<Option<Review>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
struct MetaReturnsMovie(Movie);
|
||||
struct MetaErrors;
|
||||
|
||||
#[async_trait]
|
||||
impl MetadataClient for MetaReturnsMovie {
|
||||
async fn fetch_movie_metadata(
|
||||
&self,
|
||||
_: &MetadataSearchCriteria,
|
||||
) -> Result<Movie, DomainError> {
|
||||
Ok(self.0.clone())
|
||||
}
|
||||
async fn get_poster_url(
|
||||
&self,
|
||||
_: &ExternalMetadataId,
|
||||
) -> Result<Option<PosterUrl>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl MetadataClient for MetaErrors {
|
||||
async fn fetch_movie_metadata(
|
||||
&self,
|
||||
_: &MetadataSearchCriteria,
|
||||
) -> Result<Movie, DomainError> {
|
||||
Err(DomainError::InfrastructureError("metadata unavailable".into()))
|
||||
}
|
||||
async fn get_poster_url(
|
||||
&self,
|
||||
_: &ExternalMetadataId,
|
||||
) -> Result<Option<PosterUrl>, DomainError> {
|
||||
panic!("unexpected")
|
||||
}
|
||||
}
|
||||
|
||||
// --- ExternalIdStrategy ---
|
||||
|
||||
#[test]
|
||||
fn external_id_strategy_can_handle_cmd_with_id() {
|
||||
let cmd = make_cmd(Some("tt123"), None, None);
|
||||
assert!(ExternalIdStrategy.can_handle(&cmd));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn external_id_strategy_cannot_handle_cmd_without_id() {
|
||||
let cmd = make_cmd(None, Some("Inception"), Some(2010));
|
||||
assert!(!ExternalIdStrategy.can_handle(&cmd));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn external_id_strategy_returns_cached_movie() {
|
||||
let movie = make_movie();
|
||||
let repo = RepoWithExternalMovie(movie.clone());
|
||||
let meta = MetaErrors;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(Some("tt123"), None, None);
|
||||
let result = ExternalIdStrategy.resolve(&cmd, &deps).await.unwrap();
|
||||
assert!(matches!(result, Some((_, false))));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn external_id_strategy_fetches_from_metadata_when_not_cached() {
|
||||
let movie = make_movie();
|
||||
let repo = RepoEmpty;
|
||||
let meta = MetaReturnsMovie(movie);
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(Some("tt123"), None, None);
|
||||
let result = ExternalIdStrategy.resolve(&cmd, &deps).await.unwrap();
|
||||
assert!(matches!(result, Some((_, true))));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn external_id_strategy_falls_through_on_metadata_error() {
|
||||
let repo = RepoEmpty;
|
||||
let meta = MetaErrors;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(Some("tt123"), None, None);
|
||||
let result = ExternalIdStrategy.resolve(&cmd, &deps).await.unwrap();
|
||||
assert!(result.is_none());
|
||||
}
|
||||
|
||||
// --- TitleSearchStrategy ---
|
||||
|
||||
#[test]
|
||||
fn title_strategy_can_handle_cmd_with_title() {
|
||||
let cmd = make_cmd(None, Some("Inception"), Some(2010));
|
||||
assert!(TitleSearchStrategy.can_handle(&cmd));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn title_strategy_cannot_handle_cmd_without_title() {
|
||||
let cmd = make_cmd(Some("tt123"), None, None);
|
||||
assert!(!TitleSearchStrategy.can_handle(&cmd));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn title_strategy_fetches_from_metadata() {
|
||||
let movie = make_movie();
|
||||
let repo = RepoEmpty;
|
||||
let meta = MetaReturnsMovie(movie);
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(None, Some("Inception"), Some(2010));
|
||||
let result = TitleSearchStrategy.resolve(&cmd, &deps).await.unwrap();
|
||||
assert!(matches!(result, Some((_, true))));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn title_strategy_falls_through_on_metadata_error() {
|
||||
let repo = RepoEmpty;
|
||||
let meta = MetaErrors;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(None, Some("Inception"), Some(2010));
|
||||
let result = TitleSearchStrategy.resolve(&cmd, &deps).await.unwrap();
|
||||
assert!(result.is_none());
|
||||
}
|
||||
|
||||
// --- ManualMovieStrategy ---
|
||||
|
||||
#[test]
|
||||
fn manual_strategy_can_handle_cmd_with_title() {
|
||||
let cmd = make_cmd(None, Some("Inception"), Some(2010));
|
||||
assert!(ManualMovieStrategy.can_handle(&cmd));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn manual_strategy_cannot_handle_cmd_without_title() {
|
||||
let cmd = make_cmd(Some("tt123"), None, None);
|
||||
assert!(!ManualMovieStrategy.can_handle(&cmd));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn manual_strategy_returns_existing_movie() {
|
||||
let movie = make_movie();
|
||||
let repo = RepoWithTitleMatch(movie.clone());
|
||||
let meta = MetaErrors;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(None, Some("Inception"), Some(2010));
|
||||
let result = ManualMovieStrategy.resolve(&cmd, &deps).await.unwrap();
|
||||
assert!(matches!(result, Some((_, false))));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn manual_strategy_creates_new_movie_when_no_match() {
|
||||
let repo = RepoEmpty;
|
||||
let meta = MetaErrors;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(None, Some("Inception"), Some(2010));
|
||||
let result = ManualMovieStrategy.resolve(&cmd, &deps).await.unwrap();
|
||||
assert!(matches!(result, Some((_, true))));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn manual_strategy_errors_without_year() {
|
||||
let repo = RepoEmpty;
|
||||
let meta = MetaErrors;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(None, Some("Inception"), None);
|
||||
assert!(ManualMovieStrategy.resolve(&cmd, &deps).await.is_err());
|
||||
}
|
||||
|
||||
// --- MovieResolver pipeline ---
|
||||
|
||||
#[tokio::test]
|
||||
async fn resolver_returns_error_when_no_strategy_matches() {
|
||||
let repo = RepoEmpty;
|
||||
let meta = MetaErrors;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(None, None, None);
|
||||
let result = MovieResolver::default_pipeline().resolve(&cmd, &deps).await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn resolver_uses_cached_movie_when_external_id_matches() {
|
||||
let movie = make_movie();
|
||||
let repo = RepoWithExternalMovie(movie.clone());
|
||||
let meta = MetaErrors;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(Some("tt123"), None, None);
|
||||
let (_, is_new) = MovieResolver::default_pipeline()
|
||||
.resolve(&cmd, &deps)
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(!is_new);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn resolver_falls_through_to_manual_when_external_and_title_both_fail() {
|
||||
let repo = RepoEmpty;
|
||||
let meta = MetaErrors;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: &repo,
|
||||
metadata_client: &meta,
|
||||
};
|
||||
let cmd = make_cmd(Some("tt123"), Some("Inception"), Some(2010));
|
||||
let (_, is_new) = MovieResolver::default_pipeline()
|
||||
.resolve(&cmd, &deps)
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(is_new);
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,74 @@
|
||||
use domain::models::{DiaryEntry, collections::Paginated};
|
||||
use uuid::Uuid;
|
||||
|
||||
use domain::models::{DiaryEntry, FeedEntry, MonthActivity, UserStats, UserSummary, UserTrends, collections::Paginated};
|
||||
|
||||
pub struct HtmlPageContext {
|
||||
pub user_email: Option<String>,
|
||||
pub user_id: Option<Uuid>,
|
||||
pub register_enabled: bool,
|
||||
pub rss_url: String,
|
||||
pub page_title: String,
|
||||
pub canonical_url: String,
|
||||
}
|
||||
|
||||
impl HtmlPageContext {
|
||||
pub fn is_current_user(&self, id: Uuid) -> bool {
|
||||
self.user_id == Some(id)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct LoginPageData<'a> {
|
||||
pub ctx: HtmlPageContext,
|
||||
pub error: Option<&'a str>,
|
||||
}
|
||||
|
||||
pub struct RegisterPageData<'a> {
|
||||
pub ctx: HtmlPageContext,
|
||||
pub error: Option<&'a str>,
|
||||
}
|
||||
|
||||
pub struct NewReviewPageData<'a> {
|
||||
pub ctx: HtmlPageContext,
|
||||
pub error: Option<&'a str>,
|
||||
}
|
||||
|
||||
pub struct ActivityFeedPageData {
|
||||
pub ctx: HtmlPageContext,
|
||||
pub entries: Paginated<FeedEntry>,
|
||||
pub current_offset: u32,
|
||||
pub has_more: bool,
|
||||
pub limit: u32,
|
||||
}
|
||||
|
||||
pub struct UsersPageData {
|
||||
pub ctx: HtmlPageContext,
|
||||
pub users: Vec<UserSummary>,
|
||||
}
|
||||
|
||||
pub struct ProfilePageData {
|
||||
pub ctx: HtmlPageContext,
|
||||
pub profile_user_id: Uuid,
|
||||
pub profile_user_email: String,
|
||||
pub stats: UserStats,
|
||||
pub view: String,
|
||||
pub entries: Option<Paginated<DiaryEntry>>,
|
||||
pub current_offset: u32,
|
||||
pub has_more: bool,
|
||||
pub limit: u32,
|
||||
pub history: Option<Vec<MonthActivity>>,
|
||||
pub trends: Option<UserTrends>,
|
||||
}
|
||||
|
||||
pub trait HtmlRenderer: Send + Sync {
|
||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String>;
|
||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String>;
|
||||
fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String>;
|
||||
fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String>;
|
||||
fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String>;
|
||||
fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String>;
|
||||
fn render_users_page(&self, data: UsersPageData) -> Result<String, String>;
|
||||
fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String>;
|
||||
}
|
||||
|
||||
pub trait RssFeedRenderer: Send + Sync {
|
||||
fn render_feed(&self, entries: &[DiaryEntry]) -> Result<String, String>;
|
||||
fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String>;
|
||||
}
|
||||
|
||||
@@ -6,8 +6,23 @@ pub struct GetDiaryQuery {
|
||||
pub offset: Option<u32>,
|
||||
pub sort_by: Option<SortDirection>,
|
||||
pub movie_id: Option<Uuid>,
|
||||
pub user_id: Option<Uuid>,
|
||||
}
|
||||
|
||||
pub struct GetReviewHistoryQuery {
|
||||
pub movie_id: Uuid,
|
||||
}
|
||||
|
||||
pub struct GetActivityFeedQuery {
|
||||
pub limit: Option<u32>,
|
||||
pub offset: Option<u32>,
|
||||
}
|
||||
|
||||
pub struct GetUsersQuery;
|
||||
|
||||
pub struct GetUserProfileQuery {
|
||||
pub user_id: Uuid,
|
||||
pub view: String,
|
||||
pub limit: Option<u32>,
|
||||
pub offset: Option<u32>,
|
||||
}
|
||||
|
||||
27
crates/application/src/use_cases/delete_review.rs
Normal file
27
crates/application/src/use_cases/delete_review.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use domain::{errors::DomainError, value_objects::{ReviewId, UserId}};
|
||||
use crate::{commands::DeleteReviewCommand, context::AppContext};
|
||||
|
||||
pub async fn execute(ctx: &AppContext, cmd: DeleteReviewCommand) -> Result<(), DomainError> {
|
||||
let review_id = ReviewId::from_uuid(cmd.review_id);
|
||||
let requesting_user_id = UserId::from_uuid(cmd.requesting_user_id);
|
||||
|
||||
let review = ctx
|
||||
.repository
|
||||
.get_review_by_id(&review_id)
|
||||
.await?
|
||||
.ok_or_else(|| DomainError::NotFound(format!("review {}", cmd.review_id)))?;
|
||||
|
||||
if review.user_id() != &requesting_user_id {
|
||||
return Err(DomainError::Unauthorized("not your review".into()));
|
||||
}
|
||||
|
||||
let movie_id = review.movie_id().clone();
|
||||
ctx.repository.delete_review(&review_id).await?;
|
||||
|
||||
let history = ctx.repository.get_review_history(&movie_id).await?;
|
||||
if history.viewings().is_empty() {
|
||||
ctx.repository.delete_movie(&movie_id).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
13
crates/application/src/use_cases/get_activity_feed.rs
Normal file
13
crates/application/src/use_cases/get_activity_feed.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::{FeedEntry, collections::{PageParams, Paginated}},
|
||||
};
|
||||
use crate::{context::AppContext, queries::GetActivityFeedQuery};
|
||||
|
||||
pub async fn execute(
|
||||
ctx: &AppContext,
|
||||
query: GetActivityFeedQuery,
|
||||
) -> Result<Paginated<FeedEntry>, DomainError> {
|
||||
let page = PageParams::new(query.limit, query.offset)?;
|
||||
ctx.repository.query_activity_feed(&page).await
|
||||
}
|
||||
@@ -4,7 +4,7 @@ use domain::{
|
||||
DiaryEntry, DiaryFilter, SortDirection,
|
||||
collections::{PageParams, Paginated},
|
||||
},
|
||||
value_objects::MovieId,
|
||||
value_objects::{MovieId, UserId},
|
||||
};
|
||||
|
||||
use crate::{context::AppContext, queries::GetDiaryQuery};
|
||||
@@ -14,16 +14,15 @@ pub async fn execute(
|
||||
query: GetDiaryQuery,
|
||||
) -> Result<Paginated<DiaryEntry>, DomainError> {
|
||||
let page = PageParams::new(query.limit, query.offset)?;
|
||||
|
||||
let movie_id = query.movie_id.map(MovieId::from_uuid);
|
||||
let user_id = query.user_id.map(UserId::from_uuid);
|
||||
|
||||
let filter = DiaryFilter {
|
||||
sort_by: query.sort_by.unwrap_or(SortDirection::Descending),
|
||||
page,
|
||||
movie_id,
|
||||
user_id,
|
||||
};
|
||||
|
||||
let paginated_results = ctx.repository.query_diary(&filter).await?;
|
||||
|
||||
Ok(paginated_results)
|
||||
ctx.repository.query_diary(&filter).await
|
||||
}
|
||||
|
||||
93
crates/application/src/use_cases/get_user_profile.rs
Normal file
93
crates/application/src/use_cases/get_user_profile.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::{
|
||||
DiaryEntry, DiaryFilter, MonthActivity, SortDirection, UserStats, UserTrends,
|
||||
collections::{PageParams, Paginated},
|
||||
},
|
||||
value_objects::UserId,
|
||||
};
|
||||
use crate::{context::AppContext, queries::GetUserProfileQuery};
|
||||
|
||||
pub struct UserProfileData {
|
||||
pub stats: UserStats,
|
||||
pub entries: Option<Paginated<DiaryEntry>>,
|
||||
pub history: Option<Vec<MonthActivity>>,
|
||||
pub trends: Option<UserTrends>,
|
||||
}
|
||||
|
||||
pub async fn execute(
|
||||
ctx: &AppContext,
|
||||
query: GetUserProfileQuery,
|
||||
) -> Result<UserProfileData, DomainError> {
|
||||
let user_id = UserId::from_uuid(query.user_id);
|
||||
let stats = ctx.repository.get_user_stats(&user_id).await?;
|
||||
|
||||
match query.view.as_str() {
|
||||
"history" => {
|
||||
// V1: loads all entries into memory. Personal diaries are bounded in size;
|
||||
// spec calls for showing every movie grouped by month, so full load is intentional.
|
||||
let all_entries = ctx.repository.get_user_history(&user_id).await?;
|
||||
let history = group_by_month(all_entries);
|
||||
Ok(UserProfileData { stats, entries: None, history: Some(history), trends: None })
|
||||
}
|
||||
"trends" => {
|
||||
let trends = ctx.repository.get_user_trends(&user_id).await?;
|
||||
Ok(UserProfileData { stats, entries: None, history: None, trends: Some(trends) })
|
||||
}
|
||||
"ratings" => {
|
||||
let page = PageParams::new(query.limit, query.offset)?;
|
||||
let filter = DiaryFilter {
|
||||
sort_by: SortDirection::ByRatingDesc,
|
||||
page,
|
||||
movie_id: None,
|
||||
user_id: Some(user_id),
|
||||
};
|
||||
let entries = ctx.repository.query_diary(&filter).await?;
|
||||
Ok(UserProfileData { stats, entries: Some(entries), history: None, trends: None })
|
||||
}
|
||||
"recent" => {
|
||||
let page = PageParams::new(query.limit, query.offset)?;
|
||||
let filter = DiaryFilter {
|
||||
sort_by: SortDirection::Descending,
|
||||
page,
|
||||
movie_id: None,
|
||||
user_id: Some(user_id),
|
||||
};
|
||||
let entries = ctx.repository.query_diary(&filter).await?;
|
||||
Ok(UserProfileData { stats, entries: Some(entries), history: None, trends: None })
|
||||
}
|
||||
other => Err(DomainError::ValidationError(format!("unknown view: {}", other))),
|
||||
}
|
||||
}
|
||||
|
||||
fn group_by_month(entries: Vec<DiaryEntry>) -> Vec<MonthActivity> {
|
||||
use std::collections::BTreeMap;
|
||||
let mut map: BTreeMap<String, Vec<DiaryEntry>> = BTreeMap::new();
|
||||
for entry in entries {
|
||||
let ym = entry.review().watched_at().format("%Y-%m").to_string();
|
||||
map.entry(ym).or_default().push(entry);
|
||||
}
|
||||
let mut result: Vec<MonthActivity> = map
|
||||
.into_iter()
|
||||
.map(|(ym, entries)| MonthActivity {
|
||||
month_label: format_year_month_long(&ym),
|
||||
count: entries.len() as i64,
|
||||
entries,
|
||||
year_month: ym,
|
||||
})
|
||||
.collect();
|
||||
result.reverse();
|
||||
result
|
||||
}
|
||||
|
||||
fn format_year_month_long(ym: &str) -> String {
|
||||
let parts: Vec<&str> = ym.splitn(2, '-').collect();
|
||||
if parts.len() != 2 { return ym.to_string(); }
|
||||
let month = match parts[1] {
|
||||
"01" => "January", "02" => "February", "03" => "March", "04" => "April",
|
||||
"05" => "May", "06" => "June", "07" => "July", "08" => "August",
|
||||
"09" => "September", "10" => "October", "11" => "November", "12" => "December",
|
||||
_ => parts[1],
|
||||
};
|
||||
format!("{} {}", month, parts[0])
|
||||
}
|
||||
9
crates/application/src/use_cases/get_users.rs
Normal file
9
crates/application/src/use_cases/get_users.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
use domain::{errors::DomainError, models::UserSummary};
|
||||
use crate::{context::AppContext, queries::GetUsersQuery};
|
||||
|
||||
pub async fn execute(
|
||||
ctx: &AppContext,
|
||||
_query: GetUsersQuery,
|
||||
) -> Result<Vec<UserSummary>, DomainError> {
|
||||
ctx.user_repository.list_with_stats().await
|
||||
}
|
||||
@@ -2,18 +2,25 @@ use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
models::{Movie, Review},
|
||||
ports::MetadataSearchCriteria,
|
||||
value_objects::{Comment, ExternalMetadataId, MovieTitle, Rating, ReleaseYear, UserId},
|
||||
value_objects::{Comment, Rating, UserId},
|
||||
};
|
||||
|
||||
use crate::{commands::LogReviewCommand, context::AppContext};
|
||||
use crate::{
|
||||
commands::LogReviewCommand,
|
||||
context::AppContext,
|
||||
movie_resolver::{MovieResolver, MovieResolverDeps},
|
||||
};
|
||||
|
||||
pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), DomainError> {
|
||||
let rating = Rating::new(cmd.rating)?;
|
||||
let user_id = UserId::from_uuid(cmd.user_id);
|
||||
let comment = cmd.comment.clone().map(Comment::new).transpose()?;
|
||||
|
||||
let (movie, is_new_movie) = resolve_movie(ctx, &cmd).await?;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: ctx.repository.as_ref(),
|
||||
metadata_client: ctx.metadata_client.as_ref(),
|
||||
};
|
||||
let (movie, is_new_movie) = MovieResolver::default_pipeline().resolve(&cmd, &deps).await?;
|
||||
|
||||
ctx.repository.upsert_movie(&movie).await?;
|
||||
|
||||
@@ -25,101 +32,6 @@ pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), Doma
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn resolve_movie(
|
||||
ctx: &AppContext,
|
||||
cmd: &LogReviewCommand,
|
||||
) -> Result<(Movie, bool), DomainError> {
|
||||
if let Some(ext_id_str) = &cmd.external_metadata_id {
|
||||
if let Some(resolved) = resolve_external_movie(ctx, ext_id_str).await? {
|
||||
return Ok(resolved);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(title) = &cmd.manual_title {
|
||||
if let Some(resolved) = resolve_by_title(ctx, title, cmd.manual_release_year).await? {
|
||||
return Ok(resolved);
|
||||
}
|
||||
}
|
||||
|
||||
resolve_manual_movie(ctx, cmd).await
|
||||
}
|
||||
|
||||
async fn resolve_external_movie(
|
||||
ctx: &AppContext,
|
||||
ext_id_str: &str,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError> {
|
||||
let tmdb_id = ExternalMetadataId::new(ext_id_str.to_string())?;
|
||||
|
||||
if let Some(m) = ctx.repository.get_movie_by_external_id(&tmdb_id).await? {
|
||||
return Ok(Some((m, false)));
|
||||
}
|
||||
|
||||
match ctx
|
||||
.metadata_client
|
||||
.fetch_movie_metadata(&MetadataSearchCriteria::ImdbId(tmdb_id))
|
||||
.await
|
||||
{
|
||||
Ok(m) => Ok(Some((m, true))),
|
||||
Err(e) => {
|
||||
tracing::warn!(
|
||||
"Failed to fetch from TMDB, falling back to manual entry: {:?}",
|
||||
e
|
||||
);
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn resolve_by_title(
|
||||
ctx: &AppContext,
|
||||
title: &str,
|
||||
year: Option<u16>,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError> {
|
||||
let criteria = MetadataSearchCriteria::Title { title: title.to_string(), year };
|
||||
match ctx.metadata_client.fetch_movie_metadata(&criteria).await {
|
||||
Ok(m) => Ok(Some((m, true))),
|
||||
Err(e) => {
|
||||
tracing::warn!("OMDb title search failed, falling back to manual: {:?}", e);
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn resolve_manual_movie(
|
||||
ctx: &AppContext,
|
||||
cmd: &LogReviewCommand,
|
||||
) -> Result<(Movie, bool), DomainError> {
|
||||
let title_str = cmd.manual_title.as_ref().ok_or_else(|| {
|
||||
DomainError::ValidationError(
|
||||
"Manual title required if TMDB fetch fails or is omitted".into(),
|
||||
)
|
||||
})?;
|
||||
let year_val = cmd.manual_release_year.ok_or_else(|| {
|
||||
DomainError::ValidationError(
|
||||
"Manual release year required if TMDB fetch fails or is omitted".into(),
|
||||
)
|
||||
})?;
|
||||
|
||||
let title = MovieTitle::new(title_str.clone())?;
|
||||
let release_year = ReleaseYear::new(year_val)?;
|
||||
|
||||
let candidates = ctx
|
||||
.repository
|
||||
.get_movies_by_title_and_year(&title, &release_year)
|
||||
.await?;
|
||||
|
||||
let matched_movie = candidates
|
||||
.into_iter()
|
||||
.find(|m| m.is_manual_match(&title, &release_year, cmd.manual_director.as_deref()));
|
||||
|
||||
if let Some(existing_movie) = matched_movie {
|
||||
Ok((existing_movie, false))
|
||||
} else {
|
||||
let new_movie = Movie::new(None, title, release_year, cmd.manual_director.clone(), None);
|
||||
Ok((new_movie, true))
|
||||
}
|
||||
}
|
||||
|
||||
async fn publish_events(
|
||||
ctx: &AppContext,
|
||||
movie: &Movie,
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
pub mod delete_review;
|
||||
pub mod get_activity_feed;
|
||||
pub mod get_diary;
|
||||
pub mod get_review_history;
|
||||
pub mod get_user_profile;
|
||||
pub mod get_users;
|
||||
pub mod log_review;
|
||||
pub mod login;
|
||||
pub mod register;
|
||||
|
||||
@@ -2,15 +2,25 @@ use domain::{errors::DomainError, models::User, value_objects::Email};
|
||||
|
||||
use crate::{commands::RegisterCommand, context::AppContext};
|
||||
|
||||
const MIN_PASSWORD_LENGTH: usize = 8;
|
||||
|
||||
pub async fn execute(ctx: &AppContext, cmd: RegisterCommand) -> Result<(), DomainError> {
|
||||
if !ctx.config.allow_registration {
|
||||
return Err(DomainError::Unauthorized("Registration is disabled".into()));
|
||||
}
|
||||
|
||||
if cmd.password.len() < MIN_PASSWORD_LENGTH {
|
||||
return Err(DomainError::ValidationError(
|
||||
"Password must be at least 8 characters".into(),
|
||||
));
|
||||
}
|
||||
|
||||
let email = Email::new(cmd.email)?;
|
||||
|
||||
if ctx.user_repository.find_by_email(&email).await?.is_some() {
|
||||
return Err(DomainError::ValidationError("Email already registered".into()));
|
||||
return Err(DomainError::ValidationError(
|
||||
"Email already registered".into(),
|
||||
));
|
||||
}
|
||||
|
||||
let hash = ctx.password_hasher.hash(&cmd.password).await?;
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
[package]
|
||||
name = "common"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
thiserror = { workspace = true }
|
||||
@@ -1 +0,0 @@
|
||||
pub mod errors;
|
||||
@@ -10,5 +10,4 @@ async-trait = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
|
||||
common = { workspace = true }
|
||||
email_address = "0.2.9"
|
||||
|
||||
@@ -16,7 +16,7 @@ pub struct PageParams {
|
||||
|
||||
impl PageParams {
|
||||
const MAX_LIMIT: u32 = 100;
|
||||
const DEFAULT_LIMIT: u32 = 20;
|
||||
const DEFAULT_LIMIT: u32 = 5;
|
||||
|
||||
pub fn new(limit: Option<u32>, offset: Option<u32>) -> Result<Self, DomainError> {
|
||||
let l = limit.unwrap_or(Self::DEFAULT_LIMIT);
|
||||
|
||||
@@ -15,6 +15,7 @@ pub enum SortDirection {
|
||||
#[default]
|
||||
Descending,
|
||||
Ascending,
|
||||
ByRatingDesc,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
@@ -22,6 +23,7 @@ pub struct DiaryFilter {
|
||||
pub sort_by: SortDirection,
|
||||
pub page: PageParams,
|
||||
pub movie_id: Option<MovieId>,
|
||||
pub user_id: Option<UserId>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
@@ -131,14 +133,6 @@ impl Review {
|
||||
comment: Option<Comment>,
|
||||
watched_at: NaiveDateTime,
|
||||
) -> Result<Self, DomainError> {
|
||||
let now = Utc::now().naive_utc();
|
||||
|
||||
if watched_at > now {
|
||||
return Err(DomainError::ValidationError(
|
||||
"watched_at cannot be in the future".into(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
id: ReviewId::generate(),
|
||||
movie_id,
|
||||
@@ -146,7 +140,7 @@ impl Review {
|
||||
rating,
|
||||
comment,
|
||||
watched_at,
|
||||
created_at: now,
|
||||
created_at: Utc::now().naive_utc(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -191,6 +185,11 @@ impl Review {
|
||||
pub fn created_at(&self) -> &NaiveDateTime {
|
||||
&self.created_at
|
||||
}
|
||||
/// Returns [star1_filled, star2_filled, ..., star5_filled]
|
||||
pub fn stars(&self) -> [bool; 5] {
|
||||
let r = self.rating.value();
|
||||
[r >= 1, r >= 2, r >= 3, r >= 4, r >= 5]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
@@ -270,3 +269,90 @@ impl User {
|
||||
&self.password_hash
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FeedEntry {
|
||||
entry: DiaryEntry,
|
||||
user_email: String,
|
||||
}
|
||||
|
||||
impl FeedEntry {
|
||||
pub fn new(entry: DiaryEntry, user_email: String) -> Self {
|
||||
Self { entry, user_email }
|
||||
}
|
||||
pub fn movie(&self) -> &Movie { self.entry.movie() }
|
||||
pub fn review(&self) -> &Review { self.entry.review() }
|
||||
pub fn user_email(&self) -> &str { &self.user_email }
|
||||
pub fn user_display_name(&self) -> &str {
|
||||
self.user_email.split('@').next().unwrap_or(&self.user_email)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct UserSummary {
|
||||
pub user_id: UserId,
|
||||
pub email: String,
|
||||
pub total_movies: i64,
|
||||
pub avg_rating: Option<f64>,
|
||||
}
|
||||
|
||||
impl UserSummary {
|
||||
pub fn display_name(&self) -> &str {
|
||||
self.email.split('@').next().unwrap_or(&self.email)
|
||||
}
|
||||
pub fn avg_rating_display(&self) -> String {
|
||||
self.avg_rating.map(|r| format!("{:.1}", r)).unwrap_or_else(|| "—".to_string())
|
||||
}
|
||||
pub fn initial(&self) -> char {
|
||||
self.display_name().chars().next().unwrap_or('?').to_ascii_uppercase()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct UserStats {
|
||||
pub total_movies: i64,
|
||||
pub avg_rating: Option<f64>,
|
||||
pub favorite_director: Option<String>,
|
||||
pub most_active_month: Option<String>,
|
||||
}
|
||||
|
||||
impl UserStats {
|
||||
pub fn avg_rating_display(&self) -> String {
|
||||
self.avg_rating.map(|r| format!("{:.1}", r)).unwrap_or_else(|| "—".to_string())
|
||||
}
|
||||
pub fn favorite_director_display(&self) -> &str {
|
||||
self.favorite_director.as_deref().unwrap_or("—")
|
||||
}
|
||||
pub fn most_active_month_display(&self) -> &str {
|
||||
self.most_active_month.as_deref().unwrap_or("—")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MonthActivity {
|
||||
pub year_month: String,
|
||||
pub month_label: String,
|
||||
pub count: i64,
|
||||
pub entries: Vec<DiaryEntry>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MonthlyRating {
|
||||
pub year_month: String,
|
||||
pub month_label: String,
|
||||
pub avg_rating: f64,
|
||||
pub count: i64,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct DirectorStat {
|
||||
pub director: String,
|
||||
pub count: i64,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct UserTrends {
|
||||
pub monthly_ratings: Vec<MonthlyRating>,
|
||||
pub top_directors: Vec<DirectorStat>,
|
||||
pub max_director_count: i64,
|
||||
}
|
||||
|
||||
@@ -4,10 +4,14 @@ use chrono::{DateTime, Utc};
|
||||
use crate::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
|
||||
models::{
|
||||
DiaryEntry, DiaryFilter, FeedEntry, Movie, Review, ReviewHistory, User, UserStats,
|
||||
UserTrends, UserSummary,
|
||||
collections::{PageParams, Paginated},
|
||||
},
|
||||
value_objects::{
|
||||
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
|
||||
ReleaseYear, UserId,
|
||||
ReleaseYear, ReviewId, UserId,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -32,6 +36,23 @@ pub trait MovieRepository: Send + Sync {
|
||||
-> Result<Paginated<DiaryEntry>, DomainError>;
|
||||
|
||||
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError>;
|
||||
|
||||
async fn get_review_by_id(&self, review_id: &ReviewId) -> Result<Option<Review>, DomainError>;
|
||||
|
||||
async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError>;
|
||||
|
||||
async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError>;
|
||||
|
||||
async fn query_activity_feed(
|
||||
&self,
|
||||
page: &PageParams,
|
||||
) -> Result<Paginated<FeedEntry>, DomainError>;
|
||||
|
||||
async fn get_user_stats(&self, user_id: &UserId) -> Result<UserStats, DomainError>;
|
||||
|
||||
async fn get_user_history(&self, user_id: &UserId) -> Result<Vec<DiaryEntry>, DomainError>;
|
||||
|
||||
async fn get_user_trends(&self, user_id: &UserId) -> Result<UserTrends, DomainError>;
|
||||
}
|
||||
|
||||
pub enum MetadataSearchCriteria {
|
||||
@@ -82,6 +103,9 @@ pub trait AuthService: Send + Sync {
|
||||
pub trait UserRepository: Send + Sync {
|
||||
async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError>;
|
||||
async fn save(&self, user: &User) -> Result<(), DomainError>;
|
||||
async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError>;
|
||||
|
||||
async fn list_with_stats(&self) -> Result<Vec<UserSummary>, DomainError>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
|
||||
@@ -28,7 +28,9 @@ poster-storage = { workspace = true }
|
||||
sqlite = { workspace = true }
|
||||
sqlx = { workspace = true }
|
||||
template-askama = { workspace = true }
|
||||
event-publisher = { workspace = true }
|
||||
rss = { workspace = true }
|
||||
infer = "0.19.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tower = { version = "0.5", features = ["util"] }
|
||||
|
||||
@@ -1,6 +1,23 @@
|
||||
use chrono::NaiveDateTime;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use uuid::Uuid;
|
||||
|
||||
use application::{commands::LogReviewCommand, queries::GetDiaryQuery};
|
||||
use domain::{errors::DomainError, models::SortDirection};
|
||||
|
||||
fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
T: std::str::FromStr,
|
||||
T::Err: std::fmt::Display,
|
||||
{
|
||||
let s = Option::<String>::deserialize(de)?;
|
||||
match s.as_deref() {
|
||||
None | Some("") => Ok(None),
|
||||
Some(s) => s.parse::<T>().map(Some).map_err(serde::de::Error::custom),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct DiaryQueryParams {
|
||||
pub limit: Option<u32>,
|
||||
@@ -11,15 +28,37 @@ pub struct DiaryQueryParams {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LogReviewForm {
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub external_metadata_id: Option<String>,
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub manual_title: Option<String>,
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub manual_release_year: Option<u16>,
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub manual_director: Option<String>,
|
||||
pub rating: u8,
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub comment: Option<String>,
|
||||
pub watched_at: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LoginForm {
|
||||
pub email: String,
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct RegisterForm {
|
||||
pub email: String,
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct ErrorQuery {
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LogReviewRequest {
|
||||
pub external_metadata_id: Option<String>,
|
||||
@@ -89,10 +128,218 @@ pub struct RegisterRequest {
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
pub struct LogReviewData {
|
||||
pub external_metadata_id: Option<String>,
|
||||
pub manual_title: Option<String>,
|
||||
pub manual_release_year: Option<u16>,
|
||||
pub manual_director: Option<String>,
|
||||
pub rating: u8,
|
||||
pub comment: Option<String>,
|
||||
pub watched_at: NaiveDateTime,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ParseReviewError {
|
||||
pub field: &'static str,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
impl TryFrom<LogReviewForm> for LogReviewData {
|
||||
type Error = ParseReviewError;
|
||||
|
||||
fn try_from(form: LogReviewForm) -> Result<Self, Self::Error> {
|
||||
let watched_at = NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M:%S")
|
||||
.or_else(|_| NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M"))
|
||||
.map_err(|_| ParseReviewError {
|
||||
field: "watched_at",
|
||||
message: format!(
|
||||
"invalid date '{}'; expected YYYY-MM-DDTHH:MM[:SS]",
|
||||
form.watched_at
|
||||
),
|
||||
})?;
|
||||
Ok(Self {
|
||||
external_metadata_id: form.external_metadata_id.filter(|s| !s.trim().is_empty()),
|
||||
manual_title: form.manual_title,
|
||||
manual_release_year: form.manual_release_year,
|
||||
manual_director: form.manual_director,
|
||||
rating: form.rating,
|
||||
comment: form.comment,
|
||||
watched_at,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<LogReviewRequest> for LogReviewData {
|
||||
type Error = DomainError;
|
||||
|
||||
fn try_from(req: LogReviewRequest) -> Result<Self, Self::Error> {
|
||||
let watched_at = NaiveDateTime::parse_from_str(&req.watched_at, "%Y-%m-%dT%H:%M:%S")
|
||||
.map_err(|_| {
|
||||
DomainError::ValidationError(
|
||||
"invalid watched_at; expected YYYY-MM-DDTHH:MM:SS".into(),
|
||||
)
|
||||
})?;
|
||||
Ok(Self {
|
||||
external_metadata_id: req.external_metadata_id.filter(|s| !s.trim().is_empty()),
|
||||
manual_title: req.manual_title,
|
||||
manual_release_year: req.manual_release_year,
|
||||
manual_director: req.manual_director,
|
||||
rating: req.rating,
|
||||
comment: req.comment,
|
||||
watched_at,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl LogReviewData {
|
||||
pub fn into_command(self, user_id: Uuid) -> LogReviewCommand {
|
||||
LogReviewCommand {
|
||||
external_metadata_id: self.external_metadata_id,
|
||||
manual_title: self.manual_title,
|
||||
manual_release_year: self.manual_release_year,
|
||||
manual_director: self.manual_director,
|
||||
rating: self.rating,
|
||||
comment: self.comment,
|
||||
watched_at: self.watched_at,
|
||||
user_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DiaryQueryParams> for GetDiaryQuery {
|
||||
fn from(p: DiaryQueryParams) -> Self {
|
||||
GetDiaryQuery {
|
||||
limit: p.limit,
|
||||
offset: p.offset,
|
||||
sort_by: p.sort_by.as_deref().map(|s| {
|
||||
if s == "asc" {
|
||||
SortDirection::Ascending
|
||||
} else {
|
||||
SortDirection::Descending
|
||||
}
|
||||
}),
|
||||
movie_id: p.movie_id,
|
||||
user_id: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Query-string parameters accepted by the profile page.
#[derive(serde::Deserialize, Default)]
pub struct ProfileQueryParams {
// Which profile sub-view to render; interpretation is left to the handler.
pub view: Option<String>,
// Pagination: maximum number of entries to return.
pub limit: Option<u32>,
// Pagination: number of entries to skip.
pub offset: Option<u32>,
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
/// Fixture builder: a `LogReviewForm` with every optional field `None`,
/// rating fixed at 4, and `watched_at` supplied by the caller.
fn make_form(watched_at: &str) -> LogReviewForm {
LogReviewForm {
external_metadata_id: None,
manual_title: None,
manual_release_year: None,
manual_director: None,
rating: 4,
comment: None,
watched_at: watched_at.to_string(),
}
}
|
||||
|
||||
/// Fixture builder: a `LogReviewRequest` with every optional field `None`,
/// rating fixed at 4, and `watched_at` supplied by the caller.
fn make_request(watched_at: &str) -> LogReviewRequest {
LogReviewRequest {
external_metadata_id: None,
manual_title: None,
manual_release_year: None,
manual_director: None,
rating: 4,
comment: None,
watched_at: watched_at.to_string(),
}
}
|
||||
|
||||
// watched_at parsing: the form path is lenient (seconds optional),
// the API path is strict (seconds required). These tests pin that asymmetry.

#[test]
fn form_accepts_datetime_with_seconds() {
let data = LogReviewData::try_from(make_form("2024-03-15T20:30:00")).unwrap();
assert_eq!(data.watched_at.format("%H:%M:%S").to_string(), "20:30:00");
}

#[test]
fn form_accepts_datetime_without_seconds() {
let data = LogReviewData::try_from(make_form("2024-03-15T20:30")).unwrap();
assert_eq!(data.watched_at.format("%H:%M").to_string(), "20:30");
}

#[test]
fn form_rejects_invalid_datetime() {
assert!(LogReviewData::try_from(make_form("not-a-date")).is_err());
}

#[test]
fn api_accepts_datetime_with_seconds() {
let data = LogReviewData::try_from(make_request("2024-03-15T20:30:00")).unwrap();
assert_eq!(data.watched_at.format("%H:%M:%S").to_string(), "20:30:00");
}

// Deliberate: the API does NOT get the form's minute-precision fallback.
#[test]
fn api_rejects_datetime_without_seconds() {
assert!(LogReviewData::try_from(make_request("2024-03-15T20:30")).is_err());
}

#[test]
fn api_rejects_invalid_datetime() {
assert!(LogReviewData::try_from(make_request("garbage")).is_err());
}
|
||||
|
||||
// Whitespace-only external ids must normalise to None on both input paths.
#[test]
fn whitespace_external_id_becomes_none_in_form() {
let mut form = make_form("2024-03-15T20:30:00");
form.external_metadata_id = Some("   ".to_string());
let data = LogReviewData::try_from(form).unwrap();
assert!(data.external_metadata_id.is_none());
}

#[test]
fn whitespace_external_id_becomes_none_in_request() {
let mut req = make_request("2024-03-15T20:30:00");
req.external_metadata_id = Some("   ".to_string());
let data = LogReviewData::try_from(req).unwrap();
assert!(data.external_metadata_id.is_none());
}

// into_command must carry the caller-supplied user id onto the command.
#[test]
fn into_command_sets_user_id() {
let data = LogReviewData::try_from(make_form("2024-03-15T20:30:00")).unwrap();
let user_id = Uuid::new_v4();
let cmd = data.into_command(user_id);
assert_eq!(cmd.user_id, user_id);
}
|
||||
|
||||
// Only the literal "asc" maps to Ascending; everything else is Descending.
#[test]
fn sort_by_asc_string_becomes_ascending() {
let params = DiaryQueryParams {
sort_by: Some("asc".to_string()),
limit: None,
offset: None,
movie_id: None,
};
let query = GetDiaryQuery::from(params);
assert!(matches!(query.sort_by, Some(domain::models::SortDirection::Ascending)));
}

#[test]
fn sort_by_other_string_becomes_descending() {
let params = DiaryQueryParams {
sort_by: Some("desc".to_string()),
limit: None,
offset: None,
movie_id: None,
};
let query = GetDiaryQuery::from(params);
assert!(matches!(query.sort_by, Some(domain::models::SortDirection::Descending)));
}
|
||||
|
||||
#[test]
|
||||
fn diary_response_serializes_correctly() {
|
||||
let resp = DiaryResponse {
|
||||
|
||||
179
crates/presentation/src/event_handlers.rs
Normal file
179
crates/presentation/src/event_handlers.rs
Normal file
@@ -0,0 +1,179 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use application::{commands::SyncPosterCommand, context::AppContext, use_cases::sync_poster};
|
||||
use async_trait::async_trait;
|
||||
use domain::{errors::DomainError, events::DomainEvent};
|
||||
use event_publisher::EventHandler;
|
||||
|
||||
/// Event subscriber that reacts to `MovieDiscovered` events by running
/// the poster-sync use case, retrying with exponential backoff on failure.
pub struct PosterSyncHandler {
// Application context providing the services used by `sync_poster::execute`.
ctx: AppContext,
// Retries after the initial attempt (total attempts = max_retries + 1).
max_retries: u32,
}
|
||||
|
||||
impl PosterSyncHandler {
/// Creates a handler. `max_retries` is the number of additional
/// attempts made after the first attempt fails.
pub fn new(ctx: AppContext, max_retries: u32) -> Self {
Self { ctx, max_retries }
}
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventHandler for PosterSyncHandler {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||
let (movie_id, external_metadata_id) = match event {
|
||||
DomainEvent::MovieDiscovered {
|
||||
movie_id,
|
||||
external_metadata_id,
|
||||
} => (movie_id.value(), external_metadata_id.value().to_owned()),
|
||||
_ => return Ok(()),
|
||||
};
|
||||
|
||||
let mut last_err: Option<DomainError> = None;
|
||||
for attempt in 0..=self.max_retries {
|
||||
let cmd = SyncPosterCommand {
|
||||
movie_id,
|
||||
external_metadata_id: external_metadata_id.clone(),
|
||||
};
|
||||
match sync_poster::execute(&self.ctx, cmd).await {
|
||||
Ok(()) => return Ok(()),
|
||||
Err(e) => {
|
||||
if attempt < self.max_retries {
|
||||
let delay = Duration::from_secs(2u64.pow(attempt));
|
||||
tracing::warn!(
|
||||
attempt = attempt + 1,
|
||||
max_attempts = self.max_retries + 1,
|
||||
delay_secs = delay.as_secs(),
|
||||
"poster sync failed, retrying: {e}"
|
||||
);
|
||||
tokio::time::sleep(delay).await;
|
||||
}
|
||||
last_err = Some(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let err = last_err.expect("loop runs at least once and always sets last_err on Err");
|
||||
tracing::error!(
|
||||
attempts = self.max_retries + 1,
|
||||
"poster sync failed after all attempts: {err}"
|
||||
);
|
||||
Err(err)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
use super::*;
use std::sync::Arc;
use application::config::AppConfig;
use domain::{
errors::DomainError,
events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
ports::{
AuthService, EventPublisher, GeneratedToken, MetadataClient, MetadataSearchCriteria,
MovieRepository, PasswordHasher, PosterFetcherClient, PosterStorage, UserRepository,
},
value_objects::{
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
Rating, ReleaseYear, ReviewId, UserId,
},
};

// Panic-stub ports: each method panics so any accidental dispatch into a service
// fails the test loudly rather than silently succeeding.
struct PanicRepo;
struct PanicMetadata;
struct PanicFetcher;
struct PanicStorage;
struct PanicAuth;
struct PanicHasher;
struct PanicUserRepo;
// The publisher is the one port allowed to succeed silently.
struct NoopPublisher;

#[async_trait]
impl MovieRepository for PanicRepo {
async fn get_movie_by_external_id(&self, _: &ExternalMetadataId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
async fn get_movies_by_title_and_year(&self, _: &MovieTitle, _: &ReleaseYear) -> Result<Vec<Movie>, DomainError> { panic!("unexpected") }
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
async fn get_review_by_id(&self, _: &ReviewId) -> Result<Option<Review>, DomainError> { panic!("unexpected") }
async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> { panic!("unexpected") }
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { panic!("unexpected") }
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
async fn get_user_stats(&self, _: &UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
async fn get_user_history(&self, _: &UserId) -> Result<Vec<DiaryEntry>, DomainError> { panic!("unexpected") }
async fn get_user_trends(&self, _: &UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
}

#[async_trait]
impl MetadataClient for PanicMetadata {
async fn fetch_movie_metadata(&self, _: &MetadataSearchCriteria) -> Result<Movie, DomainError> { panic!("unexpected") }
async fn get_poster_url(&self, _: &ExternalMetadataId) -> Result<Option<PosterUrl>, DomainError> { panic!("unexpected") }
}

#[async_trait]
impl PosterFetcherClient for PanicFetcher {
async fn fetch_poster_bytes(&self, _: &PosterUrl) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
}

#[async_trait]
impl PosterStorage for PanicStorage {
async fn store_poster(&self, _: &MovieId, _: &[u8]) -> Result<PosterPath, DomainError> { panic!("unexpected") }
async fn get_poster(&self, _: &PosterPath) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
}

#[async_trait]
impl AuthService for PanicAuth {
async fn generate_token(&self, _: &UserId) -> Result<GeneratedToken, DomainError> { panic!("unexpected") }
async fn validate_token(&self, _: &str) -> Result<UserId, DomainError> { panic!("unexpected") }
}

#[async_trait]
impl PasswordHasher for PanicHasher {
async fn hash(&self, _: &str) -> Result<PasswordHash, DomainError> { panic!("unexpected") }
async fn verify(&self, _: &str, _: &PasswordHash) -> Result<bool, DomainError> { panic!("unexpected") }
}

#[async_trait]
impl UserRepository for PanicUserRepo {
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") }
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") }
async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<User>, DomainError> { panic!("unexpected") }
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> { panic!("unexpected") }
}

#[async_trait]
impl EventPublisher for NoopPublisher {
async fn publish(&self, _: &DomainEvent) -> Result<(), DomainError> { Ok(()) }
}

// Context wired entirely from panic stubs: the handler under test must
// not touch any port when it receives an event it does not care about.
fn panic_ctx() -> AppContext {
AppContext {
repository: Arc::new(PanicRepo),
metadata_client: Arc::new(PanicMetadata),
poster_fetcher: Arc::new(PanicFetcher),
poster_storage: Arc::new(PanicStorage),
event_publisher: Arc::new(NoopPublisher),
auth_service: Arc::new(PanicAuth),
password_hasher: Arc::new(PanicHasher),
user_repository: Arc::new(PanicUserRepo),
config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
}
}

// A ReviewLogged event is not MovieDiscovered, so the handler must return
// Ok(()) without dispatching into any service (which would panic).
#[tokio::test]
async fn review_logged_is_ignored() {
let handler = PosterSyncHandler::new(panic_ctx(), 3);
let event = DomainEvent::ReviewLogged {
review_id: ReviewId::generate(),
movie_id: MovieId::generate(),
user_id: UserId::generate(),
rating: Rating::new(4).unwrap(),
watched_at: chrono::NaiveDate::from_ymd_opt(2024, 1, 1).unwrap().and_hms_opt(0, 0, 0).unwrap(),
};
assert!(handler.handle(&event).await.is_ok());
}
}
|
||||
@@ -1,6 +1,7 @@
|
||||
use axum::{
|
||||
extract::{FromRef, FromRequestParts},
|
||||
http::{header::AUTHORIZATION, request::Parts},
|
||||
http::{header, header::AUTHORIZATION, request::Parts},
|
||||
response::{IntoResponse, Redirect},
|
||||
};
|
||||
use domain::{errors::DomainError, value_objects::UserId};
|
||||
|
||||
@@ -36,6 +37,64 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
/// Cookie-based extractor yielding `Some(UserId)` when a valid `token`
/// cookie is present and `None` otherwise; it never rejects the request.
pub struct OptionalCookieUser(pub Option<UserId>);
/// Cookie-based extractor that requires a valid `token` cookie;
/// otherwise extraction rejects with a redirect to `/login`.
pub struct RequiredCookieUser(pub UserId);
|
||||
|
||||
fn extract_token_from_cookie(parts: &Parts) -> Option<String> {
|
||||
parts
|
||||
.headers
|
||||
.get(header::COOKIE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|cookies| {
|
||||
cookies
|
||||
.split(';')
|
||||
.find_map(|c| c.trim().strip_prefix("token=").map(str::to_string))
|
||||
})
|
||||
}
|
||||
|
||||
impl<S> FromRequestParts<S> for OptionalCookieUser
|
||||
where
|
||||
AppState: FromRef<S>,
|
||||
S: Send + Sync,
|
||||
{
|
||||
type Rejection = std::convert::Infallible;
|
||||
|
||||
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
|
||||
let app_state = AppState::from_ref(state);
|
||||
let Some(token) = extract_token_from_cookie(parts) else {
|
||||
return Ok(OptionalCookieUser(None));
|
||||
};
|
||||
let user_id = app_state
|
||||
.app_ctx
|
||||
.auth_service
|
||||
.validate_token(&token)
|
||||
.await
|
||||
.ok();
|
||||
Ok(OptionalCookieUser(user_id))
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> FromRequestParts<S> for RequiredCookieUser
|
||||
where
|
||||
AppState: FromRef<S>,
|
||||
S: Send + Sync,
|
||||
{
|
||||
type Rejection = axum::response::Response;
|
||||
|
||||
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
|
||||
let app_state = AppState::from_ref(state);
|
||||
let token = extract_token_from_cookie(parts)
|
||||
.ok_or_else(|| Redirect::to("/login").into_response())?;
|
||||
let user_id = app_state
|
||||
.app_ctx
|
||||
.auth_service
|
||||
.validate_token(&token)
|
||||
.await
|
||||
.map_err(|_| Redirect::to("/login").into_response())?;
|
||||
Ok(RequiredCookieUser(user_id))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -72,16 +131,29 @@ mod tests {
|
||||
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
||||
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
|
||||
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
|
||||
}
|
||||
|
||||
struct PanicRenderer;
|
||||
impl crate::ports::HtmlRenderer for PanicRenderer {
|
||||
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>) -> Result<String, String> { panic!() }
|
||||
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
|
||||
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
|
||||
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
|
||||
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
|
||||
}
|
||||
|
||||
struct PanicRssRenderer;
|
||||
impl crate::ports::RssFeedRenderer for PanicRssRenderer {
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry]) -> Result<String, String> { panic!() }
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
|
||||
}
|
||||
|
||||
struct PanicMeta; struct PanicFetcher; struct PanicStorage; struct PanicEvent; struct PanicHasher; struct PanicAuth; struct PanicUserRepo;
|
||||
@@ -91,7 +163,7 @@ mod tests {
|
||||
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
|
||||
|
||||
let state = crate::state::AppState {
|
||||
app_ctx: AppContext {
|
||||
@@ -103,7 +175,7 @@ mod tests {
|
||||
auth_service: Arc::new(PanicAuth),
|
||||
password_hasher: Arc::new(PanicHasher),
|
||||
user_repository: Arc::new(PanicUserRepo),
|
||||
config: application::config::AppConfig { allow_registration: false },
|
||||
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(PanicRenderer),
|
||||
rss_renderer: Arc::new(PanicRssRenderer),
|
||||
@@ -122,4 +194,215 @@ mod tests {
|
||||
|
||||
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
|
||||
}
|
||||
|
||||
// Reusable helpers for cookie extractor tests

// Echoes the extracted user id, or "none" when no valid cookie was present.
async fn optional_cookie_handler(user: OptionalCookieUser) -> String {
match user.0 {
Some(id) => id.value().to_string(),
None => "none".to_string(),
}
}

// Only reachable when RequiredCookieUser extraction succeeded.
async fn required_cookie_handler(user: RequiredCookieUser) -> String {
user.0.value().to_string()
}

fn test_router_optional(state: crate::state::AppState) -> Router {
Router::new()
.route("/optional", get(optional_cookie_handler))
.with_state(state)
}

fn test_router_required(state: crate::state::AppState) -> Router {
Router::new()
.route("/required", get(required_cookie_handler))
.with_state(state)
}

// Auth service whose validate_token always fails, for exercising the
// extractor rejection (redirect) path.
struct RejectingAuth;
#[async_trait::async_trait]
impl domain::ports::AuthService for RejectingAuth {
async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() }
async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> {
Err(domain::errors::DomainError::Unauthorized("bad token".into()))
}
}
|
||||
|
||||
fn panic_state() -> crate::state::AppState {
|
||||
use std::sync::Arc;
|
||||
use application::context::AppContext;
|
||||
struct PanicRepo2;
|
||||
#[async_trait::async_trait]
|
||||
impl domain::ports::MovieRepository for PanicRepo2 {
|
||||
async fn get_movie_by_external_id(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_movie_by_id(&self, _: &domain::value_objects::MovieId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_movies_by_title_and_year(&self, _: &domain::value_objects::MovieTitle, _: &domain::value_objects::ReleaseYear) -> Result<Vec<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn upsert_movie(&self, _: &domain::models::Movie) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
||||
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
|
||||
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
|
||||
}
|
||||
struct PanicMeta2; struct PanicFetcher2; struct PanicStorage2; struct PanicEvent2; struct PanicHasher2; struct PanicUserRepo2;
|
||||
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta2 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher2 { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage2 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent2 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher2 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth2 { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo2 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
|
||||
struct PanicRenderer2;
|
||||
impl crate::ports::HtmlRenderer for PanicRenderer2 {
|
||||
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
|
||||
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
|
||||
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
|
||||
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
|
||||
}
|
||||
struct PanicRssRenderer2;
|
||||
impl crate::ports::RssFeedRenderer for PanicRssRenderer2 {
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
|
||||
}
|
||||
struct PanicAuth2;
|
||||
crate::state::AppState {
|
||||
app_ctx: AppContext {
|
||||
repository: Arc::new(PanicRepo2),
|
||||
metadata_client: Arc::new(PanicMeta2),
|
||||
poster_fetcher: Arc::new(PanicFetcher2),
|
||||
poster_storage: Arc::new(PanicStorage2),
|
||||
event_publisher: Arc::new(PanicEvent2),
|
||||
auth_service: Arc::new(PanicAuth2),
|
||||
password_hasher: Arc::new(PanicHasher2),
|
||||
user_repository: Arc::new(PanicUserRepo2),
|
||||
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(PanicRenderer2),
|
||||
rss_renderer: Arc::new(PanicRssRenderer2),
|
||||
}
|
||||
}
|
||||
|
||||
fn rejecting_state() -> crate::state::AppState {
|
||||
use std::sync::Arc;
|
||||
use application::context::AppContext;
|
||||
struct PanicRepo3;
|
||||
#[async_trait::async_trait]
|
||||
impl domain::ports::MovieRepository for PanicRepo3 {
|
||||
async fn get_movie_by_external_id(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_movie_by_id(&self, _: &domain::value_objects::MovieId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_movies_by_title_and_year(&self, _: &domain::value_objects::MovieTitle, _: &domain::value_objects::ReleaseYear) -> Result<Vec<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn upsert_movie(&self, _: &domain::models::Movie) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
||||
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
|
||||
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
|
||||
}
|
||||
struct PanicMeta3; struct PanicFetcher3; struct PanicStorage3; struct PanicEvent3; struct PanicHasher3; struct PanicUserRepo3;
|
||||
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta3 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher3 { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage3 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent3 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher3 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo3 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
|
||||
struct PanicRenderer3;
|
||||
impl crate::ports::HtmlRenderer for PanicRenderer3 {
|
||||
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
|
||||
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
|
||||
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
|
||||
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
|
||||
}
|
||||
struct PanicRssRenderer3;
|
||||
impl crate::ports::RssFeedRenderer for PanicRssRenderer3 {
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
|
||||
}
|
||||
crate::state::AppState {
|
||||
app_ctx: AppContext {
|
||||
repository: Arc::new(PanicRepo3),
|
||||
metadata_client: Arc::new(PanicMeta3),
|
||||
poster_fetcher: Arc::new(PanicFetcher3),
|
||||
poster_storage: Arc::new(PanicStorage3),
|
||||
event_publisher: Arc::new(PanicEvent3),
|
||||
auth_service: Arc::new(RejectingAuth),
|
||||
password_hasher: Arc::new(PanicHasher3),
|
||||
user_repository: Arc::new(PanicUserRepo3),
|
||||
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(PanicRenderer3),
|
||||
rss_renderer: Arc::new(PanicRssRenderer3),
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn optional_cookie_user_returns_none_without_cookie() {
|
||||
let app = test_router_optional(panic_state());
|
||||
let response = app
|
||||
.oneshot(Request::builder().uri("/optional").body(Body::empty()).unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(response.status(), StatusCode::OK);
|
||||
let body = axum::body::to_bytes(response.into_body(), usize::MAX).await.unwrap();
|
||||
assert_eq!(&body[..], b"none");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn optional_cookie_user_returns_none_with_invalid_token() {
|
||||
let app = test_router_optional(rejecting_state());
|
||||
let response = app
|
||||
.oneshot(
|
||||
Request::builder()
|
||||
.uri("/optional")
|
||||
.header("cookie", "token=bad.token.here")
|
||||
.body(Body::empty())
|
||||
.unwrap(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(response.status(), StatusCode::OK);
|
||||
let body = axum::body::to_bytes(response.into_body(), usize::MAX).await.unwrap();
|
||||
assert_eq!(&body[..], b"none");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn required_cookie_user_redirects_without_cookie() {
|
||||
let app = test_router_required(panic_state());
|
||||
let response = app
|
||||
.oneshot(Request::builder().uri("/required").body(Body::empty()).unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(response.status(), StatusCode::SEE_OTHER);
|
||||
assert_eq!(response.headers().get("location").unwrap(), "/login");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn required_cookie_user_redirects_with_invalid_token() {
|
||||
let app = test_router_required(rejecting_state());
|
||||
let response = app
|
||||
.oneshot(
|
||||
Request::builder()
|
||||
.uri("/required")
|
||||
.header("cookie", "token=bad.token.here")
|
||||
.body(Body::empty())
|
||||
.unwrap(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(response.status(), StatusCode::SEE_OTHER);
|
||||
assert_eq!(response.headers().get("location").unwrap(), "/login");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,106 +1,443 @@
|
||||
const DEFAULT_PAGE_LIMIT: u32 = 5;
|
||||
const RSS_FEED_LIMIT: u32 = 50;
|
||||
|
||||
pub mod html {
|
||||
use axum::{
|
||||
extract::{Query, State},
|
||||
extract::{Path, Query, State},
|
||||
http::{HeaderValue, StatusCode, header::SET_COOKIE},
|
||||
response::{Html, IntoResponse, Redirect},
|
||||
Form,
|
||||
};
|
||||
use chrono::NaiveDateTime;
|
||||
use chrono::Utc;
|
||||
use uuid::Uuid;
|
||||
|
||||
use application::{
|
||||
commands::LogReviewCommand,
|
||||
queries::GetDiaryQuery,
|
||||
use_cases::{get_diary, log_review},
|
||||
commands::{DeleteReviewCommand, LoginCommand, RegisterCommand},
|
||||
ports::{HtmlPageContext, LoginPageData, NewReviewPageData, RegisterPageData},
|
||||
use_cases::{delete_review, log_review, login as login_uc, register as register_uc},
|
||||
};
|
||||
use domain::{errors::DomainError, models::SortDirection};
|
||||
use domain::{errors::DomainError, value_objects::UserId};
|
||||
|
||||
use crate::{
|
||||
dtos::{DiaryQueryParams, LogReviewForm},
|
||||
errors::ApiError,
|
||||
extractors::AuthenticatedUser,
|
||||
dtos::{DiaryQueryParams, ErrorQuery, LoginForm, LogReviewData, LogReviewForm, RegisterForm},
|
||||
extractors::{OptionalCookieUser, RequiredCookieUser},
|
||||
state::AppState,
|
||||
};
|
||||
|
||||
pub async fn get_diary_page(
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<DiaryQueryParams>,
|
||||
) -> Result<impl IntoResponse, ApiError> {
|
||||
let query = GetDiaryQuery {
|
||||
limit: params.limit,
|
||||
offset: params.offset,
|
||||
sort_by: params.sort_by.as_deref().map(|s| {
|
||||
if s == "asc" {
|
||||
SortDirection::Ascending
|
||||
async fn build_page_context(state: &AppState, user_id: Option<UserId>) -> HtmlPageContext {
|
||||
let uuid = user_id.as_ref().map(|u| u.value());
|
||||
let user_email = if let Some(ref id) = user_id {
|
||||
state
|
||||
.app_ctx
|
||||
.user_repository
|
||||
.find_by_id(id)
|
||||
.await
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|u| u.email().value().to_string())
|
||||
} else {
|
||||
SortDirection::Descending
|
||||
}
|
||||
}),
|
||||
movie_id: params.movie_id,
|
||||
None
|
||||
};
|
||||
HtmlPageContext {
|
||||
user_email,
|
||||
user_id: uuid,
|
||||
register_enabled: state.app_ctx.config.allow_registration,
|
||||
rss_url: "/feed.rss".to_string(),
|
||||
page_title: "Movies Diary".to_string(),
|
||||
canonical_url: state.app_ctx.config.base_url.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
let page = get_diary::execute(&state.app_ctx, query).await?;
|
||||
fn encode_error(msg: &str) -> String {
|
||||
msg.replace(' ', "+")
|
||||
.replace('&', "%26")
|
||||
.replace('=', "%3D")
|
||||
.replace('"', "%22")
|
||||
}
|
||||
|
||||
fn secure_flag() -> &'static str {
|
||||
if std::env::var("SECURE_COOKIES").as_deref() == Ok("true") { "; Secure" } else { "" }
|
||||
}
|
||||
|
||||
fn set_cookie_header(token: &str, max_age: i64) -> (axum::http::HeaderName, HeaderValue) {
|
||||
let val = format!(
|
||||
"token={}; HttpOnly; Path=/; SameSite=Strict; Max-Age={}{}",
|
||||
token, max_age, secure_flag()
|
||||
);
|
||||
(SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"))
|
||||
}
|
||||
|
||||
pub async fn get_login_page(
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<ErrorQuery>,
|
||||
) -> impl IntoResponse {
|
||||
let ctx = HtmlPageContext {
|
||||
user_email: None,
|
||||
user_id: None,
|
||||
register_enabled: state.app_ctx.config.allow_registration,
|
||||
rss_url: "/feed.rss".to_string(),
|
||||
page_title: "Login — Movies Diary".to_string(),
|
||||
canonical_url: format!("{}/login", state.app_ctx.config.base_url),
|
||||
};
|
||||
let html = state
|
||||
.html_renderer
|
||||
.render_diary_page(&page)
|
||||
.map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
|
||||
.render_login_page(LoginPageData {
|
||||
ctx,
|
||||
error: params.error.as_deref(),
|
||||
})
|
||||
.expect("login template failed");
|
||||
Html(html)
|
||||
}
|
||||
|
||||
Ok(Html(html))
|
||||
pub async fn post_login(
|
||||
State(state): State<AppState>,
|
||||
Form(form): Form<LoginForm>,
|
||||
) -> impl IntoResponse {
|
||||
match login_uc::execute(
|
||||
&state.app_ctx,
|
||||
LoginCommand {
|
||||
email: form.email,
|
||||
password: form.password,
|
||||
},
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(result) => {
|
||||
let max_age = (result.expires_at - Utc::now()).num_seconds().max(0);
|
||||
let cookie = set_cookie_header(&result.token, max_age);
|
||||
([cookie], Redirect::to("/")).into_response()
|
||||
}
|
||||
Err(_) => Redirect::to("/login?error=Invalid+credentials").into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_logout() -> impl IntoResponse {
|
||||
let val = format!("token=; HttpOnly; Path=/; SameSite=Strict; Max-Age=0{}", secure_flag());
|
||||
let cookie = (SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"));
|
||||
([cookie], Redirect::to("/")).into_response()
|
||||
}
|
||||
|
||||
pub async fn get_register_page(
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<ErrorQuery>,
|
||||
) -> impl IntoResponse {
|
||||
if !state.app_ctx.config.allow_registration {
|
||||
return Redirect::to("/").into_response();
|
||||
}
|
||||
let ctx = HtmlPageContext {
|
||||
user_email: None,
|
||||
user_id: None,
|
||||
register_enabled: true,
|
||||
rss_url: "/feed.rss".to_string(),
|
||||
page_title: "Register — Movies Diary".to_string(),
|
||||
canonical_url: format!("{}/register", state.app_ctx.config.base_url),
|
||||
};
|
||||
let html = state
|
||||
.html_renderer
|
||||
.render_register_page(RegisterPageData {
|
||||
ctx,
|
||||
error: params.error.as_deref(),
|
||||
})
|
||||
.expect("register template failed");
|
||||
Html(html).into_response()
|
||||
}
|
||||
|
||||
pub async fn post_register(
|
||||
State(state): State<AppState>,
|
||||
Form(form): Form<RegisterForm>,
|
||||
) -> impl IntoResponse {
|
||||
if !state.app_ctx.config.allow_registration {
|
||||
return Redirect::to("/").into_response();
|
||||
}
|
||||
let email = form.email.clone();
|
||||
let password = form.password.clone();
|
||||
match register_uc::execute(
|
||||
&state.app_ctx,
|
||||
RegisterCommand {
|
||||
email: form.email,
|
||||
password: form.password,
|
||||
},
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(_) => {
|
||||
match login_uc::execute(&state.app_ctx, LoginCommand { email, password }).await {
|
||||
Ok(result) => {
|
||||
let max_age = (result.expires_at - Utc::now()).num_seconds().max(0);
|
||||
let cookie = set_cookie_header(&result.token, max_age);
|
||||
([cookie], Redirect::to("/")).into_response()
|
||||
}
|
||||
Err(_) => Redirect::to("/login").into_response(),
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
Redirect::to("/register?error=Registration+failed.+Please+try+again.").into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_new_review_page(
|
||||
RequiredCookieUser(user_id): RequiredCookieUser,
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<ErrorQuery>,
|
||||
) -> impl IntoResponse {
|
||||
let mut ctx = build_page_context(&state, Some(user_id)).await;
|
||||
ctx.page_title = "Log a Review — Movies Diary".to_string();
|
||||
ctx.canonical_url = format!("{}/reviews/new", state.app_ctx.config.base_url);
|
||||
let html = state
|
||||
.html_renderer
|
||||
.render_new_review_page(NewReviewPageData {
|
||||
ctx,
|
||||
error: params.error.as_deref(),
|
||||
})
|
||||
.expect("new_review template failed");
|
||||
Html(html)
|
||||
}
|
||||
|
||||
pub async fn post_review(
|
||||
State(state): State<AppState>,
|
||||
user: AuthenticatedUser,
|
||||
RequiredCookieUser(user_id): RequiredCookieUser,
|
||||
Form(form): Form<LogReviewForm>,
|
||||
) -> Result<impl IntoResponse, ApiError> {
|
||||
let watched_at = NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M:%S")
|
||||
.map_err(|_| {
|
||||
ApiError(DomainError::ValidationError(
|
||||
"Invalid watched_at format, expected YYYY-MM-DDTHH:MM:SS".into(),
|
||||
))
|
||||
})?;
|
||||
|
||||
let cmd = LogReviewCommand {
|
||||
external_metadata_id: form.external_metadata_id,
|
||||
manual_title: form.manual_title,
|
||||
manual_release_year: form.manual_release_year,
|
||||
manual_director: form.manual_director,
|
||||
user_id: user.0.value(),
|
||||
rating: form.rating,
|
||||
comment: form.comment,
|
||||
watched_at,
|
||||
) -> impl IntoResponse {
|
||||
let data = match LogReviewData::try_from(form) {
|
||||
Ok(d) => d,
|
||||
Err(_) => {
|
||||
return Redirect::to("/reviews/new?error=Invalid+date+format").into_response()
|
||||
}
|
||||
};
|
||||
|
||||
log_review::execute(&state.app_ctx, cmd).await?;
|
||||
match log_review::execute(&state.app_ctx, data.into_command(user_id.value())).await {
|
||||
Ok(_) => Redirect::to("/").into_response(),
|
||||
Err(e) => {
|
||||
let msg = encode_error(&e.to_string());
|
||||
Redirect::to(&format!("/reviews/new?error={}", msg)).into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Redirect::to("/diary"))
|
||||
pub async fn post_delete_review(
|
||||
State(state): State<AppState>,
|
||||
RequiredCookieUser(user_id): RequiredCookieUser,
|
||||
Path(review_id): Path<Uuid>,
|
||||
) -> impl IntoResponse {
|
||||
let cmd = DeleteReviewCommand {
|
||||
review_id,
|
||||
requesting_user_id: user_id.value(),
|
||||
};
|
||||
match delete_review::execute(&state.app_ctx, cmd).await {
|
||||
Ok(()) => Redirect::to("/").into_response(),
|
||||
Err(DomainError::NotFound(_)) => StatusCode::NOT_FOUND.into_response(),
|
||||
Err(DomainError::Unauthorized(_)) => StatusCode::FORBIDDEN.into_response(),
|
||||
Err(e) => {
|
||||
tracing::error!("delete_review html error: {:?}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR.into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_activity_feed(
|
||||
OptionalCookieUser(user_id): OptionalCookieUser,
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<DiaryQueryParams>,
|
||||
) -> impl IntoResponse {
|
||||
let ctx = build_page_context(&state, user_id).await;
|
||||
let query = application::queries::GetActivityFeedQuery {
|
||||
limit: params.limit,
|
||||
offset: params.offset,
|
||||
};
|
||||
match application::use_cases::get_activity_feed::execute(&state.app_ctx, query).await {
|
||||
Ok(entries) => {
|
||||
let limit = entries.limit;
|
||||
let offset = entries.offset;
|
||||
let has_more = (offset as u64).saturating_add(limit as u64) < entries.total_count;
|
||||
let data = application::ports::ActivityFeedPageData {
|
||||
ctx,
|
||||
current_offset: offset,
|
||||
has_more,
|
||||
limit,
|
||||
entries,
|
||||
};
|
||||
match state.html_renderer.render_activity_feed_page(data) {
|
||||
Ok(html) => Html(html).into_response(),
|
||||
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e).into_response(),
|
||||
}
|
||||
}
|
||||
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_users_list(
|
||||
OptionalCookieUser(user_id): OptionalCookieUser,
|
||||
State(state): State<AppState>,
|
||||
) -> impl IntoResponse {
|
||||
let mut ctx = build_page_context(&state, user_id).await;
|
||||
ctx.page_title = "Members — Movies Diary".to_string();
|
||||
ctx.canonical_url = format!("{}/users", state.app_ctx.config.base_url);
|
||||
match application::use_cases::get_users::execute(&state.app_ctx, application::queries::GetUsersQuery).await {
|
||||
Ok(users) => {
|
||||
let data = application::ports::UsersPageData { ctx, users };
|
||||
match state.html_renderer.render_users_page(data) {
|
||||
Ok(html) => Html(html).into_response(),
|
||||
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e).into_response(),
|
||||
}
|
||||
}
|
||||
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_user_profile(
|
||||
OptionalCookieUser(user_id): OptionalCookieUser,
|
||||
State(state): State<AppState>,
|
||||
Path(profile_user_uuid): Path<Uuid>,
|
||||
Query(params): Query<crate::dtos::ProfileQueryParams>,
|
||||
) -> impl IntoResponse {
|
||||
let mut ctx = build_page_context(&state, user_id).await;
|
||||
let view = params.view.unwrap_or_else(|| "recent".to_string());
|
||||
|
||||
let profile_user = match state.app_ctx.user_repository
|
||||
.find_by_id(&domain::value_objects::UserId::from_uuid(profile_user_uuid))
|
||||
.await
|
||||
{
|
||||
Ok(Some(u)) => u,
|
||||
Ok(None) => return (StatusCode::NOT_FOUND, "User not found").into_response(),
|
||||
Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
|
||||
};
|
||||
|
||||
let display_name = profile_user.email().value()
|
||||
.split('@').next().unwrap_or("User");
|
||||
ctx.page_title = format!("{}'s Diary — Movies Diary", display_name);
|
||||
ctx.canonical_url = format!("{}/users/{}", state.app_ctx.config.base_url, profile_user_uuid);
|
||||
|
||||
let query = application::queries::GetUserProfileQuery {
|
||||
user_id: profile_user_uuid,
|
||||
view: view.clone(),
|
||||
limit: params.limit,
|
||||
offset: params.offset,
|
||||
};
|
||||
|
||||
match application::use_cases::get_user_profile::execute(&state.app_ctx, query).await {
|
||||
Ok(profile) => {
|
||||
let (offset, has_more, limit) = profile.entries.as_ref()
|
||||
.map(|e| {
|
||||
let has_more = (e.offset as u64).saturating_add(e.limit as u64) < e.total_count;
|
||||
(e.offset, has_more, e.limit)
|
||||
})
|
||||
.unwrap_or((0, false, super::DEFAULT_PAGE_LIMIT));
|
||||
ctx.rss_url = format!("/users/{}/feed.rss", profile_user_uuid);
|
||||
let data = application::ports::ProfilePageData {
|
||||
ctx,
|
||||
profile_user_id: profile_user_uuid,
|
||||
profile_user_email: profile_user.email().value().to_string(),
|
||||
stats: profile.stats,
|
||||
view,
|
||||
entries: profile.entries,
|
||||
current_offset: offset,
|
||||
has_more,
|
||||
limit,
|
||||
history: profile.history,
|
||||
trends: profile.trends,
|
||||
};
|
||||
match state.html_renderer.render_profile_page(data) {
|
||||
Ok(html) => Html(html).into_response(),
|
||||
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e).into_response(),
|
||||
}
|
||||
}
|
||||
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub mod posters {
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::{StatusCode, header},
|
||||
response::IntoResponse,
|
||||
};
|
||||
|
||||
use domain::value_objects::PosterPath;
|
||||
|
||||
use crate::state::AppState;
|
||||
|
||||
pub async fn get_poster(
|
||||
State(state): State<AppState>,
|
||||
Path(path): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
let poster_path = match PosterPath::new(path) {
|
||||
Ok(p) => p,
|
||||
Err(_) => return StatusCode::BAD_REQUEST.into_response(),
|
||||
};
|
||||
match state.app_ctx.poster_storage.get_poster(&poster_path).await {
|
||||
Ok(bytes) => {
|
||||
let mime = infer::get(&bytes)
|
||||
.map(|t| t.mime_type())
|
||||
.unwrap_or("application/octet-stream");
|
||||
([(header::CONTENT_TYPE, mime)], bytes).into_response()
|
||||
}
|
||||
Err(_) => StatusCode::NOT_FOUND.into_response(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub mod rss {
|
||||
use axum::{
|
||||
extract::State,
|
||||
extract::{Path, State},
|
||||
http::header,
|
||||
response::IntoResponse,
|
||||
};
|
||||
use uuid::Uuid;
|
||||
|
||||
use application::{queries::GetDiaryQuery, use_cases::get_diary};
|
||||
use domain::{errors::DomainError, models::SortDirection};
|
||||
use domain::{errors::DomainError, models::SortDirection, value_objects::UserId};
|
||||
|
||||
use crate::{errors::ApiError, state::AppState};
|
||||
|
||||
pub async fn get_feed(State(state): State<AppState>) -> Result<impl IntoResponse, ApiError> {
|
||||
let query = GetDiaryQuery {
|
||||
limit: Some(50),
|
||||
limit: Some(super::RSS_FEED_LIMIT),
|
||||
offset: Some(0),
|
||||
sort_by: Some(SortDirection::Descending),
|
||||
movie_id: None,
|
||||
user_id: None,
|
||||
};
|
||||
let page = get_diary::execute(&state.app_ctx, query).await?;
|
||||
let xml = state
|
||||
.rss_renderer
|
||||
.render_feed(&page.items)
|
||||
.render_feed(&page.items, "Movie Diary")
|
||||
.map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
|
||||
Ok(([(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], xml))
|
||||
}
|
||||
|
||||
pub async fn get_user_feed(
|
||||
State(state): State<AppState>,
|
||||
Path(user_id): Path<Uuid>,
|
||||
) -> Result<impl IntoResponse, ApiError> {
|
||||
let user = state
|
||||
.app_ctx
|
||||
.user_repository
|
||||
.find_by_id(&UserId::from_uuid(user_id))
|
||||
.await
|
||||
.map_err(ApiError)?
|
||||
.ok_or_else(|| ApiError(DomainError::NotFound(format!("User {user_id}"))))?;
|
||||
|
||||
let query = GetDiaryQuery {
|
||||
limit: Some(super::RSS_FEED_LIMIT),
|
||||
offset: Some(0),
|
||||
sort_by: Some(SortDirection::Descending),
|
||||
movie_id: None,
|
||||
user_id: Some(user_id),
|
||||
};
|
||||
let page = get_diary::execute(&state.app_ctx, query).await?;
|
||||
|
||||
let display_name = user.email().value().split('@').next().unwrap_or("User");
|
||||
let title = format!("{}'s Movie Diary", display_name);
|
||||
|
||||
let xml = state
|
||||
.rss_renderer
|
||||
.render_feed(&page.items, &title)
|
||||
.map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
|
||||
|
||||
Ok(([(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], xml))
|
||||
}
|
||||
}
|
||||
|
||||
pub mod api {
|
||||
@@ -110,17 +447,16 @@ pub mod api {
|
||||
http::StatusCode,
|
||||
response::IntoResponse,
|
||||
};
|
||||
use chrono::NaiveDateTime;
|
||||
use uuid::Uuid;
|
||||
|
||||
use application::{
|
||||
commands::{LoginCommand, LogReviewCommand, RegisterCommand, SyncPosterCommand},
|
||||
queries::{GetDiaryQuery, GetReviewHistoryQuery},
|
||||
use_cases::{get_diary, get_review_history, log_review, login as login_uc, register as register_uc, sync_poster},
|
||||
commands::{DeleteReviewCommand, LoginCommand, RegisterCommand, SyncPosterCommand},
|
||||
queries::GetReviewHistoryQuery,
|
||||
use_cases::{delete_review, get_diary, get_review_history, log_review, login as login_uc, register as register_uc, sync_poster},
|
||||
};
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::{DiaryEntry, Movie, Review, SortDirection},
|
||||
models::{DiaryEntry, Movie, Review},
|
||||
services::review_history::Trend,
|
||||
value_objects::MovieId,
|
||||
};
|
||||
@@ -128,7 +464,8 @@ pub mod api {
|
||||
use crate::{
|
||||
dtos::{
|
||||
DiaryEntryDto, DiaryQueryParams, DiaryResponse, LoginRequest, LoginResponse,
|
||||
LogReviewRequest, MovieDto, RegisterRequest, ReviewDto, ReviewHistoryResponse,
|
||||
LogReviewData, LogReviewRequest, MovieDto, RegisterRequest, ReviewDto,
|
||||
ReviewHistoryResponse,
|
||||
},
|
||||
errors::ApiError,
|
||||
extractors::AuthenticatedUser,
|
||||
@@ -139,20 +476,7 @@ pub mod api {
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<DiaryQueryParams>,
|
||||
) -> Result<Json<DiaryResponse>, ApiError> {
|
||||
let query = GetDiaryQuery {
|
||||
limit: params.limit,
|
||||
offset: params.offset,
|
||||
sort_by: params.sort_by.as_deref().map(|s| {
|
||||
if s == "asc" {
|
||||
SortDirection::Ascending
|
||||
} else {
|
||||
SortDirection::Descending
|
||||
}
|
||||
}),
|
||||
movie_id: params.movie_id,
|
||||
};
|
||||
|
||||
let page = get_diary::execute(&state.app_ctx, query).await?;
|
||||
let page = get_diary::execute(&state.app_ctx, params.into()).await?;
|
||||
|
||||
Ok(Json(DiaryResponse {
|
||||
items: page.items.iter().map(entry_to_dto).collect(),
|
||||
@@ -189,26 +513,8 @@ pub mod api {
|
||||
user: AuthenticatedUser,
|
||||
Json(req): Json<LogReviewRequest>,
|
||||
) -> Result<impl IntoResponse, ApiError> {
|
||||
let watched_at = NaiveDateTime::parse_from_str(&req.watched_at, "%Y-%m-%dT%H:%M:%S")
|
||||
.map_err(|_| {
|
||||
ApiError(DomainError::ValidationError(
|
||||
"Invalid watched_at format, expected YYYY-MM-DDTHH:MM:SS".into(),
|
||||
))
|
||||
})?;
|
||||
|
||||
let cmd = LogReviewCommand {
|
||||
external_metadata_id: req.external_metadata_id,
|
||||
manual_title: req.manual_title,
|
||||
manual_release_year: req.manual_release_year,
|
||||
manual_director: req.manual_director,
|
||||
user_id: user.0.value(),
|
||||
rating: req.rating,
|
||||
comment: req.comment,
|
||||
watched_at,
|
||||
};
|
||||
|
||||
log_review::execute(&state.app_ctx, cmd).await?;
|
||||
|
||||
let data = LogReviewData::try_from(req).map_err(ApiError)?;
|
||||
log_review::execute(&state.app_ctx, data.into_command(user.0.value())).await?;
|
||||
Ok(StatusCode::CREATED)
|
||||
}
|
||||
|
||||
@@ -275,6 +581,26 @@ pub mod api {
|
||||
Ok(StatusCode::CREATED)
|
||||
}
|
||||
|
||||
pub async fn delete_review(
|
||||
State(state): State<AppState>,
|
||||
AuthenticatedUser(user_id): AuthenticatedUser,
|
||||
Path(review_id): Path<Uuid>,
|
||||
) -> impl IntoResponse {
|
||||
let cmd = DeleteReviewCommand {
|
||||
review_id,
|
||||
requesting_user_id: user_id.value(),
|
||||
};
|
||||
match delete_review::execute(&state.app_ctx, cmd).await {
|
||||
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
||||
Err(DomainError::NotFound(_)) => StatusCode::NOT_FOUND.into_response(),
|
||||
Err(DomainError::Unauthorized(_)) => StatusCode::FORBIDDEN.into_response(),
|
||||
Err(e) => {
|
||||
tracing::error!("delete_review error: {:?}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR.into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn movie_to_dto(movie: &Movie) -> MovieDto {
|
||||
MovieDto {
|
||||
id: movie.id().value(),
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
pub mod event_handlers;
|
||||
pub mod dtos;
|
||||
pub mod errors;
|
||||
pub mod extractors;
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Context;
|
||||
use async_trait::async_trait;
|
||||
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
|
||||
use event_publisher::{EventPublisherConfig, NoopEventPublisher, create_event_channel};
|
||||
use presentation::event_handlers::PosterSyncHandler;
|
||||
use std::str::FromStr;
|
||||
|
||||
use sqlx::SqlitePool;
|
||||
use sqlx::sqlite::SqliteConnectOptions;
|
||||
use tokio::net::TcpListener;
|
||||
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
|
||||
|
||||
@@ -18,15 +21,6 @@ use template_askama::AskamaHtmlRenderer;
|
||||
|
||||
use presentation::{routes, state::AppState};
|
||||
|
||||
struct StubEventPublisher;
|
||||
|
||||
#[async_trait]
|
||||
impl EventPublisher for StubEventPublisher {
|
||||
async fn publish(&self, _event: &DomainEvent) -> Result<(), DomainError> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> anyhow::Result<()> {
|
||||
dotenvy::dotenv().ok();
|
||||
@@ -38,8 +32,11 @@ async fn main() -> anyhow::Result<()> {
|
||||
|
||||
let app = routes::build_router(state);
|
||||
|
||||
let listener = TcpListener::bind("0.0.0.0:3000").await?;
|
||||
tracing::info!("Listening on 0.0.0.0:3000");
|
||||
let host = std::env::var("HOST").unwrap_or_else(|_| "0.0.0.0".to_string());
|
||||
let port = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string());
|
||||
let addr = format!("{}:{}", host, port);
|
||||
let listener = TcpListener::bind(&addr).await?;
|
||||
tracing::info!("Listening on {}", addr);
|
||||
axum::serve(listener, app).await?;
|
||||
|
||||
Ok(())
|
||||
@@ -51,7 +48,13 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
|
||||
let app_config = AppConfig::from_env();
|
||||
let omdb_api_key = std::env::var("OMDB_API_KEY").context("OMDB_API_KEY must be set")?;
|
||||
|
||||
let pool = SqlitePool::connect("sqlite://reviews.db")
|
||||
let database_url = std::env::var("DATABASE_URL").context("DATABASE_URL must be set")?;
|
||||
let opts = SqliteConnectOptions::from_str(&database_url)
|
||||
.context("Invalid DATABASE_URL")?
|
||||
.create_if_missing(true)
|
||||
.journal_mode(sqlx::sqlite::SqliteJournalMode::Wal)
|
||||
.busy_timeout(std::time::Duration::from_secs(5));
|
||||
let pool = SqlitePool::connect_with(opts)
|
||||
.await
|
||||
.context("Failed to connect to SQLite database")?;
|
||||
|
||||
@@ -62,17 +65,48 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
|
||||
.map_err(|e| anyhow::anyhow!("{}", e))
|
||||
.context("Database migration failed")?;
|
||||
|
||||
let user_repo = SqliteUserRepository::new(pool);
|
||||
use domain::ports::{
|
||||
AuthService, MetadataClient, MovieRepository, PasswordHasher,
|
||||
PosterFetcherClient, PosterStorage, UserRepository,
|
||||
};
|
||||
let repository: Arc<dyn MovieRepository> = Arc::new(movie_repo);
|
||||
let user_repository: Arc<dyn UserRepository> = Arc::new(SqliteUserRepository::new(pool));
|
||||
let metadata_client: Arc<dyn MetadataClient> = Arc::new(MetadataClientImpl::new_omdb(omdb_api_key));
|
||||
let poster_fetcher: Arc<dyn PosterFetcherClient> = Arc::new(ReqwestPosterFetcher::new(PosterFetcherConfig::from_env())?);
|
||||
let poster_storage: Arc<dyn PosterStorage> = Arc::new(PosterStorageAdapter::from_config(storage_config)?);
|
||||
let auth_service: Arc<dyn AuthService> = Arc::new(JwtAuthService::new(auth_config));
|
||||
let password_hasher: Arc<dyn PasswordHasher> = Arc::new(Argon2PasswordHasher);
|
||||
|
||||
// Build a context for the poster handler. sync_poster doesn't publish events,
|
||||
// so a noop publisher here is safe and avoids a circular dependency.
|
||||
let handler_ctx = AppContext {
|
||||
repository: Arc::clone(&repository),
|
||||
metadata_client: Arc::clone(&metadata_client),
|
||||
poster_fetcher: Arc::clone(&poster_fetcher),
|
||||
poster_storage: Arc::clone(&poster_storage),
|
||||
event_publisher: Arc::new(NoopEventPublisher),
|
||||
auth_service: Arc::clone(&auth_service),
|
||||
password_hasher: Arc::clone(&password_hasher),
|
||||
user_repository: Arc::clone(&user_repository),
|
||||
config: app_config.clone(),
|
||||
};
|
||||
|
||||
let poster_handler = PosterSyncHandler::new(handler_ctx, 3);
|
||||
let (event_publisher, event_worker) = create_event_channel(
|
||||
EventPublisherConfig::from_env(),
|
||||
vec![Box::new(poster_handler)],
|
||||
);
|
||||
tokio::spawn(event_worker.run());
|
||||
|
||||
let app_ctx = AppContext {
|
||||
repository: Arc::new(movie_repo),
|
||||
metadata_client: Arc::new(MetadataClientImpl::new_omdb(omdb_api_key)),
|
||||
poster_fetcher: Arc::new(ReqwestPosterFetcher::new(PosterFetcherConfig::from_env())?),
|
||||
poster_storage: Arc::new(PosterStorageAdapter::from_config(storage_config)?),
|
||||
event_publisher: Arc::new(StubEventPublisher),
|
||||
auth_service: Arc::new(JwtAuthService::new(auth_config)),
|
||||
password_hasher: Arc::new(Argon2PasswordHasher),
|
||||
user_repository: Arc::new(user_repo),
|
||||
repository,
|
||||
metadata_client,
|
||||
poster_fetcher,
|
||||
poster_storage,
|
||||
event_publisher: Arc::new(event_publisher),
|
||||
auth_service,
|
||||
password_hasher,
|
||||
user_repository,
|
||||
config: app_config,
|
||||
};
|
||||
|
||||
@@ -80,8 +114,7 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
|
||||
app_ctx,
|
||||
html_renderer: Arc::new(AskamaHtmlRenderer::new()),
|
||||
rss_renderer: Arc::new(RssAdapter::new(
|
||||
"Movie Diary".into(),
|
||||
"http://localhost:3000".into(),
|
||||
std::env::var("BASE_URL").unwrap_or_else(|_| "http://localhost:3000".into()),
|
||||
)),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,8 +1,52 @@
|
||||
use axum::{Router, routing};
|
||||
use std::sync::{
|
||||
Arc,
|
||||
atomic::{AtomicU64, Ordering},
|
||||
};
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
|
||||
use axum::{Router, http::StatusCode, middleware, response::IntoResponse, routing};
|
||||
use tower_http::{services::ServeDir, trace::TraceLayer};
|
||||
|
||||
use crate::{handlers, state::AppState};
|
||||
|
||||
const API_RATE_LIMIT: u64 = 20; // 20 requests per minute globally for API routes
|
||||
|
||||
/// Simple global rate limiter: tracks request count per 60-second window.
|
||||
/// Not per-IP — suitable for a low-traffic personal app.
|
||||
#[derive(Clone)]
|
||||
struct RateLimiter {
|
||||
window: Arc<AtomicU64>,
|
||||
count: Arc<AtomicU64>,
|
||||
limit: u64,
|
||||
}
|
||||
|
||||
impl RateLimiter {
|
||||
fn new(limit: u64) -> Self {
|
||||
Self {
|
||||
window: Arc::new(AtomicU64::new(0)),
|
||||
count: Arc::new(AtomicU64::new(0)),
|
||||
limit,
|
||||
}
|
||||
}
|
||||
|
||||
fn check(&self) -> bool {
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs()
|
||||
/ 60;
|
||||
let prev = self.window.load(Ordering::Acquire);
|
||||
if now != prev {
|
||||
// compare_exchange ensures only one thread wins the window reset
|
||||
if self.window.compare_exchange(prev, now, Ordering::AcqRel, Ordering::Relaxed).is_ok() {
|
||||
self.count.store(1, Ordering::Release);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
self.count.fetch_add(1, Ordering::Relaxed) + 1 <= self.limit
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_router(state: AppState) -> Router {
|
||||
Router::new()
|
||||
.merge(html_routes())
|
||||
@@ -13,13 +57,73 @@ pub fn build_router(state: AppState) -> Router {
|
||||
}
|
||||
|
||||
fn html_routes() -> Router<AppState> {
|
||||
// Auth routes: 20 requests per minute globally.
|
||||
let limiter = RateLimiter::new(API_RATE_LIMIT);
|
||||
let auth = Router::new()
|
||||
.route(
|
||||
"/login",
|
||||
routing::get(handlers::html::get_login_page).post(handlers::html::post_login),
|
||||
)
|
||||
.route("/logout", routing::get(handlers::html::get_logout))
|
||||
.route(
|
||||
"/register",
|
||||
routing::get(handlers::html::get_register_page).post(handlers::html::post_register),
|
||||
)
|
||||
.route_layer(middleware::from_fn(
|
||||
move |req: axum::extract::Request, next: middleware::Next| {
|
||||
let limiter = limiter.clone();
|
||||
async move {
|
||||
if limiter.check() {
|
||||
next.run(req).await
|
||||
} else {
|
||||
StatusCode::TOO_MANY_REQUESTS.into_response()
|
||||
}
|
||||
}
|
||||
},
|
||||
));
|
||||
|
||||
Router::new()
|
||||
.route("/diary", routing::get(handlers::html::get_diary_page))
|
||||
.route("/", routing::get(handlers::html::get_activity_feed))
|
||||
.route("/users", routing::get(handlers::html::get_users_list))
|
||||
.route(
|
||||
"/users/{id}",
|
||||
routing::get(handlers::html::get_user_profile),
|
||||
)
|
||||
.merge(auth)
|
||||
.route(
|
||||
"/reviews/new",
|
||||
routing::get(handlers::html::get_new_review_page),
|
||||
)
|
||||
.route("/reviews", routing::post(handlers::html::post_review))
|
||||
.route(
|
||||
"/reviews/{id}/delete",
|
||||
routing::post(handlers::html::post_delete_review),
|
||||
)
|
||||
.route(
|
||||
"/posters/{path}",
|
||||
routing::get(handlers::posters::get_poster),
|
||||
)
|
||||
.route("/feed.rss", routing::get(handlers::rss::get_feed))
|
||||
.route(
|
||||
"/users/{id}/feed.rss",
|
||||
routing::get(handlers::rss::get_user_feed),
|
||||
)
|
||||
}
|
||||
|
||||
fn api_routes() -> Router<AppState> {
|
||||
let limiter = RateLimiter::new(API_RATE_LIMIT);
|
||||
let auth_rate_limit =
|
||||
middleware::from_fn(move |req: axum::extract::Request, next: middleware::Next| {
|
||||
let limiter = limiter.clone();
|
||||
async move {
|
||||
if limiter.check() {
|
||||
next.run(req).await
|
||||
} else {
|
||||
StatusCode::TOO_MANY_REQUESTS.into_response()
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Router::new().nest(
|
||||
"/api",
|
||||
Router::new()
|
||||
@@ -29,11 +133,16 @@ fn api_routes() -> Router<AppState> {
|
||||
routing::get(handlers::api::get_review_history),
|
||||
)
|
||||
.route("/reviews", routing::post(handlers::api::post_review))
|
||||
.route(
|
||||
"/reviews/{id}",
|
||||
routing::delete(handlers::api::delete_review),
|
||||
)
|
||||
.route(
|
||||
"/movies/{id}/sync-poster",
|
||||
routing::post(handlers::api::sync_poster),
|
||||
)
|
||||
.route("/auth/login", routing::post(handlers::api::login))
|
||||
.route("/auth/register", routing::post(handlers::api::register)),
|
||||
.route("/auth/register", routing::post(handlers::api::register))
|
||||
.route_layer(auth_rate_limit),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -84,6 +84,8 @@ struct NobodyUserRepo;
|
||||
impl UserRepository for NobodyUserRepo {
|
||||
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { Ok(None) }
|
||||
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!() }
|
||||
async fn find_by_id(&self, _: &UserId) -> Result<Option<User>, DomainError> { panic!() }
|
||||
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> { panic!() }
|
||||
}
|
||||
|
||||
async fn test_app() -> Router {
|
||||
@@ -103,10 +105,10 @@ async fn test_app() -> Router {
|
||||
auth_service: Arc::new(PanicAuth),
|
||||
password_hasher: Arc::new(PanicHasher),
|
||||
user_repository: Arc::new(NobodyUserRepo),
|
||||
config: AppConfig { allow_registration: false },
|
||||
config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(AskamaHtmlRenderer::new()),
|
||||
rss_renderer: Arc::new(RssAdapter::new("Movie Diary".into(), "http://localhost:3000".into())),
|
||||
rss_renderer: Arc::new(RssAdapter::new("http://localhost:3000".into())),
|
||||
};
|
||||
|
||||
routes::build_router(state)
|
||||
@@ -127,7 +129,7 @@ async fn get_api_diary_returns_empty_list() {
|
||||
|
||||
assert_eq!(json["total_count"], 0);
|
||||
assert_eq!(json["items"], serde_json::json!([]));
|
||||
assert_eq!(json["limit"], 20);
|
||||
assert_eq!(json["limit"], 5);
|
||||
assert_eq!(json["offset"], 0);
|
||||
}
|
||||
|
||||
|
||||
26
overview.md
26
overview.md
@@ -4,32 +4,36 @@ This project is a self-hosted, server-side rendered movie logging system designe
|
||||
|
||||
## Core Principles
|
||||
|
||||
* **Zero-JS & Bloat-Free:** The web interface relies strictly on standard HTML form submissions and server-side rendering (via Askama). There is absolutely no JavaScript, no Single Page Application (SPA) overhead, and no client-side state to manage.
|
||||
* **Personal & Embeddable:** It is designed for a single actor. Rather than being a commercial product or a bloated social network, it functions as a highly personal, iframe-ready widget for a personal site.
|
||||
* **Append-Only Ledger:** Reviews are not rows to be updated; they are immutable events. The system tracks a chronological history of viewings for the same movie, separating domain time (when it was watched) from system time (when it was logged), allowing the user to track how their cinematic taste evolves over time.
|
||||
* **Pristine Architecture:** It strictly adheres to Domain-Driven Design (DDD) and Hexagonal Architecture (Ports and Adapters). The core domain consists of strong value objects and pure business logic, entirely decoupled from external infrastructure like the SQLite database, TMDB API, and Axum HTTP router.
|
||||
* **Frictionless "Lazy" Logging:** While the backend is robust, the user experience is minimal. The system automatically fetches rich metadata and poster art in the background via external APIs, requiring only a TMDB ID and a 0-5 rating. It supports both classic HTML forms and a secure REST API for quick terminal or iOS shortcut entries.
|
||||
* **Old-School Syndication:** Instead of jumping into complex federalized moderation, the project embraces classic, open web standards by generating a native RSS/Atom feed, allowing others to subscribe to the movie diary without needing an account.
|
||||
|
||||
# common
|
||||
A tiny utility crate. This holds custom application error types (e.g., `AppError`) using a crate `thiserror`. Every other crate can use this so the project has unified error handling from the database right up to the HTTP response.
|
||||
- **Zero-JS & Bloat-Free:** The web interface relies strictly on standard HTML form submissions and server-side rendering (via Askama). There is absolutely no JavaScript, no Single Page Application (SPA) overhead, and no client-side state to manage.
|
||||
- **Personal & Embeddable:** It is designed for a single actor. Rather than being a commercial product or a bloated social network, it functions as a highly personal, iframe-ready widget for a personal site.
|
||||
- **Append-Only Ledger:** Reviews are not rows to be updated; they are immutable events. The system tracks a chronological history of viewings for the same movie, separating domain time (when it was watched) from system time (when it was logged), allowing the user to track how their cinematic taste evolves over time.
|
||||
- **Pristine Architecture:** It strictly adheres to Domain-Driven Design (DDD) and Hexagonal Architecture (Ports and Adapters). The core domain consists of strong value objects and pure business logic, entirely decoupled from external infrastructure like the SQLite database, TMDB API, and Axum HTTP router.
|
||||
- **Frictionless "Lazy" Logging:** While the backend is robust, the user experience is minimal. The system automatically fetches rich metadata and poster art in the background via external APIs, requiring only a TMDB ID and a 0-5 rating. It supports both classic HTML forms and a secure REST API for quick terminal or iOS shortcut entries.
|
||||
- **Old-School Syndication:** Instead of jumping into complex federalized moderation, the project embraces classic, open web standards by generating a native RSS/Atom feed, allowing others to subscribe to the movie diary without needing an account.
|
||||
|
||||
# domain
|
||||
|
||||
The absolute center. It has zero dependencies on other workspace crates. It holds pure data structures like `Movie`, `Review`, and `User`. This is also where project define the interfaces (Traits) like `MovieRepository`, `MetadataClient`, and `TokenValidator`. It does not know about infrastructure implementations like `SQLite` or JWTs.
|
||||
|
||||
# application
|
||||
|
||||
It sits between web endpoints and domain. It holds "Use Cases" (e.g., `LogNewMovie`, `GetRecentWatches`). When a request comes in, this crate coordinates the workflow: it asks the adapter-meta for the TMDB data, validates the 0-5 rating using domain rules, and tells database adapter to save it.
|
||||
|
||||
# sqlite
|
||||
|
||||
`SQLite` and `sqlx` implementation. It implements the `MovieRepository` trait defined in the domain.
|
||||
|
||||
# metadata
|
||||
|
||||
HTTP client (likely `reqwest`) that talks to `TMDB` or `OMDb`. It implements the `MetadataClient` trait.
|
||||
|
||||
# auth
|
||||
|
||||
This handles the JWT logic using a crate like `jsonwebtoken`. It issues the tokens when you log in and implements a `TokenValidator` trait to verify claims (like your admin ID) when a request is made.
|
||||
|
||||
# presentation
|
||||
|
||||
It wires all the traits and adapters together into Axum's application state. Inside this crate, you can split your routing into two clean modules:
|
||||
* `html_routes`: Uses Askama templates, handles standard form submissions, and checks for the JWT in cookies.
|
||||
* `rest_routes`: Speaks purely in JSON, handles your background API calls, and checks for the JWT in the Bearer header.
|
||||
|
||||
- `html_routes`: Uses Askama templates, handles standard form submissions, and checks for the JWT in cookies.
|
||||
- `rest_routes`: Speaks purely in JSON, handles your background API calls, and checks for the JWT in the Bearer header.
|
||||
|
||||
BIN
static/background.avif
Normal file
BIN
static/background.avif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 43 KiB |
519
static/style.css
Normal file
519
static/style.css
Normal file
@@ -0,0 +1,519 @@
|
||||
*,
|
||||
*::before,
|
||||
*::after {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
:root {
|
||||
--glass-bg: rgba(255, 255, 255, 0.15);
|
||||
--glass-border: rgba(255, 255, 255, 0.35);
|
||||
--glass-shadow: 0 8px 32px oklch(85.2% 0.199 91.936 / 0.15);
|
||||
--glass-inset: inset 0 1px 0 rgba(255, 255, 255, 0.5);
|
||||
--blur: blur(12px);
|
||||
|
||||
--primary: oklch(85.2% 0.199 91.936);
|
||||
--primary-mid: oklch(89% 0.13 91.936);
|
||||
--primary-light: oklch(93% 0.07 91.936);
|
||||
--primary-glow: oklch(85.2% 0.199 91.936 / 0.3);
|
||||
|
||||
--text: #fff;
|
||||
--text-muted: rgba(255, 255, 255, 0.7);
|
||||
--text-light: rgba(255, 255, 255, 0.5);
|
||||
}
|
||||
|
||||
html {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: "Nunito", sans-serif;
|
||||
max-width: 720px;
|
||||
margin: 0 auto;
|
||||
padding: 20px;
|
||||
color: var(--text);
|
||||
background: url("/static/background.avif") center / cover no-repeat fixed;
|
||||
min-height: 100%;
|
||||
line-height: 1.5;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
body::before {
|
||||
content: "";
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: rgba(0, 0, 0, 0.2);
|
||||
z-index: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
body > * {
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
a {
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
/* Header */
|
||||
header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
background: var(--glass-bg);
|
||||
backdrop-filter: var(--blur);
|
||||
-webkit-backdrop-filter: var(--blur);
|
||||
border: 1px solid var(--glass-border);
|
||||
border-radius: 20px;
|
||||
padding: 12px 20px;
|
||||
margin-bottom: 24px;
|
||||
box-shadow: var(--glass-shadow), var(--glass-inset);
|
||||
}
|
||||
|
||||
.site-title {
|
||||
font-weight: 800;
|
||||
font-size: 1.1em;
|
||||
text-decoration: none;
|
||||
color: var(--primary);
|
||||
text-shadow: 0 0 20px var(--primary-glow);
|
||||
}
|
||||
|
||||
nav {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
nav a {
|
||||
text-decoration: none;
|
||||
color: var(--primary);
|
||||
font-weight: 600;
|
||||
font-size: 0.85em;
|
||||
padding: 4px 14px;
|
||||
border-radius: 20px;
|
||||
background: oklch(85.2% 0.199 91.936 / 0.08);
|
||||
border: 1px solid oklch(85.2% 0.199 91.936 / 0.2);
|
||||
transition:
|
||||
background 0.2s,
|
||||
box-shadow 0.2s;
|
||||
}
|
||||
|
||||
nav a:hover {
|
||||
background: oklch(85.2% 0.199 91.936 / 0.2);
|
||||
box-shadow: 0 0 12px var(--primary-glow);
|
||||
}
|
||||
|
||||
/* Diary entries */
|
||||
.diary {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.entry {
|
||||
display: flex;
|
||||
gap: 16px;
|
||||
padding: 16px;
|
||||
background: var(--glass-bg);
|
||||
backdrop-filter: var(--blur);
|
||||
-webkit-backdrop-filter: var(--blur);
|
||||
border: 1px solid var(--glass-border);
|
||||
border-radius: 16px;
|
||||
box-shadow: var(--glass-shadow), var(--glass-inset);
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
transition:
|
||||
box-shadow 0.2s,
|
||||
transform 0.2s;
|
||||
}
|
||||
|
||||
.entry::before {
|
||||
content: "";
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 50%;
|
||||
background: linear-gradient(
|
||||
to bottom,
|
||||
rgba(255, 255, 255, 0.12),
|
||||
transparent
|
||||
);
|
||||
border-radius: 16px 16px 0 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.entry:hover {
|
||||
transform: translateY(-2px);
|
||||
box-shadow:
|
||||
0 12px 40px var(--primary-glow),
|
||||
var(--glass-inset);
|
||||
}
|
||||
|
||||
.poster {
|
||||
width: 60px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.poster img {
|
||||
width: 100%;
|
||||
display: block;
|
||||
border-radius: 8px;
|
||||
border: 2px solid var(--primary-mid);
|
||||
box-shadow: 0 4px 12px var(--primary-glow);
|
||||
}
|
||||
|
||||
.entry-body {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.entry-title {
|
||||
font-weight: 700;
|
||||
margin-bottom: 2px;
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
.year {
|
||||
color: var(--text-muted);
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
.director {
|
||||
color: var(--text-muted);
|
||||
font-size: 0.88em;
|
||||
margin-bottom: 2px;
|
||||
}
|
||||
|
||||
.rating {
|
||||
margin-bottom: 4px;
|
||||
font-size: 1.15em;
|
||||
letter-spacing: 3px;
|
||||
}
|
||||
|
||||
.star.filled {
|
||||
color: var(--primary);
|
||||
text-shadow: 0 0 8px var(--primary-glow), 0 0 2px var(--primary);
|
||||
}
|
||||
|
||||
.star.empty {
|
||||
color: rgba(255, 255, 255, 0.2);
|
||||
}
|
||||
|
||||
.comment {
|
||||
font-style: italic;
|
||||
color: var(--text-light);
|
||||
font-size: 0.9em;
|
||||
margin-bottom: 2px;
|
||||
}
|
||||
|
||||
.watched-at {
|
||||
font-size: 0.8em;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.empty {
|
||||
color: var(--text-muted);
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
/* Pagination */
|
||||
.pagination {
|
||||
margin-top: 24px;
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
.pagination a {
|
||||
text-decoration: none;
|
||||
color: var(--primary);
|
||||
font-weight: 600;
|
||||
padding: 4px 16px;
|
||||
border-radius: 20px;
|
||||
background: oklch(85.2% 0.199 91.936 / 0.08);
|
||||
border: 1px solid oklch(85.2% 0.199 91.936 / 0.2);
|
||||
transition:
|
||||
background 0.2s,
|
||||
box-shadow 0.2s;
|
||||
}
|
||||
|
||||
.pagination a:hover {
|
||||
background: oklch(85.2% 0.199 91.936 / 0.2);
|
||||
box-shadow: 0 0 12px var(--primary-glow);
|
||||
}
|
||||
|
||||
/* Forms */
|
||||
h1 {
|
||||
font-size: 1.4em;
|
||||
font-weight: 800;
|
||||
color: var(--text);
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
form {
|
||||
max-width: 400px;
|
||||
}
|
||||
|
||||
form label {
|
||||
display: block;
|
||||
margin-bottom: 14px;
|
||||
font-size: 0.9em;
|
||||
font-weight: 600;
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
form input[type="text"],
|
||||
form input[type="email"],
|
||||
form input[type="password"],
|
||||
form input[type="number"],
|
||||
form input[type="datetime-local"],
|
||||
form textarea {
|
||||
display: block;
|
||||
width: 100%;
|
||||
font-family: "Nunito", sans-serif;
|
||||
font-size: 1em;
|
||||
margin-top: 4px;
|
||||
padding: 8px 12px;
|
||||
border: 1px solid var(--glass-border);
|
||||
border-radius: 10px;
|
||||
background: rgba(255, 255, 255, 0.5);
|
||||
backdrop-filter: blur(4px);
|
||||
-webkit-backdrop-filter: blur(4px);
|
||||
color: var(--text);
|
||||
transition:
|
||||
border-color 0.2s,
|
||||
box-shadow 0.2s;
|
||||
}
|
||||
|
||||
form input:focus,
|
||||
form textarea:focus {
|
||||
outline: none;
|
||||
border-color: var(--primary);
|
||||
box-shadow: 0 0 0 3px oklch(85.2% 0.199 91.936 / 0.2);
|
||||
}
|
||||
|
||||
form textarea {
|
||||
height: 80px;
|
||||
resize: vertical;
|
||||
}
|
||||
|
||||
form hr {
|
||||
border: none;
|
||||
border-top: 1px solid rgba(255, 255, 255, 0.3);
|
||||
margin: 18px 0;
|
||||
}
|
||||
|
||||
form button[type="submit"] {
|
||||
font-family: "Nunito", sans-serif;
|
||||
font-size: 1em;
|
||||
font-weight: 700;
|
||||
padding: 10px 28px;
|
||||
cursor: pointer;
|
||||
background: linear-gradient(
|
||||
135deg,
|
||||
var(--primary-mid) 0%,
|
||||
var(--primary) 60%,
|
||||
oklch(72% 0.199 91.936) 100%
|
||||
);
|
||||
color: #fff;
|
||||
border: none;
|
||||
border-radius: 24px;
|
||||
box-shadow:
|
||||
0 4px 16px var(--primary-glow),
|
||||
inset 0 1px 0 rgba(255, 255, 255, 0.4);
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
transition:
|
||||
transform 0.15s,
|
||||
box-shadow 0.15s;
|
||||
}
|
||||
|
||||
form button[type="submit"]::after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 50%;
|
||||
background: linear-gradient(
|
||||
to bottom,
|
||||
rgba(255, 255, 255, 0.28),
|
||||
transparent
|
||||
);
|
||||
border-radius: 24px 24px 0 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
form button[type="submit"]:hover {
|
||||
transform: translateY(-1px);
|
||||
box-shadow:
|
||||
0 6px 24px var(--primary-glow),
|
||||
inset 0 1px 0 rgba(255, 255, 255, 0.4);
|
||||
}
|
||||
|
||||
.delete-form {
|
||||
display: inline;
|
||||
margin-top: 6px;
|
||||
}
|
||||
|
||||
.delete-form button[type="submit"] {
|
||||
background: rgba(200, 60, 60, 0.65);
|
||||
backdrop-filter: blur(4px);
|
||||
-webkit-backdrop-filter: blur(4px);
|
||||
color: #fff;
|
||||
border-radius: 16px;
|
||||
padding: 3px 12px;
|
||||
font-size: 0.8em;
|
||||
font-weight: 600;
|
||||
border: 1px solid rgba(220, 80, 80, 0.3);
|
||||
box-shadow: none;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
|
||||
.delete-form button[type="submit"]::after {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.delete-form button[type="submit"]:hover {
|
||||
background: rgba(220, 60, 60, 0.85);
|
||||
transform: none;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
.optional {
|
||||
color: var(--text-muted);
|
||||
font-size: 0.85em;
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
/* Errors */
|
||||
.error {
|
||||
color: #e05050;
|
||||
background: rgba(220, 50, 50, 0.1);
|
||||
padding: 8px 12px;
|
||||
border-radius: 8px;
|
||||
border: 1px solid rgba(220, 50, 50, 0.2);
|
||||
margin-bottom: 16px;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
/* ---- Activity feed ---- */
|
||||
.feed-meta {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
margin-top: 0.25rem;
|
||||
font-size: 0.8rem;
|
||||
opacity: 0.7;
|
||||
}
|
||||
.feed-user {
|
||||
color: var(--primary);
|
||||
text-decoration: none;
|
||||
font-weight: 600;
|
||||
}
|
||||
.feed-user:hover { text-decoration: underline; }
|
||||
.feed-time { opacity: 0.6; }
|
||||
|
||||
/* ---- Users list ---- */
|
||||
.users-list { display: flex; flex-direction: column; gap: 0.75rem; }
|
||||
.page-title { font-size: 1.2rem; font-weight: 700; margin-bottom: 1rem; opacity: 0.9; }
|
||||
.user-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1rem;
|
||||
background: rgba(255,255,255,0.07);
|
||||
border-radius: 12px;
|
||||
padding: 0.75rem 1rem;
|
||||
}
|
||||
.user-avatar {
|
||||
width: 40px; height: 40px;
|
||||
border-radius: 50%;
|
||||
background: rgba(74,158,255,0.2);
|
||||
display: flex; align-items: center; justify-content: center;
|
||||
font-size: 1.1rem; font-weight: 700;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
.user-info { flex: 1; }
|
||||
.user-name { font-weight: 600; font-size: 0.95rem; }
|
||||
.user-meta { font-size: 0.8rem; opacity: 0.6; margin-top: 0.1rem; }
|
||||
.btn-secondary {
|
||||
color: var(--primary);
|
||||
font-size: 0.85rem;
|
||||
text-decoration: none;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.btn-secondary:hover { text-decoration: underline; }
|
||||
|
||||
/* ---- Profile stats header ---- */
|
||||
.profile { display: flex; flex-direction: column; gap: 1rem; }
|
||||
.stats-header {
|
||||
background: rgba(255,255,255,0.06);
|
||||
border-radius: 14px;
|
||||
padding: 1rem 1.25rem;
|
||||
}
|
||||
.profile-name { font-size: 1.1rem; font-weight: 700; margin-bottom: 0.75rem; }
|
||||
.stats-grid { display: grid; grid-template-columns: repeat(4, 1fr); gap: 0.5rem; }
|
||||
.stat-tile {
|
||||
background: rgba(255,255,255,0.06);
|
||||
border-radius: 10px;
|
||||
padding: 0.6rem 0.5rem;
|
||||
text-align: center;
|
||||
}
|
||||
.stat-value { font-size: 1.1rem; font-weight: 700; color: var(--primary); }
|
||||
.stat-label { font-size: 0.7rem; opacity: 0.5; margin-top: 0.1rem; }
|
||||
|
||||
/* ---- View tabs ---- */
|
||||
.view-tabs { display: flex; gap: 0.4rem; flex-wrap: wrap; }
|
||||
.view-tab {
|
||||
padding: 0.3rem 0.75rem;
|
||||
border-radius: 8px;
|
||||
font-size: 0.85rem;
|
||||
text-decoration: none;
|
||||
color: rgba(255,255,255,0.7);
|
||||
background: rgba(255,255,255,0.08);
|
||||
transition: background 0.15s;
|
||||
}
|
||||
.view-tab:hover { background: rgba(255,255,255,0.14); }
|
||||
.view-tab.active { background: var(--primary); color: #fff; font-weight: 600; }
|
||||
|
||||
/* ---- History heatmap ---- */
|
||||
.heatmap-section { background: rgba(255,255,255,0.06); border-radius: 12px; padding: 1rem; }
|
||||
.heatmap-label { font-size: 0.8rem; opacity: 0.5; margin-bottom: 0.6rem; }
|
||||
.heatmap { display: grid; grid-template-columns: repeat(12, 1fr); gap: 4px; }
|
||||
.heatmap-cell {
|
||||
border-radius: 6px;
|
||||
padding: 0.4rem 0.2rem;
|
||||
text-align: center;
|
||||
min-height: 48px;
|
||||
display: flex; flex-direction: column; align-items: center; justify-content: center;
|
||||
background: oklch(85.2% 0.199 91.936 / var(--alpha, 0.05));
|
||||
}
|
||||
.heatmap-count { font-size: 0.85rem; font-weight: 700; }
|
||||
.heatmap-month { font-size: 0.65rem; opacity: 0.6; margin-top: 2px; }
|
||||
|
||||
/* ---- History month sections ---- */
|
||||
.history-month { margin-top: 1rem; }
|
||||
.month-heading { font-size: 0.95rem; font-weight: 600; margin-bottom: 0.5rem; opacity: 0.8; }
|
||||
.month-count { font-size: 0.8rem; opacity: 0.5; font-weight: 400; }
|
||||
|
||||
/* ---- Trends charts ---- */
|
||||
.trends-section { display: flex; flex-direction: column; gap: 1.25rem; }
|
||||
.chart-block { background: rgba(255,255,255,0.06); border-radius: 12px; padding: 1rem; }
|
||||
.chart-label { font-size: 0.8rem; opacity: 0.5; margin-bottom: 0.75rem; }
|
||||
|
||||
.bar-chart {
|
||||
display: flex;
|
||||
align-items: flex-end;
|
||||
gap: 4px;
|
||||
}
|
||||
.bar-col { flex: 1; display: flex; flex-direction: column; align-items: center; gap: 2px; }
|
||||
.bar-value { font-size: 0.6rem; color: var(--primary); opacity: 0.9; line-height: 1; }
|
||||
.bar-fill { width: 100%; background: var(--primary); border-radius: 3px 3px 0 0; min-height: 3px; opacity: 0.8; }
|
||||
.bar-month { font-size: 0.65rem; opacity: 0.5; }
|
||||
|
||||
.director-chart { display: flex; flex-direction: column; gap: 6px; }
|
||||
.director-row { display: flex; align-items: center; gap: 0.6rem; }
|
||||
.director-name { font-size: 0.85rem; width: 140px; flex-shrink: 0; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
|
||||
.director-bar { flex: 1; background: rgba(255,255,255,0.08); border-radius: 4px; height: 10px; overflow: hidden; }
|
||||
.director-bar-fill { height: 100%; background: var(--primary); border-radius: 4px; opacity: 0.8; }
|
||||
.director-count { font-size: 0.8rem; opacity: 0.5; width: 20px; text-align: right; }
|
||||
Reference in New Issue
Block a user