Compare commits
86 Commits
ba42d3d445
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| 7a66661932 | |||
| b30a6a102b | |||
| 38a3aa6bbf | |||
| 3135a15cb3 | |||
| d083f8ae3d | |||
| 874c406d4a | |||
| 78e1f4ef72 | |||
| cf74b06b4a | |||
| 317898d51b | |||
| 790bb6fbb5 | |||
| 658df38788 | |||
| cff0f854fa | |||
| 66ade70273 | |||
| cbd2ac5b3e | |||
| 0433cd4d9b | |||
| b5a8ea2395 | |||
| 49b79799c1 | |||
| f4aba551a2 | |||
| 91df35dbd3 | |||
| 623f90e43f | |||
| e28f628c80 | |||
| 60c25d4c24 | |||
| 22aafe99be | |||
| 0ff22cca5f | |||
| ccc39e27e4 | |||
| 76319756f4 | |||
| 7703227970 | |||
| b9933bb48d | |||
| 0c48708ce6 | |||
| a2a889bced | |||
| a4846f3bea | |||
| 27be840faa | |||
| 965fc0eda8 | |||
| d700b85337 | |||
| ffbab75910 | |||
| dda7c40f7f | |||
| 1b827b1bdd | |||
| 1ee6873a60 | |||
| 7352b533ff | |||
| 85e254fee2 | |||
| fa8221322d | |||
| 38da37de55 | |||
| f3dedbad8a | |||
| d468ce131f | |||
| d034af9e9c | |||
| 59d308f41b | |||
| bbb2ee00d6 | |||
| 5dd9aac68d | |||
| 6dcc4c8317 | |||
| e31d99a240 | |||
| 41fec1efa5 | |||
| 160c08d1c4 | |||
| 7aa6d7bf4d | |||
| 144f2f8e0c | |||
| cff64f7a6b | |||
| 5baff54cb9 | |||
| f94d2db8b1 | |||
| 48875a6e86 | |||
| 9387ae705b | |||
| 9871e21bc0 | |||
| fa8efbaa23 | |||
| d769a5b55c | |||
| 8e1fb1a974 | |||
| 6145b873f5 | |||
| cc668ae44d | |||
| e5097c22dd | |||
| 450468ef3d | |||
| 6e7c6467a7 | |||
| 7f815f8207 | |||
| 5df89200d4 | |||
| eb273dc277 | |||
| 5689db0ad7 | |||
| 5c70b8b8be | |||
| 4c547df04e | |||
| 602df8df22 | |||
| 5b69a3a7c0 | |||
| a38f78d261 | |||
| 17f90726e8 | |||
| 563f33212e | |||
| 8e5ac9f433 | |||
| f790fa2a0f | |||
| edcf3c1170 | |||
| 1985d2c57f | |||
| f0b3d8ad90 | |||
| da72ab1446 | |||
| 93c65cd155 |
2
.cargo/config.toml
Normal file
2
.cargo/config.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[env]
|
||||
SQLX_OFFLINE = "true"
|
||||
10
.dockerignore
Normal file
10
.dockerignore
Normal file
@@ -0,0 +1,10 @@
|
||||
target/
|
||||
.git/
|
||||
.env
|
||||
*.db
|
||||
*.db-shm
|
||||
*.db-wal
|
||||
.cargo/
|
||||
.sqlx/
|
||||
docs/
|
||||
dev.db
|
||||
13
.env.example
13
.env.example
@@ -0,0 +1,13 @@
|
||||
DATABASE_URL=sqlite:./dev.db
|
||||
BASE_URL=http://localhost:3000
|
||||
PORT=3000
|
||||
SECURE_COOKIES=false
|
||||
JWT_SECRET=
|
||||
JWT_TTL_SECONDS=
|
||||
ALLOW_REGISTRATION=true
|
||||
OMDB_API_KEY=
|
||||
POSTER_FETCH_TIMEOUT_SECONDS=30
|
||||
MINIO_ENDPOINT=
|
||||
MINIO_ACCESS_KEY_ID=
|
||||
MINIO_SECRET_ACCESS_KEY=
|
||||
MINIO_BUCKET=
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -8,5 +8,9 @@
|
||||
.env.prod
|
||||
|
||||
*.db
|
||||
*db-shm
|
||||
*db-wal
|
||||
|
||||
.worktrees/
|
||||
.superpowers/
|
||||
docs/
|
||||
|
||||
20
.sqlx/query-0cd1a7b7255a0ee753deffab7cbb48027d22900a570b98a636c780cb3e2efd23.json
generated
Normal file
20
.sqlx/query-0cd1a7b7255a0ee753deffab7cbb48027d22900a570b98a636c780cb3e2efd23.json
generated
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT COUNT(*) FROM reviews WHERE user_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "COUNT(*)",
|
||||
"ordinal": 0,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "0cd1a7b7255a0ee753deffab7cbb48027d22900a570b98a636c780cb3e2efd23"
|
||||
}
|
||||
32
.sqlx/query-167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171.json
generated
Normal file
32
.sqlx/query-167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171.json
generated
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, email, password_hash FROM users WHERE email = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "password_hash",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171"
|
||||
}
|
||||
12
.sqlx/query-18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82.json
generated
Normal file
12
.sqlx/query-18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82.json
generated
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82"
|
||||
}
|
||||
32
.sqlx/query-1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399.json
generated
Normal file
32
.sqlx/query-1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399.json
generated
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "password_hash",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
|
||||
}
|
||||
98
.sqlx/query-217854179b4f77897178e6cfae51fb743e5be49ffc59826509be37a7cc81b6ee.json
generated
Normal file
98
.sqlx/query-217854179b4f77897178e6cfae51fb743e5be49ffc59826509be37a7cc81b6ee.json
generated
Normal file
@@ -0,0 +1,98 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,\n u.email AS user_email\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n INNER JOIN users u ON u.id = r.user_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_email",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "217854179b4f77897178e6cfae51fb743e5be49ffc59826509be37a7cc81b6ee"
|
||||
}
|
||||
20
.sqlx/query-4d85f0ff9732576bba77dc84d3885a0002c2b600c34ba4d99f1e1c5e99f35e75.json
generated
Normal file
20
.sqlx/query-4d85f0ff9732576bba77dc84d3885a0002c2b600c34ba4d99f1e1c5e99f35e75.json
generated
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT strftime('%Y-%m', watched_at) AS month\n FROM reviews\n WHERE user_id = ?\n GROUP BY month\n ORDER BY COUNT(*) DESC\n LIMIT 1",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "month",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "4d85f0ff9732576bba77dc84d3885a0002c2b600c34ba4d99f1e1c5e99f35e75"
|
||||
}
|
||||
92
.sqlx/query-5a861b5a934c9831ff17d896fa48feb95e6dab051c5ac55a66f9793482522199.json
generated
Normal file
92
.sqlx/query-5a861b5a934c9831ff17d896fa48feb95e6dab051c5ac55a66f9793482522199.json
generated
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "5a861b5a934c9831ff17d896fa48feb95e6dab051c5ac55a66f9793482522199"
|
||||
}
|
||||
56
.sqlx/query-70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2.json
generated
Normal file
56
.sqlx/query-70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2.json
generated
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2"
|
||||
}
|
||||
92
.sqlx/query-8d144859b397a842118c2dc4ab30e74015a814ed8185b6f86fbe39e641ab804e.json
generated
Normal file
92
.sqlx/query-8d144859b397a842118c2dc4ab30e74015a814ed8185b6f86fbe39e641ab804e.json
generated
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.watched_at DESC",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "8d144859b397a842118c2dc4ab30e74015a814ed8185b6f86fbe39e641ab804e"
|
||||
}
|
||||
26
.sqlx/query-a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22.json
generated
Normal file
26
.sqlx/query-a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22.json
generated
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT COUNT(DISTINCT movie_id) AS \"total!: i64\",\n AVG(CAST(rating AS REAL)) AS avg_rating\n FROM reviews WHERE user_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "total!: i64",
|
||||
"ordinal": 0,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "avg_rating",
|
||||
"ordinal": 1,
|
||||
"type_info": "Float"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22"
|
||||
}
|
||||
92
.sqlx/query-a3f4385bac7f78a9959648fb325d37096c87859ded1762137ce745955f46830c.json
generated
Normal file
92
.sqlx/query-a3f4385bac7f78a9959648fb325d37096c87859ded1762137ce745955f46830c.json
generated
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ?\n ORDER BY r.rating DESC, r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "a3f4385bac7f78a9959648fb325d37096c87859ded1762137ce745955f46830c"
|
||||
}
|
||||
26
.sqlx/query-aca9e7aaa32c23b4de3f5048d60340e978d31a36be9121da3c59378f2fc1ed8e.json
generated
Normal file
26
.sqlx/query-aca9e7aaa32c23b4de3f5048d60340e978d31a36be9121da3c59378f2fc1ed8e.json
generated
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.director AS \"director!\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 5",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "director!",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "count!: i64",
|
||||
"ordinal": 1,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
true,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "aca9e7aaa32c23b4de3f5048d60340e978d31a36be9121da3c59378f2fc1ed8e"
|
||||
}
|
||||
20
.sqlx/query-d5d2a81306488a8cee5654cea7e14d76d76ecc7d2190ffb73d12bec2874111d2.json
generated
Normal file
20
.sqlx/query-d5d2a81306488a8cee5654cea7e14d76d76ecc7d2190ffb73d12bec2874111d2.json
generated
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.director\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.user_id = ? AND m.director IS NOT NULL\n GROUP BY m.director\n ORDER BY COUNT(*) DESC\n LIMIT 1",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "d5d2a81306488a8cee5654cea7e14d76d76ecc7d2190ffb73d12bec2874111d2"
|
||||
}
|
||||
12
.sqlx/query-e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883.json
generated
Normal file
12
.sqlx/query-e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883.json
generated
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "DELETE FROM movies WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883"
|
||||
}
|
||||
38
.sqlx/query-f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318.json
generated
Normal file
38
.sqlx/query-f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318.json
generated
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT u.id,\n u.email,\n COUNT(DISTINCT r.movie_id) AS \"total_movies!: i64\",\n AVG(CAST(r.rating AS REAL)) AS avg_rating\n FROM users u\n LEFT JOIN reviews r ON r.user_id = u.id\n GROUP BY u.id, u.email\n ORDER BY u.email ASC",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "total_movies!: i64",
|
||||
"ordinal": 2,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "avg_rating",
|
||||
"ordinal": 3,
|
||||
"type_info": "Float"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318"
|
||||
}
|
||||
12
.sqlx/query-f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc.json
generated
Normal file
12
.sqlx/query-f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc.json
generated
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "DELETE FROM reviews WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc"
|
||||
}
|
||||
32
.sqlx/query-fdd5b522f26b5e0ce62f76c774fbb606fd9ee9884f4457831f693a0df3609317.json
generated
Normal file
32
.sqlx/query-fdd5b522f26b5e0ce62f76c774fbb606fd9ee9884f4457831f693a0df3609317.json
generated
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT strftime('%Y-%m', watched_at) AS \"month!\",\n AVG(CAST(rating AS REAL)) AS \"avg_rating!: f64\",\n COUNT(*) AS \"count!: i64\"\n FROM reviews\n WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')\n GROUP BY \"month!\"\n ORDER BY \"month!\" ASC",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "month!",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "avg_rating!: f64",
|
||||
"ordinal": 1,
|
||||
"type_info": "Float"
|
||||
},
|
||||
{
|
||||
"name": "count!: i64",
|
||||
"ordinal": 2,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "fdd5b522f26b5e0ce62f76c774fbb606fd9ee9884f4457831f693a0df3609317"
|
||||
}
|
||||
1116
Cargo.lock
generated
1116
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
23
Cargo.toml
23
Cargo.toml
@@ -1,11 +1,14 @@
|
||||
[workspace]
|
||||
members = [
|
||||
"crates/adapters/auth",
|
||||
"crates/adapters/event-publisher",
|
||||
"crates/adapters/metadata",
|
||||
"crates/adapters/poster-fetcher",
|
||||
"crates/adapters/poster-storage",
|
||||
"crates/adapters/rss",
|
||||
"crates/adapters/sqlite", "crates/adapters/template-askama",
|
||||
"crates/adapters/sqlite",
|
||||
"crates/adapters/template-askama",
|
||||
"crates/application",
|
||||
"crates/common",
|
||||
"crates/domain",
|
||||
"crates/presentation",
|
||||
]
|
||||
@@ -13,6 +16,7 @@ resolver = "2"
|
||||
|
||||
[workspace.dependencies]
|
||||
tokio = { version = "1.0", features = ["full"] }
|
||||
dotenvy = "0.15"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
anyhow = "1.0"
|
||||
@@ -22,14 +26,23 @@ tracing-subscriber = { version = "0.3.23", features = ["env-filter"] }
|
||||
async-trait = "0.1"
|
||||
uuid = { version = "1.23.0", features = ["v4", "serde"] }
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "sqlite", "uuid", "macros"] }
|
||||
template-askama = { path = "crates/adapters/template-askama" }
|
||||
sqlx = { version = "0.8.6", features = [
|
||||
"runtime-tokio-rustls",
|
||||
"sqlite",
|
||||
"uuid",
|
||||
"macros",
|
||||
] }
|
||||
reqwest = { version = "0.13", features = ["json", "query"] }
|
||||
object_store = { version = "0.11", features = ["aws"] }
|
||||
|
||||
domain = { path = "crates/domain" }
|
||||
common = { path = "crates/common" }
|
||||
application = { path = "crates/application" }
|
||||
presentation = { path = "crates/presentation" }
|
||||
auth = { path = "crates/adapters/auth" }
|
||||
metadata = { path = "crates/adapters/metadata" }
|
||||
poster-fetcher = { path = "crates/adapters/poster-fetcher" }
|
||||
poster-storage = { path = "crates/adapters/poster-storage" }
|
||||
event-publisher = { path = "crates/adapters/event-publisher" }
|
||||
rss = { path = "crates/adapters/rss" }
|
||||
sqlite = { path = "crates/adapters/sqlite" }
|
||||
template-askama = { path = "crates/adapters/template-askama" }
|
||||
|
||||
57
Dockerfile
Normal file
57
Dockerfile
Normal file
@@ -0,0 +1,57 @@
|
||||
# ----- build -----
|
||||
FROM rust:slim-bookworm AS builder
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends sqlite3 && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /build
|
||||
|
||||
# Cache dependency compilation separately from source
|
||||
COPY Cargo.toml Cargo.lock ./
|
||||
COPY crates/adapters/auth/Cargo.toml crates/adapters/auth/Cargo.toml
|
||||
COPY crates/adapters/event-publisher/Cargo.toml crates/adapters/event-publisher/Cargo.toml
|
||||
COPY crates/adapters/metadata/Cargo.toml crates/adapters/metadata/Cargo.toml
|
||||
COPY crates/adapters/poster-fetcher/Cargo.toml crates/adapters/poster-fetcher/Cargo.toml
|
||||
COPY crates/adapters/poster-storage/Cargo.toml crates/adapters/poster-storage/Cargo.toml
|
||||
COPY crates/adapters/rss/Cargo.toml crates/adapters/rss/Cargo.toml
|
||||
COPY crates/adapters/sqlite/Cargo.toml crates/adapters/sqlite/Cargo.toml
|
||||
COPY crates/adapters/template-askama/Cargo.toml crates/adapters/template-askama/Cargo.toml
|
||||
COPY crates/application/Cargo.toml crates/application/Cargo.toml
|
||||
COPY crates/domain/Cargo.toml crates/domain/Cargo.toml
|
||||
COPY crates/presentation/Cargo.toml crates/presentation/Cargo.toml
|
||||
|
||||
# Stub every crate so cargo can resolve and fetch deps
|
||||
RUN find crates -name "Cargo.toml" | sed 's|/Cargo.toml||' | \
|
||||
xargs -I{} sh -c 'mkdir -p {}/src && echo "fn main(){}" > {}/src/main.rs && echo "" > {}/src/lib.rs'
|
||||
|
||||
RUN cargo fetch
|
||||
|
||||
# Now copy real sources (invalidates cache only on source changes)
|
||||
COPY crates ./crates
|
||||
|
||||
# sqlx macros verify queries at compile time; create a real DB from migrations
|
||||
RUN sqlite3 /build/dev.db \
|
||||
< crates/adapters/sqlite/migrations/0001_initial.sql && \
|
||||
sqlite3 /build/dev.db \
|
||||
< crates/adapters/sqlite/migrations/0002_users.sql
|
||||
|
||||
ENV DATABASE_URL=sqlite:///build/dev.db
|
||||
|
||||
RUN cargo build --release -p presentation
|
||||
|
||||
# ----- runtime -----
|
||||
FROM debian:bookworm-slim
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /build/target/release/presentation ./presentation
|
||||
COPY static ./static
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
ENV RUST_LOG=presentation=info,tower_http=info
|
||||
|
||||
CMD ["./presentation"]
|
||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2026 Gabriel Kaszewski
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
85
README.md
Normal file
85
README.md
Normal file
@@ -0,0 +1,85 @@
|
||||
# Movies Diary
|
||||
|
||||
A self-hosted, server-side rendered movie logging system. Built in Rust — no JavaScript, no SPA, just HTML forms and an RSS feed. Designed to run as a lightweight widget embedded on a personal site.
|
||||
|
||||
## Features
|
||||
|
||||
- Log movies with a TMDB/OMDb ID and a 0–5 rating
|
||||
- Immutable append-only viewing ledger (tracks re-watches)
|
||||
- Background poster fetching and storage (S3-compatible)
|
||||
- RSS/Atom feed for public subscription
|
||||
- JWT authentication via cookie (HTML) or Bearer token (REST API)
|
||||
- Zero JavaScript
|
||||
|
||||
## Architecture
|
||||
|
||||
Hexagonal (Ports & Adapters) with Domain-Driven Design:
|
||||
|
||||
```
|
||||
domain — pure types and trait definitions, no external deps
|
||||
application — use cases / business logic orchestration
|
||||
presentation — Axum HTTP router, wires all adapters together
|
||||
adapters/
|
||||
auth — JWT issuance and validation (Argon2 passwords)
|
||||
sqlite — SQLite repository via sqlx
|
||||
metadata — OMDb HTTP client
|
||||
poster-fetcher — downloads poster images
|
||||
poster-storage — uploads posters to S3-compatible storage
|
||||
template-askama — Askama HTML rendering
|
||||
rss — RSS/Atom feed generation
|
||||
event-publisher — async event channel for background poster sync
|
||||
```
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Rust (stable, 2024 edition)
|
||||
- SQLite
|
||||
- An S3-compatible object store (e.g. MinIO) for poster storage
|
||||
- An [OMDb API key](https://www.omdbapi.com/apikey.aspx)
|
||||
|
||||
## Environment Variables
|
||||
|
||||
Copy and fill in the following (e.g. in a `.env` file):
|
||||
|
||||
```env
|
||||
# Database
|
||||
DATABASE_URL=sqlite://movies.db
|
||||
|
||||
# Authentication
|
||||
JWT_SECRET=change-me
|
||||
JWT_TTL_SECONDS=86400
|
||||
|
||||
# OMDb metadata
|
||||
OMDB_API_KEY=your-key
|
||||
|
||||
# Poster storage (S3-compatible)
|
||||
MINIO_ENDPOINT=http://localhost:9000
|
||||
MINIO_BUCKET=posters
|
||||
MINIO_REGION=us-east-1
|
||||
MINIO_ACCESS_KEY_ID=minioadmin
|
||||
MINIO_SECRET_ACCESS_KEY=minioadmin
|
||||
|
||||
# Optional
|
||||
ALLOW_REGISTRATION=false
|
||||
POSTER_FETCH_TIMEOUT_SECONDS=10
|
||||
EVENT_CHANNEL_BUFFER=32
|
||||
RUST_LOG=presentation=debug,tower_http=debug
|
||||
```
|
||||
|
||||
## Run
|
||||
|
||||
```bash
|
||||
cargo run -p presentation
|
||||
```
|
||||
|
||||
Server listens on `0.0.0.0:3000`.
|
||||
|
||||
## Test
|
||||
|
||||
```bash
|
||||
cargo test
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT License. See [LICENSE](LICENSE).
|
||||
@@ -6,3 +6,10 @@ edition = "2024"
|
||||
[dependencies]
|
||||
async-trait = { workspace = true }
|
||||
domain = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
jsonwebtoken = "9"
|
||||
argon2 = { version = "0.5", features = ["std"] }
|
||||
rand_core = { version = "0.6", features = ["getrandom"] }
|
||||
|
||||
@@ -1,13 +1,104 @@
|
||||
use async_trait::async_trait;
|
||||
use domain::{errors::DomainError, ports::AuthService, value_objects::UserId};
|
||||
use argon2::{
|
||||
Argon2,
|
||||
password_hash::{PasswordHasher as _, PasswordVerifier, SaltString},
|
||||
};
|
||||
use chrono::{Duration, Utc};
|
||||
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation, decode, encode};
|
||||
use rand_core::OsRng;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use uuid::Uuid;
|
||||
|
||||
pub struct StubAuthService;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
ports::{AuthService, GeneratedToken, PasswordHasher},
|
||||
value_objects::{PasswordHash, UserId},
|
||||
};
|
||||
|
||||
#[async_trait]
|
||||
impl AuthService for StubAuthService {
|
||||
async fn validate_token(&self, _token: &str) -> Result<UserId, DomainError> {
|
||||
Err(DomainError::InfrastructureError(
|
||||
"auth service not implemented".into(),
|
||||
))
|
||||
pub struct AuthConfig {
|
||||
secret: String,
|
||||
ttl_seconds: u64,
|
||||
}
|
||||
|
||||
impl AuthConfig {
|
||||
pub fn from_env() -> anyhow::Result<Self> {
|
||||
let secret = std::env::var("JWT_SECRET")
|
||||
.map_err(|_| anyhow::anyhow!("JWT_SECRET env var is required"))?;
|
||||
if secret.is_empty() {
|
||||
anyhow::bail!("JWT_SECRET must not be empty");
|
||||
}
|
||||
let ttl_seconds = std::env::var("JWT_TTL_SECONDS")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(86400u64);
|
||||
Ok(Self { secret, ttl_seconds })
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct Claims {
|
||||
sub: String,
|
||||
exp: u64,
|
||||
}
|
||||
|
||||
pub struct JwtAuthService {
|
||||
config: AuthConfig,
|
||||
}
|
||||
|
||||
impl JwtAuthService {
|
||||
pub fn new(config: AuthConfig) -> Self {
|
||||
Self { config }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl AuthService for JwtAuthService {
|
||||
async fn generate_token(&self, user_id: &UserId) -> Result<GeneratedToken, DomainError> {
|
||||
let expires_at = Utc::now() + Duration::seconds(self.config.ttl_seconds as i64);
|
||||
let claims = Claims {
|
||||
sub: user_id.value().to_string(),
|
||||
exp: expires_at.timestamp() as u64,
|
||||
};
|
||||
let token = encode(
|
||||
&Header::default(),
|
||||
&claims,
|
||||
&EncodingKey::from_secret(self.config.secret.as_bytes()),
|
||||
)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
Ok(GeneratedToken { token, expires_at })
|
||||
}
|
||||
|
||||
async fn validate_token(&self, token: &str) -> Result<UserId, DomainError> {
|
||||
let data = decode::<Claims>(
|
||||
token,
|
||||
&DecodingKey::from_secret(self.config.secret.as_bytes()),
|
||||
&Validation::default(),
|
||||
)
|
||||
.map_err(|_| DomainError::Unauthorized("Invalid or expired token".into()))?;
|
||||
let uuid = Uuid::parse_str(&data.claims.sub)
|
||||
.map_err(|_| DomainError::Unauthorized("Invalid token subject".into()))?;
|
||||
Ok(UserId::from_uuid(uuid))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Argon2PasswordHasher;
|
||||
|
||||
#[async_trait]
|
||||
impl PasswordHasher for Argon2PasswordHasher {
|
||||
async fn hash(&self, plain_password: &str) -> Result<PasswordHash, DomainError> {
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
let hash = Argon2::default()
|
||||
.hash_password(plain_password.as_bytes(), &salt)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?
|
||||
.to_string();
|
||||
PasswordHash::new(hash).map_err(|e| DomainError::InfrastructureError(e.to_string()))
|
||||
}
|
||||
|
||||
async fn verify(&self, plain_password: &str, hash: &PasswordHash) -> Result<bool, DomainError> {
|
||||
let parsed = argon2::password_hash::PasswordHash::new(hash.value())
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
Ok(Argon2::default()
|
||||
.verify_password(plain_password.as_bytes(), &parsed)
|
||||
.is_ok())
|
||||
}
|
||||
}
|
||||
|
||||
10
crates/adapters/event-publisher/Cargo.toml
Normal file
10
crates/adapters/event-publisher/Cargo.toml
Normal file
@@ -0,0 +1,10 @@
|
||||
[package]
|
||||
name = "event-publisher"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
domain = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
209
crates/adapters/event-publisher/src/lib.rs
Normal file
209
crates/adapters/event-publisher/src/lib.rs
Normal file
@@ -0,0 +1,209 @@
|
||||
use async_trait::async_trait;
|
||||
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
pub struct EventPublisherConfig {
|
||||
pub channel_buffer: usize,
|
||||
}
|
||||
|
||||
impl EventPublisherConfig {
|
||||
pub fn from_env() -> Self {
|
||||
let channel_buffer = std::env::var("EVENT_CHANNEL_BUFFER")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(128);
|
||||
Self { channel_buffer }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait EventHandler: Send + Sync {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
||||
}
|
||||
|
||||
pub struct ChannelEventPublisher {
|
||||
sender: mpsc::Sender<DomainEvent>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventPublisher for ChannelEventPublisher {
|
||||
async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||
self.sender
|
||||
.send(event.clone())
|
||||
.await
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EventWorker {
|
||||
receiver: mpsc::Receiver<DomainEvent>,
|
||||
handlers: Vec<Box<dyn EventHandler>>,
|
||||
}
|
||||
|
||||
impl EventWorker {
|
||||
pub async fn run(mut self) {
|
||||
while let Some(event) = self.receiver.recv().await {
|
||||
match &event {
|
||||
DomainEvent::ReviewLogged {
|
||||
review_id,
|
||||
movie_id,
|
||||
user_id,
|
||||
rating,
|
||||
watched_at,
|
||||
} => {
|
||||
tracing::info!(
|
||||
review_id = %review_id.value(),
|
||||
movie_id = %movie_id.value(),
|
||||
user_id = %user_id.value(),
|
||||
rating = rating.value(),
|
||||
watched_at = %watched_at,
|
||||
"event: review_logged"
|
||||
);
|
||||
}
|
||||
DomainEvent::MovieDiscovered {
|
||||
movie_id,
|
||||
external_metadata_id,
|
||||
} => {
|
||||
tracing::info!(
|
||||
movie_id = %movie_id.value(),
|
||||
external_id = external_metadata_id.value(),
|
||||
"event: movie_discovered"
|
||||
);
|
||||
}
|
||||
}
|
||||
for handler in &self.handlers {
|
||||
if let Err(e) = handler.handle(&event).await {
|
||||
tracing::error!("event handler error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
tracing::info!("event worker shut down");
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NoopEventPublisher;
|
||||
|
||||
#[async_trait]
|
||||
impl EventPublisher for NoopEventPublisher {
|
||||
async fn publish(&self, _event: &DomainEvent) -> Result<(), DomainError> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_event_channel(
|
||||
config: EventPublisherConfig,
|
||||
handlers: Vec<Box<dyn EventHandler>>,
|
||||
) -> (ChannelEventPublisher, EventWorker) {
|
||||
let (tx, rx) = mpsc::channel(config.channel_buffer);
|
||||
(
|
||||
ChannelEventPublisher { sender: tx },
|
||||
EventWorker {
|
||||
receiver: rx,
|
||||
handlers,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
value_objects::{ExternalMetadataId, MovieId},
|
||||
};
|
||||
|
||||
struct RecordingHandler {
|
||||
calls: Arc<Mutex<Vec<String>>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventHandler for RecordingHandler {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||
let label = match event {
|
||||
DomainEvent::MovieDiscovered { .. } => "movie_discovered",
|
||||
DomainEvent::ReviewLogged { .. } => "review_logged",
|
||||
};
|
||||
self.calls.lock().unwrap().push(label.to_string());
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn single_handler_receives_event() {
|
||||
let calls = Arc::new(Mutex::new(vec![]));
|
||||
let handler = RecordingHandler { calls: Arc::clone(&calls) };
|
||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||
let (publisher, worker) = create_event_channel(config, vec![Box::new(handler)]);
|
||||
|
||||
let handle = tokio::spawn(worker.run());
|
||||
|
||||
let event = DomainEvent::MovieDiscovered {
|
||||
movie_id: MovieId::generate(),
|
||||
external_metadata_id: ExternalMetadataId::new("tt1234567".into()).unwrap(),
|
||||
};
|
||||
publisher.publish(&event).await.unwrap();
|
||||
drop(publisher);
|
||||
handle.await.unwrap();
|
||||
|
||||
assert_eq!(*calls.lock().unwrap(), vec!["movie_discovered"]);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn multiple_handlers_all_receive_event() {
|
||||
let calls1 = Arc::new(Mutex::new(vec![]));
|
||||
let calls2 = Arc::new(Mutex::new(vec![]));
|
||||
let handler1 = RecordingHandler { calls: Arc::clone(&calls1) };
|
||||
let handler2 = RecordingHandler { calls: Arc::clone(&calls2) };
|
||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||
let (publisher, worker) = create_event_channel(
|
||||
config,
|
||||
vec![Box::new(handler1), Box::new(handler2)],
|
||||
);
|
||||
|
||||
let handle = tokio::spawn(worker.run());
|
||||
|
||||
let event = DomainEvent::MovieDiscovered {
|
||||
movie_id: MovieId::generate(),
|
||||
external_metadata_id: ExternalMetadataId::new("tt9999999".into()).unwrap(),
|
||||
};
|
||||
publisher.publish(&event).await.unwrap();
|
||||
drop(publisher);
|
||||
handle.await.unwrap();
|
||||
|
||||
assert_eq!(calls1.lock().unwrap().len(), 1);
|
||||
assert_eq!(calls2.lock().unwrap().len(), 1);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn handler_error_does_not_stop_worker() {
|
||||
struct FailingHandler;
|
||||
#[async_trait]
|
||||
impl EventHandler for FailingHandler {
|
||||
async fn handle(&self, _: &DomainEvent) -> Result<(), DomainError> {
|
||||
Err(DomainError::InfrastructureError("boom".into()))
|
||||
}
|
||||
}
|
||||
|
||||
let calls = Arc::new(Mutex::new(vec![]));
|
||||
let good = RecordingHandler { calls: Arc::clone(&calls) };
|
||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||
let (publisher, worker) = create_event_channel(
|
||||
config,
|
||||
vec![Box::new(FailingHandler), Box::new(good)],
|
||||
);
|
||||
|
||||
let handle = tokio::spawn(worker.run());
|
||||
|
||||
let event = DomainEvent::MovieDiscovered {
|
||||
movie_id: MovieId::generate(),
|
||||
external_metadata_id: ExternalMetadataId::new("tt0000001".into()).unwrap(),
|
||||
};
|
||||
publisher.publish(&event).await.unwrap();
|
||||
drop(publisher);
|
||||
handle.await.unwrap();
|
||||
|
||||
assert_eq!(calls.lock().unwrap().len(), 1);
|
||||
}
|
||||
}
|
||||
@@ -4,3 +4,7 @@ version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
async-trait = { workspace = true }
|
||||
reqwest = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
domain = { workspace = true }
|
||||
|
||||
@@ -1,14 +1,54 @@
|
||||
pub fn add(left: u64, right: u64) -> u64 {
|
||||
left + right
|
||||
use async_trait::async_trait;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::Movie,
|
||||
ports::{MetadataClient, MetadataSearchCriteria},
|
||||
value_objects::{ExternalMetadataId, MovieTitle, PosterUrl, ReleaseYear},
|
||||
};
|
||||
|
||||
mod omdb;
|
||||
|
||||
pub(crate) struct ProviderMovie {
|
||||
pub imdb_id: ExternalMetadataId,
|
||||
pub title: MovieTitle,
|
||||
pub release_year: ReleaseYear,
|
||||
pub director: Option<String>,
|
||||
pub poster_url: Option<PosterUrl>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
#[async_trait]
|
||||
pub(crate) trait MetadataProvider: Send + Sync {
|
||||
async fn fetch(&self, criteria: &MetadataSearchCriteria) -> Result<ProviderMovie, DomainError>;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_works() {
|
||||
let result = add(2, 2);
|
||||
assert_eq!(result, 4);
|
||||
pub struct MetadataClientImpl {
|
||||
provider: Box<dyn MetadataProvider>,
|
||||
}
|
||||
|
||||
impl MetadataClientImpl {
|
||||
pub fn new_omdb(api_key: String) -> Self {
|
||||
Self {
|
||||
provider: Box::new(omdb::OmdbProvider::new(api_key)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl MetadataClient for MetadataClientImpl {
|
||||
async fn fetch_movie_metadata(
|
||||
&self,
|
||||
criteria: &MetadataSearchCriteria,
|
||||
) -> Result<Movie, DomainError> {
|
||||
let pm = self.provider.fetch(criteria).await?;
|
||||
Ok(Movie::new(Some(pm.imdb_id), pm.title, pm.release_year, pm.director, None))
|
||||
}
|
||||
|
||||
async fn get_poster_url(
|
||||
&self,
|
||||
external_metadata_id: &ExternalMetadataId,
|
||||
) -> Result<Option<PosterUrl>, DomainError> {
|
||||
let criteria = MetadataSearchCriteria::ImdbId(external_metadata_id.clone());
|
||||
let pm = self.provider.fetch(&criteria).await?;
|
||||
Ok(pm.poster_url)
|
||||
}
|
||||
}
|
||||
|
||||
119
crates/adapters/metadata/src/omdb.rs
Normal file
119
crates/adapters/metadata/src/omdb.rs
Normal file
@@ -0,0 +1,119 @@
|
||||
use async_trait::async_trait;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
ports::MetadataSearchCriteria,
|
||||
value_objects::{ExternalMetadataId, MovieTitle, PosterUrl, ReleaseYear},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{MetadataProvider, ProviderMovie};
|
||||
|
||||
pub(crate) struct OmdbProvider {
|
||||
client: reqwest::Client,
|
||||
api_key: String,
|
||||
base_url: String,
|
||||
}
|
||||
|
||||
impl OmdbProvider {
|
||||
pub(crate) fn new(api_key: String) -> Self {
|
||||
Self {
|
||||
client: reqwest::Client::new(),
|
||||
api_key,
|
||||
base_url: "http://www.omdbapi.com/".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct OmdbResponse {
|
||||
#[serde(rename = "Title")]
|
||||
title: String,
|
||||
#[serde(rename = "Year")]
|
||||
year: String,
|
||||
#[serde(rename = "Director")]
|
||||
director: String,
|
||||
#[serde(rename = "Poster")]
|
||||
poster: String,
|
||||
#[serde(rename = "imdbID")]
|
||||
imdb_id: String,
|
||||
#[serde(rename = "Response")]
|
||||
response: String,
|
||||
#[serde(rename = "Error")]
|
||||
error: Option<String>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl MetadataProvider for OmdbProvider {
|
||||
async fn fetch(&self, criteria: &MetadataSearchCriteria) -> Result<ProviderMovie, DomainError> {
|
||||
let mut url = reqwest::Url::parse(&self.base_url)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
|
||||
{
|
||||
let mut params = url.query_pairs_mut();
|
||||
params.append_pair("apikey", &self.api_key);
|
||||
match criteria {
|
||||
MetadataSearchCriteria::ImdbId(id) => {
|
||||
params.append_pair("i", id.value());
|
||||
}
|
||||
MetadataSearchCriteria::Title { title, year } => {
|
||||
params.append_pair("t", title);
|
||||
if let Some(y) = year {
|
||||
params.append_pair("y", &y.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let http_resp = self
|
||||
.client
|
||||
.get(url)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e: reqwest::Error| DomainError::InfrastructureError(e.to_string()))?
|
||||
.error_for_status()
|
||||
.map_err(|e: reqwest::Error| DomainError::InfrastructureError(e.to_string()))?;
|
||||
|
||||
let resp: OmdbResponse = http_resp
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e: reqwest::Error| DomainError::InfrastructureError(e.to_string()))?;
|
||||
|
||||
if resp.response != "True" {
|
||||
let msg = resp.error.unwrap_or_default();
|
||||
return if msg.to_lowercase().contains("not found") {
|
||||
Err(DomainError::NotFound(msg))
|
||||
} else {
|
||||
Err(DomainError::InfrastructureError(msg))
|
||||
};
|
||||
}
|
||||
|
||||
let year: u16 = resp
|
||||
.year
|
||||
.chars()
|
||||
.take(4)
|
||||
.collect::<String>()
|
||||
.parse()
|
||||
.map_err(|_| {
|
||||
DomainError::InfrastructureError(format!("Unparseable year: {}", resp.year))
|
||||
})?;
|
||||
|
||||
let imdb_id = ExternalMetadataId::new(resp.imdb_id)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
let title = MovieTitle::new(resp.title)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
let release_year = ReleaseYear::new(year)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
|
||||
let director = match resp.director.as_str() {
|
||||
"N/A" | "" => None,
|
||||
d => Some(d.to_string()),
|
||||
};
|
||||
|
||||
let poster_url = match resp.poster.as_str() {
|
||||
"N/A" | "" => None,
|
||||
url => PosterUrl::new(url.to_string()).ok(),
|
||||
};
|
||||
|
||||
Ok(ProviderMovie { imdb_id, title, release_year, director, poster_url })
|
||||
}
|
||||
}
|
||||
10
crates/adapters/poster-fetcher/Cargo.toml
Normal file
10
crates/adapters/poster-fetcher/Cargo.toml
Normal file
@@ -0,0 +1,10 @@
|
||||
[package]
|
||||
name = "poster-fetcher"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
domain = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
reqwest = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
13
crates/adapters/poster-fetcher/src/config.rs
Normal file
13
crates/adapters/poster-fetcher/src/config.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
pub struct PosterFetcherConfig {
|
||||
pub timeout_seconds: u64,
|
||||
}
|
||||
|
||||
impl PosterFetcherConfig {
|
||||
pub fn from_env() -> Self {
|
||||
let timeout_seconds = std::env::var("POSTER_FETCH_TIMEOUT_SECONDS")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(30);
|
||||
Self { timeout_seconds }
|
||||
}
|
||||
}
|
||||
38
crates/adapters/poster-fetcher/src/lib.rs
Normal file
38
crates/adapters/poster-fetcher/src/lib.rs
Normal file
@@ -0,0 +1,38 @@
|
||||
mod config;
|
||||
pub use config::PosterFetcherConfig;
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use domain::{errors::DomainError, ports::PosterFetcherClient, value_objects::PosterUrl};
|
||||
|
||||
pub struct ReqwestPosterFetcher {
|
||||
client: reqwest::Client,
|
||||
}
|
||||
|
||||
impl ReqwestPosterFetcher {
|
||||
pub fn new(config: PosterFetcherConfig) -> anyhow::Result<Self> {
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(Duration::from_secs(config.timeout_seconds))
|
||||
.build()?;
|
||||
Ok(Self { client })
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl PosterFetcherClient for ReqwestPosterFetcher {
|
||||
async fn fetch_poster_bytes(&self, poster_url: &PosterUrl) -> Result<Vec<u8>, DomainError> {
|
||||
let bytes = self
|
||||
.client
|
||||
.get(poster_url.value())
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?
|
||||
.error_for_status()
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?
|
||||
.bytes()
|
||||
.await
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
Ok(bytes.to_vec())
|
||||
}
|
||||
}
|
||||
16
crates/adapters/poster-storage/Cargo.toml
Normal file
16
crates/adapters/poster-storage/Cargo.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
[package]
|
||||
name = "poster-storage"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
domain = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
object_store = { workspace = true }
|
||||
infer = "0.19.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
38
crates/adapters/poster-storage/src/config.rs
Normal file
38
crates/adapters/poster-storage/src/config.rs
Normal file
@@ -0,0 +1,38 @@
|
||||
use anyhow::Context;
|
||||
use object_store::{aws::AmazonS3Builder, ObjectStore};
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct StorageConfig {
|
||||
endpoint: String,
|
||||
access_key_id: String,
|
||||
secret_access_key: String,
|
||||
bucket: String,
|
||||
region: String,
|
||||
}
|
||||
|
||||
impl StorageConfig {
|
||||
pub fn from_env() -> anyhow::Result<Self> {
|
||||
Ok(Self {
|
||||
endpoint: std::env::var("MINIO_ENDPOINT").context("MINIO_ENDPOINT required")?,
|
||||
access_key_id: std::env::var("MINIO_ACCESS_KEY_ID")
|
||||
.context("MINIO_ACCESS_KEY_ID required")?,
|
||||
secret_access_key: std::env::var("MINIO_SECRET_ACCESS_KEY")
|
||||
.context("MINIO_SECRET_ACCESS_KEY required")?,
|
||||
bucket: std::env::var("MINIO_BUCKET").context("MINIO_BUCKET required")?,
|
||||
region: std::env::var("MINIO_REGION").unwrap_or_else(|_| "minio".to_string()),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn build_store(self) -> anyhow::Result<Arc<dyn ObjectStore>> {
|
||||
let store = AmazonS3Builder::new()
|
||||
.with_endpoint(self.endpoint)
|
||||
.with_access_key_id(self.access_key_id)
|
||||
.with_secret_access_key(self.secret_access_key)
|
||||
.with_bucket_name(self.bucket)
|
||||
.with_region(self.region)
|
||||
.with_allow_http(true)
|
||||
.build()
|
||||
.context("Failed to build S3/Minio store")?;
|
||||
Ok(Arc::new(store))
|
||||
}
|
||||
}
|
||||
95
crates/adapters/poster-storage/src/lib.rs
Normal file
95
crates/adapters/poster-storage/src/lib.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
mod config;
|
||||
pub use config::StorageConfig;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
ports::PosterStorage,
|
||||
value_objects::{MovieId, PosterPath},
|
||||
};
|
||||
use object_store::{Attribute, Attributes, PutOptions, path::Path, ObjectStore};
|
||||
use std::sync::Arc;
|
||||
|
||||
fn detect_mime(bytes: &[u8]) -> &'static str {
|
||||
infer::get(bytes)
|
||||
.map(|t| t.mime_type())
|
||||
.unwrap_or("application/octet-stream")
|
||||
}
|
||||
|
||||
pub struct PosterStorageAdapter {
|
||||
store: Arc<dyn ObjectStore>,
|
||||
}
|
||||
|
||||
impl PosterStorageAdapter {
|
||||
pub fn new(store: Arc<dyn ObjectStore>) -> Self {
|
||||
Self { store }
|
||||
}
|
||||
|
||||
pub fn from_config(config: StorageConfig) -> anyhow::Result<Self> {
|
||||
Ok(Self::new(config.build_store()?))
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl PosterStorage for PosterStorageAdapter {
|
||||
async fn store_poster(
|
||||
&self,
|
||||
movie_id: &MovieId,
|
||||
image_bytes: &[u8],
|
||||
) -> Result<PosterPath, DomainError> {
|
||||
let path = Path::from(movie_id.value().to_string());
|
||||
let mime = detect_mime(image_bytes);
|
||||
let mut attributes = Attributes::new();
|
||||
attributes.insert(Attribute::ContentType, mime.into());
|
||||
let opts = PutOptions { attributes, ..Default::default() };
|
||||
self.store
|
||||
.put_opts(&path, image_bytes.to_vec().into(), opts)
|
||||
.await
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
PosterPath::new(path.to_string())
|
||||
}
|
||||
|
||||
async fn get_poster(&self, poster_path: &PosterPath) -> Result<Vec<u8>, DomainError> {
|
||||
let path = Path::from(poster_path.value().to_string());
|
||||
let result = self.store.get(&path).await.map_err(|e| match e {
|
||||
object_store::Error::NotFound { .. } => DomainError::NotFound("Poster not found".into()),
|
||||
_ => DomainError::InfrastructureError(e.to_string()),
|
||||
})?;
|
||||
result
|
||||
.bytes()
|
||||
.await
|
||||
.map(|b| b.to_vec())
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use object_store::memory::InMemory;
|
||||
use uuid::Uuid;
|
||||
|
||||
fn adapter() -> PosterStorageAdapter {
|
||||
PosterStorageAdapter::new(Arc::new(InMemory::new()))
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn store_and_retrieve_round_trip() {
|
||||
let adapter = adapter();
|
||||
let movie_id = MovieId::from_uuid(Uuid::new_v4());
|
||||
let bytes = b"fake-image-bytes";
|
||||
|
||||
let path = adapter.store_poster(&movie_id, bytes).await.unwrap();
|
||||
let retrieved = adapter.get_poster(&path).await.unwrap();
|
||||
|
||||
assert_eq!(retrieved, bytes);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_missing_returns_not_found() {
|
||||
let adapter = adapter();
|
||||
let path = PosterPath::new("nonexistent".into()).unwrap();
|
||||
let result = adapter.get_poster(&path).await;
|
||||
assert!(matches!(result, Err(DomainError::NotFound(_))));
|
||||
}
|
||||
}
|
||||
@@ -4,3 +4,7 @@ version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
rss-feed = { package = "rss", version = "2" }
|
||||
chrono = { workspace = true }
|
||||
domain = { workspace = true }
|
||||
application = { workspace = true }
|
||||
|
||||
@@ -1,5 +1,59 @@
|
||||
pub fn add(left: u64, right: u64) -> u64 {
|
||||
left + right
|
||||
use application::ports::RssFeedRenderer;
|
||||
use domain::models::DiaryEntry;
|
||||
use rss_feed::{ChannelBuilder, GuidBuilder, ItemBuilder};
|
||||
|
||||
pub struct RssAdapter {
|
||||
feed_link: String,
|
||||
}
|
||||
|
||||
impl RssAdapter {
|
||||
pub fn new(feed_link: String) -> Self {
|
||||
Self { feed_link }
|
||||
}
|
||||
}
|
||||
|
||||
impl RssFeedRenderer for RssAdapter {
|
||||
fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String> {
|
||||
let items = entries
|
||||
.iter()
|
||||
.map(|e| {
|
||||
let item_title = format!(
|
||||
"{} ({})",
|
||||
e.movie().title().value(),
|
||||
e.movie().release_year().value()
|
||||
);
|
||||
let description = match e.review().comment() {
|
||||
Some(c) => format!("{}/5 — {}", e.review().rating().value(), c.value()),
|
||||
None => format!("{}/5", e.review().rating().value()),
|
||||
};
|
||||
let pub_date = e
|
||||
.review()
|
||||
.watched_at()
|
||||
.and_utc()
|
||||
.format("%a, %d %b %Y %H:%M:%S +0000")
|
||||
.to_string();
|
||||
let guid = GuidBuilder::default()
|
||||
.value(e.review().id().value().to_string())
|
||||
.permalink(false)
|
||||
.build();
|
||||
ItemBuilder::default()
|
||||
.title(Some(item_title))
|
||||
.description(Some(description))
|
||||
.pub_date(Some(pub_date))
|
||||
.guid(Some(guid))
|
||||
.build()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let channel = ChannelBuilder::default()
|
||||
.title(title.to_string())
|
||||
.link(self.feed_link.clone())
|
||||
.description(title.to_string())
|
||||
.items(items)
|
||||
.build();
|
||||
|
||||
Ok(channel.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -7,8 +61,16 @@ mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn it_works() {
|
||||
let result = add(2, 2);
|
||||
assert_eq!(result, 4);
|
||||
fn render_feed_uses_provided_title() {
|
||||
let adapter = RssAdapter::new("http://example.com".into());
|
||||
let xml = adapter.render_feed(&[], "Custom Title").unwrap();
|
||||
assert!(xml.contains("<title>Custom Title</title>"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn render_feed_empty_entries_produces_valid_xml() {
|
||||
let adapter = RssAdapter::new("http://example.com".into());
|
||||
let xml = adapter.render_feed(&[], "My Feed").unwrap();
|
||||
assert!(xml.starts_with("<?xml") || xml.starts_with("<rss"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "01a08873b7fa815ad98a56a0902b60414cfcdc2c7a8570351320c4bc425347c6"
|
||||
}
|
||||
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "026e2afeb573707cb360fcdab8f6137aabfaf603b5ed57b98ac2888b4a0389ff"
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT COUNT(*) FROM reviews",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "COUNT(*)",
|
||||
"ordinal": 0,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "0963b9661182e139cd760bbabb0d6ea3a301a2a3adbdfdda4a88f333a1144c77"
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, email, password_hash FROM users WHERE email = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "password_hash",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171"
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82"
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "email",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "password_hash",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE title = ? AND release_year = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "3047579c6ed13ce87aad9b9ce6300c02f0df3516979518976e13f9d9abc6a403"
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "33d0dae7d16b0635c1c7eb5afd10824bb55af7cc7a854f590d326622863759d1"
|
||||
}
|
||||
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "47f7cf95ce3450635b643ab710cadba96f40319140834d510bc5207b2552e055"
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT COUNT(*) FROM reviews WHERE movie_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "COUNT(*)",
|
||||
"ordinal": 0,
|
||||
"type_info": "Integer"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "4b3074b532342c6356ee0e8e4d8c4a830f016234bb690e1f6240f02824d6d84f"
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "INSERT INTO reviews (id, movie_id, user_id, rating, comment, watched_at, created_at)\n VALUES (?, ?, ?, ?, ?, ?, ?)",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 7
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "630e092fcd33bc312befef352a98225e6e18e6079644b949258a39bf4b0fe3e5"
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE external_metadata_id = ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "7bc4aebcb94547976d3d7e063e4e908fc22b977b3cbf063ee93ffe4648c42011"
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "INSERT INTO movies (id, external_metadata_id, title, release_year, director, poster_path)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(id) DO UPDATE SET\n external_metadata_id = excluded.external_metadata_id,\n title = excluded.title,\n release_year = excluded.release_year,\n director = excluded.director,\n poster_path = excluded.poster_path",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 6
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "7d7e23355ee0e442f2aa27e898dcfa40bdc4b09391afe04325f076157d9d84aa"
|
||||
}
|
||||
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE movie_id = ? ORDER BY watched_at ASC",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "af883f8b78f185077e2d3dcfaa0a6e62fbdfbf00c97c9b33b699dc631476181d"
|
||||
}
|
||||
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "external_metadata_id",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "release_year",
|
||||
"ordinal": 3,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "director",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "poster_path",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "review_id",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "movie_id",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "rating",
|
||||
"ordinal": 9,
|
||||
"type_info": "Integer"
|
||||
},
|
||||
{
|
||||
"name": "comment",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "watched_at",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 2
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "affe1eb261283c09d4b1ce6e684681755f079a044ffec8ff2bd79cfd8efe16b8"
|
||||
}
|
||||
6
crates/adapters/sqlite/migrations/0002_users.sql
Normal file
6
crates/adapters/sqlite/migrations/0002_users.sql
Normal file
@@ -0,0 +1,6 @@
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id TEXT PRIMARY KEY NOT NULL,
|
||||
email TEXT UNIQUE NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL
|
||||
);
|
||||
@@ -3,18 +3,38 @@ use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
models::{
|
||||
DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, SortDirection,
|
||||
collections::Paginated,
|
||||
DiaryEntry, DiaryFilter, DirectorStat, FeedEntry, Movie, MonthlyRating,
|
||||
Review, ReviewHistory, SortDirection, UserStats, UserTrends,
|
||||
collections::{PageParams, Paginated},
|
||||
},
|
||||
ports::MovieRepository,
|
||||
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear},
|
||||
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear, ReviewId, UserId},
|
||||
};
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
mod migrations;
|
||||
mod models;
|
||||
mod users;
|
||||
|
||||
use models::{DiaryRow, MovieRow, ReviewRow, datetime_to_str};
|
||||
use models::{
|
||||
DiaryRow, DirectorCountRow, FeedRow, MonthlyRatingRow, MovieRow, ReviewRow,
|
||||
UserTotalsRow, datetime_to_str,
|
||||
};
|
||||
|
||||
pub use users::SqliteUserRepository;
|
||||
|
||||
fn format_year_month(ym: &str) -> String {
|
||||
let parts: Vec<&str> = ym.splitn(2, '-').collect();
|
||||
if parts.len() != 2 { return ym.to_string(); }
|
||||
let year = parts[0].get(2..).unwrap_or(parts[0]);
|
||||
let month = match parts[1] {
|
||||
"01" => "Jan", "02" => "Feb", "03" => "Mar", "04" => "Apr",
|
||||
"05" => "May", "06" => "Jun", "07" => "Jul", "08" => "Aug",
|
||||
"09" => "Sep", "10" => "Oct", "11" => "Nov", "12" => "Dec",
|
||||
_ => parts[1],
|
||||
};
|
||||
format!("{} '{}", month, year)
|
||||
}
|
||||
|
||||
pub struct SqliteMovieRepository {
|
||||
pool: SqlitePool,
|
||||
@@ -49,17 +69,15 @@ impl SqliteMovieRepository {
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_diary_rows(
|
||||
async fn fetch_all_diary_rows(
|
||||
&self,
|
||||
movie_id: Option<&str>,
|
||||
sort: &SortDirection,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<DiaryRow>, DomainError> {
|
||||
// sqlx macros require literal ORDER BY values; separate branches also let the
|
||||
// query planner use the movie_id index instead of falling back to a filtered scan.
|
||||
match (movie_id, sort) {
|
||||
(None, SortDirection::Descending) => sqlx::query_as!(
|
||||
match sort {
|
||||
// ByRatingDesc only applies to user-scoped queries; falls back to date sort here
|
||||
SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
@@ -74,7 +92,7 @@ impl SqliteMovieRepository {
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
|
||||
(None, SortDirection::Ascending) => sqlx::query_as!(
|
||||
SortDirection::Ascending => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
@@ -88,42 +106,188 @@ impl SqliteMovieRepository {
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
|
||||
(Some(id), SortDirection::Descending) => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.movie_id = ?
|
||||
ORDER BY r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
id,
|
||||
limit,
|
||||
offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
|
||||
(Some(id), SortDirection::Ascending) => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.movie_id = ?
|
||||
ORDER BY r.watched_at ASC
|
||||
LIMIT ? OFFSET ?",
|
||||
id,
|
||||
limit,
|
||||
offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_movie_diary_rows(
|
||||
&self,
|
||||
movie_id: &str,
|
||||
sort: &SortDirection,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<DiaryRow>, DomainError> {
|
||||
match sort {
|
||||
// ByRatingDesc only applies to user-scoped queries; falls back to date sort here
|
||||
SortDirection::Descending | SortDirection::ByRatingDesc => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.movie_id = ?
|
||||
ORDER BY r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
movie_id,
|
||||
limit,
|
||||
offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
|
||||
SortDirection::Ascending => sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.movie_id = ?
|
||||
ORDER BY r.watched_at ASC
|
||||
LIMIT ? OFFSET ?",
|
||||
movie_id,
|
||||
limit,
|
||||
offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err),
|
||||
}
|
||||
}
|
||||
|
||||
async fn count_user_diary_entries(&self, user_id: &str) -> Result<i64, DomainError> {
|
||||
sqlx::query_scalar!(
|
||||
"SELECT COUNT(*) FROM reviews WHERE user_id = ?",
|
||||
user_id
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_user_diary_rows_by_watched(
|
||||
&self,
|
||||
user_id: &str,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<DiaryRow>, DomainError> {
|
||||
sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ?
|
||||
ORDER BY r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
user_id, limit, offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_user_diary_rows_by_rating(
|
||||
&self,
|
||||
user_id: &str,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<DiaryRow>, DomainError> {
|
||||
sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ?
|
||||
ORDER BY r.rating DESC, r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
user_id, limit, offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn count_feed_entries(&self) -> Result<i64, DomainError> {
|
||||
sqlx::query_scalar!("SELECT COUNT(*) FROM reviews")
|
||||
.fetch_one(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_feed_rows(
|
||||
&self,
|
||||
limit: i64,
|
||||
offset: i64,
|
||||
) -> Result<Vec<FeedRow>, DomainError> {
|
||||
sqlx::query_as!(
|
||||
FeedRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at,
|
||||
u.email AS user_email
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
INNER JOIN users u ON u.id = r.user_id
|
||||
ORDER BY r.watched_at DESC
|
||||
LIMIT ? OFFSET ?",
|
||||
limit, offset
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_user_totals(&self, user_id: &str) -> Result<UserTotalsRow, DomainError> {
|
||||
sqlx::query_as!(
|
||||
UserTotalsRow,
|
||||
r#"SELECT COUNT(DISTINCT movie_id) AS "total!: i64",
|
||||
AVG(CAST(rating AS REAL)) AS avg_rating
|
||||
FROM reviews WHERE user_id = ?"#,
|
||||
user_id
|
||||
)
|
||||
.fetch_one(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)
|
||||
}
|
||||
|
||||
async fn fetch_user_favorite_director(
|
||||
&self,
|
||||
user_id: &str,
|
||||
) -> Result<Option<String>, DomainError> {
|
||||
let row = sqlx::query_scalar!(
|
||||
"SELECT m.director
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ? AND m.director IS NOT NULL
|
||||
GROUP BY m.director
|
||||
ORDER BY COUNT(*) DESC
|
||||
LIMIT 1",
|
||||
user_id
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
Ok(row.flatten())
|
||||
}
|
||||
|
||||
async fn fetch_user_most_active_month(
|
||||
&self,
|
||||
user_id: &str,
|
||||
) -> Result<Option<String>, DomainError> {
|
||||
let result: Option<Option<String>> = sqlx::query_scalar!(
|
||||
"SELECT strftime('%Y-%m', watched_at) AS month
|
||||
FROM reviews
|
||||
WHERE user_id = ?
|
||||
GROUP BY month
|
||||
ORDER BY COUNT(*) DESC
|
||||
LIMIT 1",
|
||||
user_id
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
Ok(result.flatten())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
@@ -248,14 +412,40 @@ impl MovieRepository for SqliteMovieRepository {
|
||||
}
|
||||
|
||||
async fn query_diary(&self, filter: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> {
|
||||
let movie_id: Option<String> = filter.movie_id.as_ref().map(|id| id.value().to_string());
|
||||
let limit = filter.page.limit as i64;
|
||||
let offset = filter.page.offset as i64;
|
||||
|
||||
let (total, rows) = tokio::try_join!(
|
||||
self.count_diary_entries(movie_id.as_deref()),
|
||||
self.fetch_diary_rows(movie_id.as_deref(), &filter.sort_by, limit, offset)
|
||||
)?;
|
||||
let (total, rows) = match (&filter.movie_id, &filter.user_id) {
|
||||
(None, None) => tokio::try_join!(
|
||||
self.count_diary_entries(None),
|
||||
self.fetch_all_diary_rows(&filter.sort_by, limit, offset)
|
||||
)?,
|
||||
(Some(id), None) => {
|
||||
let id_str = id.value().to_string();
|
||||
tokio::try_join!(
|
||||
self.count_diary_entries(Some(id_str.as_str())),
|
||||
self.fetch_movie_diary_rows(&id_str, &filter.sort_by, limit, offset)
|
||||
)?
|
||||
}
|
||||
(None, Some(uid)) => {
|
||||
let uid_str = uid.value().to_string();
|
||||
match &filter.sort_by {
|
||||
SortDirection::ByRatingDesc => tokio::try_join!(
|
||||
self.count_user_diary_entries(&uid_str),
|
||||
self.fetch_user_diary_rows_by_rating(&uid_str, limit, offset)
|
||||
)?,
|
||||
_ => tokio::try_join!(
|
||||
self.count_user_diary_entries(&uid_str),
|
||||
self.fetch_user_diary_rows_by_watched(&uid_str, limit, offset)
|
||||
)?,
|
||||
}
|
||||
}
|
||||
(Some(_), Some(_)) => {
|
||||
return Err(DomainError::ValidationError(
|
||||
"Combined movie_id + user_id filter not supported".into(),
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let items = rows
|
||||
.into_iter()
|
||||
@@ -270,6 +460,39 @@ impl MovieRepository for SqliteMovieRepository {
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_review_by_id(&self, review_id: &ReviewId) -> Result<Option<Review>, DomainError> {
|
||||
let id = review_id.value().to_string();
|
||||
sqlx::query_as!(
|
||||
ReviewRow,
|
||||
"SELECT id, movie_id, user_id, rating, comment, watched_at, created_at
|
||||
FROM reviews WHERE id = ?",
|
||||
id
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?
|
||||
.map(ReviewRow::to_domain)
|
||||
.transpose()
|
||||
}
|
||||
|
||||
async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError> {
|
||||
let id = review_id.value().to_string();
|
||||
sqlx::query!("DELETE FROM reviews WHERE id = ?", id)
|
||||
.execute(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError> {
|
||||
let id = movie_id.value().to_string();
|
||||
sqlx::query!("DELETE FROM movies WHERE id = ?", id)
|
||||
.execute(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError> {
|
||||
let id_str = movie_id.value().to_string();
|
||||
|
||||
@@ -300,4 +523,119 @@ impl MovieRepository for SqliteMovieRepository {
|
||||
|
||||
Ok(ReviewHistory::new(movie, viewings))
|
||||
}
|
||||
|
||||
async fn query_activity_feed(
|
||||
&self,
|
||||
page: &PageParams,
|
||||
) -> Result<Paginated<FeedEntry>, DomainError> {
|
||||
let limit = page.limit as i64;
|
||||
let offset = page.offset as i64;
|
||||
|
||||
let (total, rows) = tokio::try_join!(
|
||||
self.count_feed_entries(),
|
||||
self.fetch_feed_rows(limit, offset)
|
||||
)?;
|
||||
|
||||
let items = rows
|
||||
.into_iter()
|
||||
.map(FeedRow::to_domain)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
Ok(Paginated {
|
||||
items,
|
||||
total_count: total as u64,
|
||||
limit: page.limit,
|
||||
offset: page.offset,
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_user_stats(&self, user_id: &UserId) -> Result<UserStats, DomainError> {
|
||||
let uid = user_id.value().to_string();
|
||||
|
||||
let (totals, fav_director, most_active) = tokio::try_join!(
|
||||
self.fetch_user_totals(&uid),
|
||||
self.fetch_user_favorite_director(&uid),
|
||||
self.fetch_user_most_active_month(&uid)
|
||||
)?;
|
||||
|
||||
let most_active_month = most_active.map(|ym| format_year_month(&ym));
|
||||
|
||||
Ok(UserStats {
|
||||
total_movies: totals.total,
|
||||
avg_rating: totals.avg_rating,
|
||||
favorite_director: fav_director,
|
||||
most_active_month,
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_user_history(&self, user_id: &UserId) -> Result<Vec<DiaryEntry>, DomainError> {
|
||||
let uid = user_id.value().to_string();
|
||||
let rows = sqlx::query_as!(
|
||||
DiaryRow,
|
||||
"SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
|
||||
r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ?
|
||||
ORDER BY r.watched_at DESC",
|
||||
uid
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
|
||||
rows.into_iter().map(DiaryRow::to_domain).collect()
|
||||
}
|
||||
|
||||
async fn get_user_trends(&self, user_id: &UserId) -> Result<UserTrends, DomainError> {
|
||||
let uid = user_id.value().to_string();
|
||||
|
||||
let (rating_rows, director_rows) = tokio::try_join!(
|
||||
sqlx::query_as!(
|
||||
MonthlyRatingRow,
|
||||
r#"SELECT strftime('%Y-%m', watched_at) AS "month!",
|
||||
AVG(CAST(rating AS REAL)) AS "avg_rating!: f64",
|
||||
COUNT(*) AS "count!: i64"
|
||||
FROM reviews
|
||||
WHERE user_id = ? AND watched_at >= datetime('now', '-12 months')
|
||||
GROUP BY "month!"
|
||||
ORDER BY "month!" ASC"#,
|
||||
uid
|
||||
)
|
||||
.fetch_all(&self.pool),
|
||||
sqlx::query_as!(
|
||||
DirectorCountRow,
|
||||
r#"SELECT m.director AS "director!",
|
||||
COUNT(*) AS "count!: i64"
|
||||
FROM reviews r
|
||||
INNER JOIN movies m ON m.id = r.movie_id
|
||||
WHERE r.user_id = ? AND m.director IS NOT NULL
|
||||
GROUP BY m.director
|
||||
ORDER BY COUNT(*) DESC
|
||||
LIMIT 5"#,
|
||||
uid
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
)
|
||||
.map_err(Self::map_err)?;
|
||||
|
||||
let max_director_count = director_rows.iter().map(|d| d.count).max().unwrap_or(1);
|
||||
|
||||
let monthly_ratings = rating_rows
|
||||
.into_iter()
|
||||
.map(|r| MonthlyRating {
|
||||
month_label: format_year_month(&r.month),
|
||||
year_month: r.month,
|
||||
avg_rating: r.avg_rating,
|
||||
count: r.count,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let top_directors = director_rows
|
||||
.into_iter()
|
||||
.map(|d| DirectorStat { director: d.director, count: d.count })
|
||||
.collect();
|
||||
|
||||
Ok(UserTrends { monthly_ratings, top_directors, max_director_count })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use chrono::NaiveDateTime;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::{DiaryEntry, Movie, Review},
|
||||
models::{DiaryEntry, FeedEntry, Movie, Review, UserSummary},
|
||||
value_objects::{
|
||||
Comment, ExternalMetadataId, MovieId, MovieTitle, PosterPath, Rating, ReleaseYear,
|
||||
ReviewId, UserId,
|
||||
@@ -111,6 +111,85 @@ impl DiaryRow {
|
||||
}
|
||||
}
|
||||
|
||||
// Like DiaryRow but includes user_email from JOIN with users table
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct FeedRow {
|
||||
pub id: String,
|
||||
pub external_metadata_id: Option<String>,
|
||||
pub title: String,
|
||||
pub release_year: i64,
|
||||
pub director: Option<String>,
|
||||
pub poster_path: Option<String>,
|
||||
pub review_id: String,
|
||||
pub movie_id: String,
|
||||
pub user_id: String,
|
||||
pub rating: i64,
|
||||
pub comment: Option<String>,
|
||||
pub watched_at: String,
|
||||
pub created_at: String,
|
||||
pub user_email: String,
|
||||
}
|
||||
|
||||
impl FeedRow {
|
||||
pub fn to_domain(self) -> Result<FeedEntry, DomainError> {
|
||||
let diary = DiaryRow {
|
||||
id: self.id,
|
||||
external_metadata_id: self.external_metadata_id,
|
||||
title: self.title,
|
||||
release_year: self.release_year,
|
||||
director: self.director,
|
||||
poster_path: self.poster_path,
|
||||
review_id: self.review_id,
|
||||
movie_id: self.movie_id,
|
||||
user_id: self.user_id,
|
||||
rating: self.rating,
|
||||
comment: self.comment,
|
||||
watched_at: self.watched_at,
|
||||
created_at: self.created_at,
|
||||
}
|
||||
.to_domain()?;
|
||||
Ok(FeedEntry::new(diary, self.user_email))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct UserSummaryRow {
|
||||
pub id: String,
|
||||
pub email: String,
|
||||
pub total_movies: i64,
|
||||
pub avg_rating: Option<f64>,
|
||||
}
|
||||
|
||||
impl UserSummaryRow {
|
||||
pub fn to_domain(self) -> Result<UserSummary, DomainError> {
|
||||
Ok(UserSummary {
|
||||
user_id: UserId::from_uuid(parse_uuid(&self.id)?),
|
||||
email: self.email,
|
||||
total_movies: self.total_movies,
|
||||
avg_rating: self.avg_rating,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct UserTotalsRow {
|
||||
pub total: i64,
|
||||
pub avg_rating: Option<f64>,
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct DirectorCountRow {
|
||||
pub director: String,
|
||||
pub count: i64,
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow)]
|
||||
pub(crate) struct MonthlyRatingRow {
|
||||
pub month: String,
|
||||
pub avg_rating: f64,
|
||||
pub count: i64,
|
||||
}
|
||||
|
||||
pub(crate) fn parse_uuid(s: &str) -> Result<Uuid, DomainError> {
|
||||
Uuid::parse_str(s)
|
||||
.map_err(|e| DomainError::InfrastructureError(format!("Invalid UUID '{}': {}", s, e)))
|
||||
|
||||
172
crates/adapters/sqlite/src/users.rs
Normal file
172
crates/adapters/sqlite/src/users.rs
Normal file
@@ -0,0 +1,172 @@
|
||||
use async_trait::async_trait;
|
||||
use chrono::Utc;
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::User,
|
||||
ports::UserRepository,
|
||||
value_objects::{Email, PasswordHash, UserId},
|
||||
};
|
||||
use super::models::UserSummaryRow;
|
||||
|
||||
/// SQLite-backed implementation of the domain `UserRepository` port.
pub struct SqliteUserRepository {
    // Shared sqlx connection pool; cloning the pool handle is cheap.
    pool: SqlitePool,
}
|
||||
|
||||
impl SqliteUserRepository {
|
||||
pub fn new(pool: SqlitePool) -> Self {
|
||||
Self { pool }
|
||||
}
|
||||
|
||||
fn map_err(e: sqlx::Error) -> DomainError {
|
||||
tracing::error!("Database error: {:?}", e);
|
||||
DomainError::InfrastructureError("Database operation failed".into())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl UserRepository for SqliteUserRepository {
|
||||
async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError> {
|
||||
let email_str = email.value();
|
||||
let row = sqlx::query!(
|
||||
"SELECT id, email, password_hash FROM users WHERE email = ?",
|
||||
email_str
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
|
||||
match row {
|
||||
None => Ok(None),
|
||||
Some(r) => {
|
||||
let id = uuid::Uuid::parse_str(&r.id)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
let email = Email::new(r.email)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
let hash = PasswordHash::new(r.password_hash)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
Ok(Some(User::from_persistence(UserId::from_uuid(id), email, hash)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn save(&self, user: &User) -> Result<(), DomainError> {
|
||||
let id = user.id().value().to_string();
|
||||
let email = user.email().value();
|
||||
let hash = user.password_hash().value();
|
||||
let created_at = Utc::now().to_rfc3339();
|
||||
|
||||
let result = sqlx::query!(
|
||||
"INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
|
||||
id,
|
||||
email,
|
||||
hash,
|
||||
created_at
|
||||
)
|
||||
.execute(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err(DomainError::ValidationError("Email already registered".into()));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError> {
|
||||
let id_str = id.value().to_string();
|
||||
let row = sqlx::query!(
|
||||
"SELECT id, email, password_hash FROM users WHERE id = ?",
|
||||
id_str
|
||||
)
|
||||
.fetch_optional(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?;
|
||||
|
||||
match row {
|
||||
None => Ok(None),
|
||||
Some(r) => {
|
||||
let uuid = uuid::Uuid::parse_str(&r.id)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
let email = Email::new(r.email)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
let hash = PasswordHash::new(r.password_hash)
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||
Ok(Some(User::from_persistence(UserId::from_uuid(uuid), email, hash)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> {
|
||||
sqlx::query_as!(
|
||||
UserSummaryRow,
|
||||
r#"SELECT u.id,
|
||||
u.email,
|
||||
COUNT(DISTINCT r.movie_id) AS "total_movies!: i64",
|
||||
AVG(CAST(r.rating AS REAL)) AS avg_rating
|
||||
FROM users u
|
||||
LEFT JOIN reviews r ON r.user_id = u.id
|
||||
GROUP BY u.id, u.email
|
||||
ORDER BY u.email ASC"#
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await
|
||||
.map_err(Self::map_err)?
|
||||
.into_iter()
|
||||
.map(UserSummaryRow::to_domain)
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use sqlx::sqlite::SqlitePoolOptions;
    use sqlx::SqlitePool;

    /// Spin up a fresh in-memory database holding only the `users` table.
    ///
    /// The pool is capped at a single connection on purpose: every *new*
    /// SQLite connection to `:memory:` opens its own private database, so
    /// with the default pool size the schema created here could be invisible
    /// to (or silently vanish from) whichever connection a later query
    /// happens to check out.
    async fn setup() -> (SqlitePool, SqliteUserRepository) {
        let pool = SqlitePoolOptions::new()
            .max_connections(1)
            .connect(":memory:")
            .await
            .unwrap();
        sqlx::query(
            "CREATE TABLE users (id TEXT PRIMARY KEY, email TEXT NOT NULL UNIQUE, password_hash TEXT NOT NULL, created_at TEXT NOT NULL)"
        )
        .execute(&pool)
        .await
        .unwrap();
        let repo = SqliteUserRepository::new(pool.clone());
        (pool, repo)
    }

    #[tokio::test]
    async fn find_by_id_returns_none_when_not_found() {
        let (_pool, repo) = setup().await;
        let result = repo
            .find_by_id(&UserId::from_uuid(uuid::Uuid::new_v4()))
            .await
            .unwrap();
        assert!(result.is_none());
    }

    #[tokio::test]
    async fn find_by_id_returns_user_when_found() {
        let (pool, repo) = setup().await;
        let id = uuid::Uuid::new_v4();
        // Insert directly via SQL so the test exercises only the read path.
        sqlx::query(
            "INSERT INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)"
        )
        .bind(id.to_string())
        .bind("test@example.com")
        .bind("$argon2id$v=19$m=65536,t=2,p=1$fakesalt$fakehash")
        .bind("2026-01-01T00:00:00Z")
        .execute(&pool)
        .await
        .unwrap();

        let result = repo
            .find_by_id(&UserId::from_uuid(id))
            .await
            .unwrap();
        assert!(result.is_some());
        assert_eq!(result.unwrap().email().value(), "test@example.com");
    }
}
|
||||
@@ -7,6 +7,7 @@ edition = "2024"
|
||||
askama = { version = "0.16.0" }
|
||||
|
||||
serde = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
|
||||
domain = { workspace = true }
|
||||
application = { workspace = true }
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
use askama::Template;
|
||||
use application::ports::HtmlRenderer;
|
||||
use domain::models::{DiaryEntry, collections::Paginated};
|
||||
use chrono::Datelike;
|
||||
use application::ports::{
|
||||
ActivityFeedPageData, HtmlPageContext, HtmlRenderer, LoginPageData,
|
||||
NewReviewPageData, ProfilePageData, RegisterPageData, UsersPageData,
|
||||
};
|
||||
use domain::models::{
|
||||
DiaryEntry, FeedEntry, MonthActivity, MonthlyRating, UserStats, UserSummary, UserTrends,
|
||||
collections::Paginated,
|
||||
};
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "diary.html")]
|
||||
@@ -9,6 +16,117 @@ struct DiaryTemplate<'a> {
|
||||
current_offset: u32,
|
||||
limit: u32,
|
||||
has_more: bool,
|
||||
ctx: &'a HtmlPageContext,
|
||||
}
|
||||
|
||||
/// Login form page; `error` carries a failed-attempt message to redisplay.
#[derive(Template)]
#[template(path = "login.html")]
struct LoginTemplate<'a> {
    error: Option<&'a str>,
    ctx: &'a HtmlPageContext,
}

/// Registration form page; `error` carries a validation message to redisplay.
#[derive(Template)]
#[template(path = "register.html")]
struct RegisterTemplate<'a> {
    error: Option<&'a str>,
    ctx: &'a HtmlPageContext,
}

/// "Log a review" form page; `error` carries a submission error to redisplay.
#[derive(Template)]
#[template(path = "new_review.html")]
struct NewReviewTemplate<'a> {
    error: Option<&'a str>,
    ctx: &'a HtmlPageContext,
}
|
||||
|
||||
/// Site-wide activity feed page with offset-based pagination state.
#[derive(Template)]
#[template(path = "activity_feed.html")]
struct ActivityFeedTemplate<'a> {
    entries: &'a [FeedEntry],
    current_offset: u32,
    limit: u32,
    // True when another page exists after the current offset.
    has_more: bool,
    ctx: &'a HtmlPageContext,
}

/// Members directory page.
#[derive(Template)]
#[template(path = "users.html")]
struct UsersTemplate<'a> {
    users: &'a [UserSummary],
    ctx: &'a HtmlPageContext,
}
|
||||
|
||||
/// View-model pairing a domain `MonthlyRating` with its pre-computed bar
/// height, keeping arithmetic out of the template.
/// NOTE(review): shares its name with the sqlite adapter's row struct —
/// different crate, unrelated type.
struct MonthlyRatingRow<'a> {
    rating: &'a MonthlyRating,
    // Height in pixels for the trends bar chart (see bar_height_px).
    bar_height_px: i64,
}
|
||||
|
||||
/// User profile page. Which optional sections are populated depends on
/// `view` ("recent" / "ratings" / "history" / "trends" — see profile.html).
#[derive(Template)]
#[template(path = "profile.html")]
struct ProfileTemplate<'a> {
    ctx: &'a HtmlPageContext,
    // Shown as the page heading; derived from the email's local part.
    profile_display_name: String,
    stats: &'a UserStats,
    // Active tab selector, matched against string literals in the template.
    view: &'a str,
    // Diary entries for the recent/ratings views; None for other views.
    entries: Option<&'a Paginated<DiaryEntry>>,
    current_offset: u32,
    has_more: bool,
    limit: u32,
    // Month-by-month activity for the history view.
    history: Option<&'a Vec<MonthActivity>>,
    // Aggregates for the trends view.
    trends: Option<&'a UserTrends>,
    monthly_rating_rows: Vec<MonthlyRatingRow<'a>>,
    // Twelve cells (Jan..Dec) for the current-year heatmap.
    heatmap: Vec<HeatmapCell>,
}
|
||||
|
||||
/// One month cell of the yearly activity heatmap.
struct HeatmapCell {
    // Short month name shown under the cell (e.g. "Jan").
    month_label: String,
    // Number of movies watched in that month.
    count: i64,
    // Background opacity in [0, 1]; 0.05 baseline for empty months.
    alpha: f64,
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn relative_time(dt: chrono::NaiveDateTime) -> String {
|
||||
let now = chrono::Utc::now().naive_utc();
|
||||
let diff = now.signed_duration_since(dt);
|
||||
if diff.num_seconds() <= 0 { return "just now".to_string(); }
|
||||
let minutes = diff.num_minutes();
|
||||
let hours = diff.num_hours();
|
||||
let days = diff.num_days();
|
||||
if minutes < 1 { return "just now".to_string(); }
|
||||
if minutes < 60 { return format!("{} min ago", minutes); }
|
||||
if hours < 24 { return format!("{} h ago", hours); }
|
||||
if days == 1 { return "yesterday".to_string(); }
|
||||
if days < 30 { return format!("{} days ago", days); }
|
||||
dt.format("%b %-d, %Y").to_string()
|
||||
}
|
||||
|
||||
fn build_heatmap(history: &[MonthActivity]) -> Vec<HeatmapCell> {
|
||||
let current_year = chrono::Utc::now().year();
|
||||
let count_for = |m: &str| -> i64 {
|
||||
history.iter().find(|a| a.year_month == format!("{}-{}", current_year, m))
|
||||
.map(|a| a.count)
|
||||
.unwrap_or(0)
|
||||
};
|
||||
let months = [
|
||||
("01", "Jan"), ("02", "Feb"), ("03", "Mar"), ("04", "Apr"),
|
||||
("05", "May"), ("06", "Jun"), ("07", "Jul"), ("08", "Aug"),
|
||||
("09", "Sep"), ("10", "Oct"), ("11", "Nov"), ("12", "Dec"),
|
||||
];
|
||||
let counts: Vec<i64> = months.iter().map(|(m, _)| count_for(m)).collect();
|
||||
let max = counts.iter().copied().max().unwrap_or(0).max(1);
|
||||
months.iter().zip(counts.iter()).map(|((_, label), &count)| {
|
||||
let alpha = if count == 0 { 0.05 } else { 0.15 + 0.75 * (count as f64 / max as f64) };
|
||||
HeatmapCell {
|
||||
month_label: label.to_string(),
|
||||
count,
|
||||
alpha,
|
||||
}
|
||||
}).collect()
|
||||
}
|
||||
|
||||
/// Scale an average rating on the 0–5 scale to a bar height in pixels,
/// where a perfect 5.0 fills the 60 px chart column (fraction truncated).
fn bar_height_px(avg_rating: f64) -> i64 {
    let fraction_of_full_scale = avg_rating / 5.0;
    (fraction_of_full_scale * 60.0) as i64
}
|
||||
|
||||
/// Stateless `HtmlRenderer` implementation backed by compile-time Askama templates.
pub struct AskamaHtmlRenderer;
|
||||
@@ -20,16 +138,94 @@ impl AskamaHtmlRenderer {
|
||||
}
|
||||
|
||||
impl HtmlRenderer for AskamaHtmlRenderer {
|
||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String> {
|
||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String> {
|
||||
let has_more = (data.offset + data.limit) < data.total_count as u32;
|
||||
|
||||
let template = DiaryTemplate {
|
||||
DiaryTemplate {
|
||||
entries: &data.items,
|
||||
current_offset: data.offset,
|
||||
limit: data.limit,
|
||||
has_more,
|
||||
};
|
||||
ctx: &ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
template.render().map_err(|e| e.to_string())
|
||||
fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String> {
|
||||
LoginTemplate {
|
||||
error: data.error,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String> {
|
||||
RegisterTemplate {
|
||||
error: data.error,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String> {
|
||||
NewReviewTemplate {
|
||||
error: data.error,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String> {
|
||||
ActivityFeedTemplate {
|
||||
entries: &data.entries.items,
|
||||
current_offset: data.current_offset,
|
||||
limit: data.limit,
|
||||
has_more: data.has_more,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
fn render_users_page(&self, data: UsersPageData) -> Result<String, String> {
|
||||
UsersTemplate {
|
||||
users: &data.users,
|
||||
ctx: &data.ctx,
|
||||
}
|
||||
.render()
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
    /// Render a user's profile page.
    ///
    /// Pre-computes everything the template cannot express itself: the
    /// heatmap cells (history view), the display name (local part of the
    /// email), and per-month bar heights for the trends chart.
    fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String> {
        // Heatmap only has data when the history view supplied it;
        // otherwise the template receives an empty Vec and skips it.
        let heatmap = data.history.as_deref()
            .map(|h| build_heatmap(h))
            .unwrap_or_default();
        // Use the part of the email before '@' as the visible profile name
        // (falls back to the full string if there is no '@').
        let profile_display_name = data.profile_user_email
            .split('@').next().unwrap_or(&data.profile_user_email).to_string();
        // Pair each monthly rating with its pre-computed bar height (px).
        let monthly_rating_rows: Vec<MonthlyRatingRow<'_>> = data.trends.as_ref()
            .map(|t| t.monthly_ratings.iter().map(|r| MonthlyRatingRow {
                bar_height_px: bar_height_px(r.avg_rating),
                rating: r,
            }).collect())
            .unwrap_or_default();
        ProfileTemplate {
            ctx: &data.ctx,
            profile_display_name,
            stats: &data.stats,
            view: &data.view,
            entries: data.entries.as_ref(),
            current_offset: data.current_offset,
            has_more: data.has_more,
            limit: data.limit,
            history: data.history.as_ref(),
            trends: data.trends.as_ref(),
            monthly_rating_rows,
            heatmap,
        }
        .render()
        .map_err(|e| e.to_string())
    }
|
||||
}
|
||||
|
||||
50
crates/adapters/template-askama/templates/activity_feed.html
Normal file
50
crates/adapters/template-askama/templates/activity_feed.html
Normal file
@@ -0,0 +1,50 @@
|
||||
{% extends "base.html" %}
{# Site-wide activity feed: recent reviews from all members. #}
{% block content %}
<div class="diary">
  {% for entry in entries %}
  <article class="entry">
    {# Poster is optional — manually logged movies may have none. #}
    {% if let Some(poster) = entry.movie().poster_path() %}
    <div class="poster">
      <img src="/posters/{{ poster.value() }}" alt="">
    </div>
    {% endif %}
    <div class="entry-body">
      <div class="entry-title">
        {{ entry.movie().title().value() }}
        <span class="year">({{ entry.movie().release_year().value() }})</span>
      </div>
      {% if let Some(dir) = entry.movie().director() %}
      <div class="director">{{ dir }}</div>
      {% endif %}
      {# stars() yields booleans — one filled/empty glyph per star. #}
      <div class="rating">
        {% for filled in entry.review().stars() %}
        <span class="star {% if filled %}filled{% else %}empty{% endif %}">★</span>
        {% endfor %}
      </div>
      {% if let Some(comment) = entry.review().comment() %}
      <div class="comment">{{ comment.value() }}</div>
      {% endif %}
      <div class="feed-meta">
        <a href="/users/{{ entry.review().user_id().value() }}" class="feed-user">{{ entry.user_display_name() }}</a>
        <span class="feed-time">{{ entry.review().watched_at().format("%b %-d, %Y") }}</span>
      </div>
      {# Only the review's owner sees the inline delete control. #}
      {% if ctx.is_current_user(entry.review().user_id().value()) %}
      <form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
        <button type="submit">Delete</button>
      </form>
      {% endif %}
    </div>
  </article>
  {% else %}
  <p class="empty">No movies logged yet.</p>
  {% endfor %}
</div>
{# Offset pagination; Prev only once a full previous page exists. #}
<nav class="pagination">
  {% if current_offset >= limit %}
  <a href="/?offset={{ current_offset - limit }}">← Prev</a>
  {% endif %}
  {% if has_more %}
  <a href="/?offset={{ current_offset + limit }}">Next →</a>
  {% endif %}
</nav>
{% endblock %}
|
||||
42
crates/adapters/template-askama/templates/base.html
Normal file
42
crates/adapters/template-askama/templates/base.html
Normal file
@@ -0,0 +1,42 @@
|
||||
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>{{ ctx.page_title }}</title>
    {# SEO / social metadata driven by the per-page context. #}
    <meta name="description" content="A personal movie diary — track what you watch, rate and review films.">
    <meta property="og:type" content="website">
    <meta property="og:site_name" content="Movies Diary">
    <meta property="og:title" content="{{ ctx.page_title }}">
    <meta property="og:url" content="{{ ctx.canonical_url }}">
    <meta name="twitter:card" content="summary">
    <meta name="twitter:title" content="{{ ctx.page_title }}">
    <link rel="canonical" href="{{ ctx.canonical_url }}">
    <link rel="preconnect" href="https://fonts.googleapis.com">
    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
    <link href="https://fonts.googleapis.com/css2?family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet">
    <link rel="stylesheet" href="/static/style.css">
</head>
<body>
    <header>
        <a href="/" class="site-title">Movies Diary</a>
        <nav>
            <a href="/">Feed</a>
            <a href="/users">Users</a>
            <a href="{{ ctx.rss_url }}">RSS</a>
            {# Auth-dependent links: user_email is Some only when logged in. #}
            {% if let Some(email) = ctx.user_email %}
            <a href="/reviews/new">Add Review</a>
            <a href="/logout">Logout</a>
            {% else %}
            <a href="/login">Login</a>
            {# Self-signup link only when enabled in the app config. #}
            {% if ctx.register_enabled %}
            <a href="/register">Register</a>
            {% endif %}
            {% endif %}
        </nav>
    </header>
    <main>
        {# Each page template fills this block. #}
        {% block content %}{% endblock %}
    </main>
</body>
</html>
|
||||
@@ -1,76 +1,51 @@
|
||||
<!-- crates/presentation/templates/diary.html -->
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>My Movie Diary</title>
|
||||
<style>
|
||||
/* Minimalist old-school styling */
|
||||
body { font-family: monospace; max-width: 800px; margin: 0 auto; padding: 20px; }
|
||||
.entry { border-bottom: 1px solid #ccc; padding: 10px 0; }
|
||||
.poster { max-width: 100px; float: left; margin-right: 15px; }
|
||||
.clear { clear: both; }
|
||||
.error { color: red; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Movie Diary</h1>
|
||||
|
||||
<!-- Zero-JS Form Submission -->
|
||||
<form action="/reviews" method="POST">
|
||||
<fieldset>
|
||||
<legend>Log a Movie</legend>
|
||||
|
||||
<label for="tmdb_id">TMDB ID (Optional):</label>
|
||||
<input type="text" name="external_metadata_id" id="tmdb_id"><br><br>
|
||||
|
||||
<label for="title">Title (Fallback):</label>
|
||||
<input type="text" name="manual_title" id="title"><br><br>
|
||||
|
||||
<label for="year">Year (Fallback):</label>
|
||||
<input type="number" name="manual_release_year" id="year" min="1888"><br><br>
|
||||
|
||||
<label for="rating">Rating (0-5):</label>
|
||||
<input type="number" name="rating" id="rating" min="0" max="5" required><br><br>
|
||||
|
||||
<button type="submit">Log Movie</button>
|
||||
</fieldset>
|
||||
</form>
|
||||
|
||||
<hr>
|
||||
|
||||
<!-- Rendering the Domain Models -->
|
||||
<div class="diary-entries">
|
||||
{% for entry in entries %}
|
||||
<div class="entry">
|
||||
{% if let Some(poster) = entry.movie().poster_path() %}
|
||||
<!-- Assuming you have a route to serve the raw images -->
|
||||
<img src="/static/posters/{{ poster.value() }}" class="poster" alt="Poster">
|
||||
{% endif %}
|
||||
|
||||
<h3>{{ entry.movie().title().value() }} ({{ entry.movie().release_year().value() }})</h3>
|
||||
<p><strong>Rating:</strong> {{ entry.review().rating().value() }} / 5</p>
|
||||
|
||||
{% if let Some(comment) = entry.review().comment() %}
|
||||
<p><em>"{{ comment.value() }}"</em></p>
|
||||
{% endif %}
|
||||
|
||||
<p><small>Watched on: {{ entry.review().watched_at().format("%Y-%m-%d") }}</small></p>
|
||||
<div class="clear"></div>
|
||||
</div>
|
||||
{% else %}
|
||||
<p>No movies logged yet. Go watch something!</p>
|
||||
{% endfor %}
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<div class="diary">
|
||||
{% for entry in entries %}
|
||||
<article class="entry">
|
||||
{% if let Some(poster) = entry.movie().poster_path() %}
|
||||
<div class="poster">
|
||||
<img src="/posters/{{ poster.value() }}" alt="">
|
||||
</div>
|
||||
|
||||
<!-- Simple Pagination -->
|
||||
<div>
|
||||
{% if current_offset > 0 %}
|
||||
<a href="/diary?offset={{ current_offset - limit }}">Previous Page</a>
|
||||
{% endif %}
|
||||
{% if has_more %}
|
||||
<a href="/diary?offset={{ current_offset + limit }}">Next Page</a>
|
||||
{% endif %}
|
||||
<div class="entry-body">
|
||||
<div class="entry-title">
|
||||
{{ entry.movie().title().value() }}
|
||||
<span class="year">({{ entry.movie().release_year().value() }})</span>
|
||||
</div>
|
||||
{% if let Some(dir) = entry.movie().director() %}
|
||||
<div class="director">{{ dir }}</div>
|
||||
{% endif %}
|
||||
<div class="rating">
|
||||
<span class="star {% if entry.review().rating().value() >= 1 %}filled{% else %}empty{% endif %}">★</span>
|
||||
<span class="star {% if entry.review().rating().value() >= 2 %}filled{% else %}empty{% endif %}">★</span>
|
||||
<span class="star {% if entry.review().rating().value() >= 3 %}filled{% else %}empty{% endif %}">★</span>
|
||||
<span class="star {% if entry.review().rating().value() >= 4 %}filled{% else %}empty{% endif %}">★</span>
|
||||
<span class="star {% if entry.review().rating().value() >= 5 %}filled{% else %}empty{% endif %}">★</span>
|
||||
</div>
|
||||
{% if let Some(comment) = entry.review().comment() %}
|
||||
<div class="comment">{{ comment.value() }}</div>
|
||||
{% endif %}
|
||||
<div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
|
||||
{% if let Some(uid) = ctx.user_id %}
|
||||
{% if *uid == entry.review().user_id().value() %}
|
||||
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
|
||||
<button type="submit">Delete</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
</article>
|
||||
{% else %}
|
||||
<p class="empty">No movies logged yet.</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<nav class="pagination">
|
||||
{% if current_offset > 0 %}
|
||||
<a href="/?offset={{ current_offset - limit }}">← Prev</a>
|
||||
{% endif %}
|
||||
{% if has_more %}
|
||||
<a href="/?offset={{ current_offset + limit }}">Next →</a>
|
||||
{% endif %}
|
||||
</nav>
|
||||
{% endblock %}
|
||||
|
||||
18
crates/adapters/template-askama/templates/login.html
Normal file
18
crates/adapters/template-askama/templates/login.html
Normal file
@@ -0,0 +1,18 @@
|
||||
{% extends "base.html" %}
{# Login form; `error` is Some when a previous attempt failed. #}
{% block content %}
<h1>Login</h1>
{% if let Some(err) = error %}
<p class="error">{{ err }}</p>
{% endif %}
<form method="POST" action="/login">
    <label>
        Email<br>
        <input type="email" name="email" required autocomplete="email">
    </label>
    <label>
        Password<br>
        <input type="password" name="password" required autocomplete="current-password">
    </label>
    <button type="submit">Login</button>
</form>
{% endblock %}
|
||||
40
crates/adapters/template-askama/templates/new_review.html
Normal file
40
crates/adapters/template-askama/templates/new_review.html
Normal file
@@ -0,0 +1,40 @@
|
||||
{% extends "base.html" %}
{# Review logging form: an optional OMDB id plus manual fallback fields. #}
{% block content %}
<h1>Log a Review</h1>
{% if let Some(err) = error %}
<p class="error">{{ err }}</p>
{% endif %}
<form method="POST" action="/reviews">
    <label>
        OMDB ID <span class="optional">(optional)</span><br>
        <input type="text" name="external_metadata_id" placeholder="tt0166924">
    </label>
    <hr>
    {# Manual metadata used when no external id is supplied. #}
    <label>
        Title<br>
        <input type="text" name="manual_title">
    </label>
    <label>
        Year<br>
        <input type="number" name="manual_release_year" min="1888" max="2100">
    </label>
    <label>
        Director<br>
        <input type="text" name="manual_director">
    </label>
    <hr>
    <label>
        Rating (0–5)<br>
        <input type="number" name="rating" min="0" max="5" required>
    </label>
    <label>
        Watched<br>
        <input type="datetime-local" name="watched_at" required>
    </label>
    <label>
        Comment<br>
        <textarea name="comment"></textarea>
    </label>
    <button type="submit">Log Review</button>
</form>
{% endblock %}
|
||||
165
crates/adapters/template-askama/templates/profile.html
Normal file
165
crates/adapters/template-askama/templates/profile.html
Normal file
@@ -0,0 +1,165 @@
|
||||
{% extends "base.html" %}
{# User profile. The `view` string selects one of four tabs:
   recent / ratings (paginated diary), history (heatmap + per-month lists),
   trends (bar charts). Optional data for inactive views arrives as None. #}
{% block content %}
<div class="profile">

    {# Header: display name plus the four aggregate stat tiles. #}
    <div class="stats-header">
        <div class="profile-name">{{ profile_display_name }}</div>
        <div class="stats-grid">
            <div class="stat-tile">
                <div class="stat-value">{{ stats.total_movies }}</div>
                <div class="stat-label">movies</div>
            </div>
            <div class="stat-tile">
                <div class="stat-value">{{ stats.avg_rating_display() }}★</div>
                <div class="stat-label">avg rating</div>
            </div>
            <div class="stat-tile">
                <div class="stat-value">{{ stats.favorite_director_display() }}</div>
                <div class="stat-label">fav director</div>
            </div>
            <div class="stat-tile">
                <div class="stat-value">{{ stats.most_active_month_display() }}</div>
                <div class="stat-label">most active</div>
            </div>
        </div>
    </div>

    <div class="view-tabs">
        <a href="?view=recent" class="view-tab {% if view == "recent" %}active{% endif %}">Recent</a>
        <a href="?view=ratings" class="view-tab {% if view == "ratings" %}active{% endif %}">Top Rated</a>
        <a href="?view=history" class="view-tab {% if view == "history" %}active{% endif %}">History</a>
        <a href="?view=trends" class="view-tab {% if view == "trends" %}active{% endif %}">Trends</a>
    </div>

    {% if view == "history" %}
    {% if let Some(hist) = history %}
    {# Yearly heatmap: cells pre-computed in the renderer (alpha in [0,1]). #}
    <div class="heatmap-section">
        <div class="heatmap-label">Movies watched this year</div>
        <div class="heatmap">
            {% for cell in heatmap %}
            <div class="heatmap-cell" style="--alpha: {{ cell.alpha }}">
                <div class="heatmap-count">{{ cell.count }}</div>
                <div class="heatmap-month">{{ cell.month_label }}</div>
            </div>
            {% endfor %}
        </div>
    </div>
    {% for month in hist %}
    <div class="history-month">
        <h3 class="month-heading">{{ month.month_label }} <span class="month-count">{{ month.count }}</span></h3>
        <div class="diary">
            {% for entry in month.entries %}
            <article class="entry">
                {% if let Some(poster) = entry.movie().poster_path() %}
                <div class="poster"><img src="/posters/{{ poster.value() }}" alt=""></div>
                {% endif %}
                <div class="entry-body">
                    <div class="entry-title">{{ entry.movie().title().value() }} <span class="year">({{ entry.movie().release_year().value() }})</span></div>
                    {% if let Some(dir) = entry.movie().director() %}<div class="director">{{ dir }}</div>{% endif %}
                    <div class="rating">
                        {% for filled in entry.review().stars() %}
                        <span class="star {% if filled %}filled{% else %}empty{% endif %}">★</span>
                        {% endfor %}
                    </div>
                    <div class="watched-at">{{ entry.review().watched_at().format("%b %-d") }}</div>
                </div>
            </article>
            {% endfor %}
        </div>
    </div>
    {% else %}
    <p class="empty">No movies logged yet.</p>
    {% endfor %}
    {% endif %}

    {% elif view == "trends" %}
    {% if let Some(t) = trends %}
    <div class="trends-section">
        {# Bar heights were pre-computed in the renderer (bar_height_px). #}
        {% if !monthly_rating_rows.is_empty() %}
        <div class="chart-block">
            <div class="chart-label">Average rating per month</div>
            <div class="bar-chart">
                {% for row in monthly_rating_rows %}
                <div class="bar-col">
                    <div class="bar-value">{{ "{:.1}"|format(row.rating.avg_rating) }}</div>
                    <div class="bar-fill" style="height: {{ row.bar_height_px }}px"></div>
                    <div class="bar-month">{{ row.rating.month_label }}</div>
                </div>
                {% endfor %}
            </div>
        </div>
        {% endif %}
        {% if !t.top_directors.is_empty() %}
        <div class="chart-block">
            <div class="chart-label">Most watched directors</div>
            <div class="director-chart">
                {% for d in t.top_directors %}
                <div class="director-row">
                    <div class="director-name">{{ d.director }}</div>
                    <div class="director-bar">
                        {# Guard against division by zero for empty data. #}
                        {% if t.max_director_count > 0 %}
                        <div class="director-bar-fill" style="width: {{ d.count * 100 / t.max_director_count }}%"></div>
                        {% else %}
                        <div class="director-bar-fill" style="width: 0%"></div>
                        {% endif %}
                    </div>
                    <div class="director-count">{{ d.count }}</div>
                </div>
                {% endfor %}
            </div>
        </div>
        {% endif %}
    </div>
    {% endif %}

    {% else %}
    {# Default: recent / ratings — a paginated diary listing. #}
    {% if let Some(paged) = entries %}
    <div class="diary">
        {% for entry in paged.items %}
        <article class="entry">
            {% if let Some(poster) = entry.movie().poster_path() %}
            <div class="poster">
                <img src="/posters/{{ poster.value() }}" alt="">
            </div>
            {% endif %}
            <div class="entry-body">
                <div class="entry-title">
                    {{ entry.movie().title().value() }}
                    <span class="year">({{ entry.movie().release_year().value() }})</span>
                </div>
                {% if let Some(dir) = entry.movie().director() %}
                <div class="director">{{ dir }}</div>
                {% endif %}
                <div class="rating">
                    {% for filled in entry.review().stars() %}
                    <span class="star {% if filled %}filled{% else %}empty{% endif %}">★</span>
                    {% endfor %}
                </div>
                {% if let Some(comment) = entry.review().comment() %}
                <div class="comment">{{ comment.value() }}</div>
                {% endif %}
                <div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
                {# Owner-only inline delete. #}
                {% if ctx.is_current_user(entry.review().user_id().value()) %}
                <form method="post" action="/reviews/{{ entry.review().id().value() }}/delete" class="delete-form">
                    <button type="submit">Delete</button>
                </form>
                {% endif %}
            </div>
        </article>
        {% else %}
        <p class="empty">No reviews yet.</p>
        {% endfor %}
    </div>
    <nav class="pagination">
        {% if current_offset >= limit %}
        <a href="?view={{ view }}&offset={{ current_offset - limit }}">← Prev</a>
        {% endif %}
        {% if has_more %}
        <a href="?view={{ view }}&offset={{ current_offset + limit }}">Next →</a>
        {% endif %}
    </nav>
    {% endif %}
    {% endif %}

</div>
{% endblock %}
|
||||
18
crates/adapters/template-askama/templates/register.html
Normal file
18
crates/adapters/template-askama/templates/register.html
Normal file
@@ -0,0 +1,18 @@
|
||||
{% extends "base.html" %}
{# Registration form; `error` is Some when a previous submission failed. #}
{% block content %}
<h1>Register</h1>
{% if let Some(err) = error %}
<p class="error">{{ err }}</p>
{% endif %}
<form method="POST" action="/register">
    <label>
        Email<br>
        <input type="email" name="email" required autocomplete="email">
    </label>
    <label>
        Password<br>
        <input type="password" name="password" required autocomplete="new-password">
    </label>
    <button type="submit">Register</button>
</form>
{% endblock %}
|
||||
18
crates/adapters/template-askama/templates/users.html
Normal file
18
crates/adapters/template-askama/templates/users.html
Normal file
@@ -0,0 +1,18 @@
|
||||
{% extends "base.html" %}
{# Members directory: one row per user with aggregate review stats. #}
{% block content %}
<div class="users-list">
    <h2 class="page-title">Members</h2>
    {% for user in users %}
    <div class="user-row">
        <div class="user-avatar">{{ user.initial() }}</div>
        <div class="user-info">
            <div class="user-name">{{ user.display_name() }}</div>
            <div class="user-meta">{{ user.total_movies }} movies · avg {{ user.avg_rating_display() }}★</div>
        </div>
        <a href="/users/{{ user.user_id.value() }}" class="btn-secondary">View profile →</a>
    </div>
    {% else %}
    <p class="empty">No users yet.</p>
    {% endfor %}
</div>
{% endblock %}
|
||||
@@ -4,7 +4,11 @@ version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
async-trait = { workspace = true }
|
||||
domain = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { workspace = true }
|
||||
|
||||
@@ -14,7 +14,23 @@ pub struct LogReviewCommand {
|
||||
pub watched_at: NaiveDateTime,
|
||||
}
|
||||
|
||||
/// Re-syncs a movie's poster from the external metadata provider.
#[derive(Clone)]
pub struct SyncPosterCommand {
    pub movie_id: Uuid,
    // Provider-side identifier (e.g. TMDB/IMDb id) used to fetch the poster.
    pub external_metadata_id: String,
}

/// Credentials submitted on the login form.
pub struct LoginCommand {
    pub email: String,
    // Plaintext password as submitted; verified by the auth service.
    pub password: String,
}

/// Payload for creating a new account.
pub struct RegisterCommand {
    pub email: String,
    // Plaintext password; hashed before storage by the password hasher port.
    pub password: String,
}

/// Request to delete a review. Carries the caller's identity so the use case
/// can enforce that only the review's author may delete it.
pub struct DeleteReviewCommand {
    pub review_id: Uuid,
    pub requesting_user_id: Uuid,
}
|
||||
|
||||
16
crates/application/src/config.rs
Normal file
16
crates/application/src/config.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
/// Application-level configuration, sourced once from environment variables.
#[derive(Clone)]
pub struct AppConfig {
    /// Whether new user registration is enabled (`ALLOW_REGISTRATION`).
    pub allow_registration: bool,
    /// Public base URL used to build absolute links (`BASE_URL`).
    pub base_url: String,
}

impl AppConfig {
    /// Builds the config from the process environment.
    ///
    /// `ALLOW_REGISTRATION` accepts `true` or `1` as truthy, case-insensitively
    /// and ignoring surrounding whitespace (so `"TRUE"` or `" 1 "` work);
    /// anything else — including an unset variable — disables registration.
    /// `BASE_URL` falls back to `http://localhost:3000` when unset.
    pub fn from_env() -> Self {
        let allow_registration = std::env::var("ALLOW_REGISTRATION")
            .map(|v| {
                // Normalize before comparing so docker-compose / shell quirks
                // ("TRUE", trailing whitespace) don't silently disable the flag.
                let v = v.trim().to_ascii_lowercase();
                v == "true" || v == "1"
            })
            .unwrap_or(false);
        let base_url = std::env::var("BASE_URL")
            .unwrap_or_else(|_| "http://localhost:3000".to_string());
        Self { allow_registration, base_url }
    }
}
|
||||
@@ -2,9 +2,11 @@ use std::sync::Arc;
|
||||
|
||||
use domain::ports::{
|
||||
AuthService, EventPublisher, MetadataClient, MovieRepository, PasswordHasher,
|
||||
PosterFetcherClient, PosterStorage,
|
||||
PosterFetcherClient, PosterStorage, UserRepository,
|
||||
};
|
||||
|
||||
use crate::config::AppConfig;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AppContext {
|
||||
pub repository: Arc<dyn MovieRepository>,
|
||||
@@ -14,4 +16,6 @@ pub struct AppContext {
|
||||
pub event_publisher: Arc<dyn EventPublisher>,
|
||||
pub auth_service: Arc<dyn AuthService>,
|
||||
pub password_hasher: Arc<dyn PasswordHasher>,
|
||||
pub user_repository: Arc<dyn UserRepository>,
|
||||
pub config: AppConfig,
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
pub mod commands;
|
||||
pub mod config;
|
||||
pub mod context;
|
||||
pub mod movie_resolver;
|
||||
pub mod ports;
|
||||
pub mod queries;
|
||||
pub mod use_cases;
|
||||
|
||||
595
crates/application/src/movie_resolver.rs
Normal file
595
crates/application/src/movie_resolver.rs
Normal file
@@ -0,0 +1,595 @@
|
||||
use async_trait::async_trait;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::Movie,
|
||||
ports::{MetadataClient, MetadataSearchCriteria, MovieRepository},
|
||||
value_objects::{ExternalMetadataId, MovieTitle, ReleaseYear},
|
||||
};
|
||||
|
||||
use crate::commands::LogReviewCommand;
|
||||
|
||||
/// Borrowed dependencies a [`ResolutionStrategy`] needs to do its work.
pub struct MovieResolverDeps<'a> {
    // Local movie store, consulted before any remote lookup.
    pub repository: &'a dyn MovieRepository,
    // Remote metadata provider, used on a local cache miss.
    pub metadata_client: &'a dyn MetadataClient,
}

/// One step in the movie-resolution pipeline.
///
/// `resolve` returns:
/// * `Ok(Some((movie, is_new)))` — resolved; `is_new` is `true` when the
///   movie is not yet persisted and the caller must upsert it,
/// * `Ok(None)` — soft failure; the pipeline tries the next strategy,
/// * `Err(_)` — hard validation/infrastructure failure, aborts resolution.
#[async_trait]
pub trait ResolutionStrategy: Send + Sync {
    /// Cheap pre-check: does this strategy apply to the command at all?
    fn can_handle(&self, cmd: &LogReviewCommand) -> bool;
    async fn resolve(
        &self,
        cmd: &LogReviewCommand,
        deps: &MovieResolverDeps<'_>,
    ) -> Result<Option<(Movie, bool)>, DomainError>;
}

// Resolve via an external metadata id (local cache first, then remote fetch).
pub struct ExternalIdStrategy;
// Resolve by searching the metadata provider with a manual title (+ year).
pub struct TitleSearchStrategy;
// Last resort: match or create a movie from manually entered fields only.
pub struct ManualMovieStrategy;

/// Ordered chain of strategies; the first one that yields a movie wins.
pub struct MovieResolver {
    strategies: Vec<Box<dyn ResolutionStrategy>>,
}
|
||||
|
||||
impl MovieResolver {
|
||||
pub fn default_pipeline() -> Self {
|
||||
Self {
|
||||
strategies: vec![
|
||||
Box::new(ExternalIdStrategy),
|
||||
Box::new(TitleSearchStrategy),
|
||||
Box::new(ManualMovieStrategy),
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn resolve(
|
||||
&self,
|
||||
cmd: &LogReviewCommand,
|
||||
deps: &MovieResolverDeps<'_>,
|
||||
) -> Result<(Movie, bool), DomainError> {
|
||||
for strategy in &self.strategies {
|
||||
if strategy.can_handle(cmd) {
|
||||
if let Some(result) = strategy.resolve(cmd, deps).await? {
|
||||
return Ok(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(DomainError::ValidationError(
|
||||
"Manual title required if TMDB fetch fails or is omitted".into(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ResolutionStrategy for ExternalIdStrategy {
|
||||
fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
|
||||
cmd.external_metadata_id.is_some()
|
||||
}
|
||||
|
||||
async fn resolve(
|
||||
&self,
|
||||
cmd: &LogReviewCommand,
|
||||
deps: &MovieResolverDeps<'_>,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError> {
|
||||
let ext_id_str = cmd.external_metadata_id.as_deref().unwrap();
|
||||
let tmdb_id = ExternalMetadataId::new(ext_id_str.to_string())?;
|
||||
|
||||
if let Some(m) = deps.repository.get_movie_by_external_id(&tmdb_id).await? {
|
||||
return Ok(Some((m, false)));
|
||||
}
|
||||
|
||||
match deps
|
||||
.metadata_client
|
||||
.fetch_movie_metadata(&MetadataSearchCriteria::ImdbId(tmdb_id))
|
||||
.await
|
||||
{
|
||||
Ok(m) => Ok(Some((m, true))),
|
||||
Err(e) => {
|
||||
tracing::warn!(
|
||||
"Failed to fetch from TMDB, falling back to manual entry: {:?}",
|
||||
e
|
||||
);
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ResolutionStrategy for TitleSearchStrategy {
|
||||
fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
|
||||
cmd.manual_title.is_some()
|
||||
}
|
||||
|
||||
async fn resolve(
|
||||
&self,
|
||||
cmd: &LogReviewCommand,
|
||||
deps: &MovieResolverDeps<'_>,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError> {
|
||||
let title = cmd.manual_title.as_deref().unwrap();
|
||||
let criteria = MetadataSearchCriteria::Title {
|
||||
title: title.to_string(),
|
||||
year: cmd.manual_release_year,
|
||||
};
|
||||
match deps.metadata_client.fetch_movie_metadata(&criteria).await {
|
||||
Ok(m) => Ok(Some((m, true))),
|
||||
Err(e) => {
|
||||
tracing::warn!("OMDb title search failed, falling back to manual: {:?}", e);
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ResolutionStrategy for ManualMovieStrategy {
|
||||
fn can_handle(&self, cmd: &LogReviewCommand) -> bool {
|
||||
cmd.manual_title.is_some()
|
||||
}
|
||||
|
||||
async fn resolve(
|
||||
&self,
|
||||
cmd: &LogReviewCommand,
|
||||
deps: &MovieResolverDeps<'_>,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError> {
|
||||
let title_str = match &cmd.manual_title {
|
||||
Some(t) => t,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let year_val = cmd.manual_release_year.ok_or_else(|| {
|
||||
DomainError::ValidationError(
|
||||
"Manual release year required if TMDB fetch fails or is omitted".into(),
|
||||
)
|
||||
})?;
|
||||
|
||||
let title = MovieTitle::new(title_str.clone())?;
|
||||
let release_year = ReleaseYear::new(year_val)?;
|
||||
|
||||
let candidates = deps
|
||||
.repository
|
||||
.get_movies_by_title_and_year(&title, &release_year)
|
||||
.await?;
|
||||
|
||||
let matched = candidates
|
||||
.into_iter()
|
||||
.find(|m| m.is_manual_match(&title, &release_year, cmd.manual_director.as_deref()));
|
||||
|
||||
if let Some(existing) = matched {
|
||||
Ok(Some((existing, false)))
|
||||
} else {
|
||||
let new_movie =
|
||||
Movie::new(None, title, release_year, cmd.manual_director.clone(), None);
|
||||
Ok(Some((new_movie, true)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for the three resolution strategies and the resolver
    //! pipeline. All collaborators are hand-rolled stubs; any stub method a
    //! scenario must never reach uses `panic!("unexpected")` so an accidental
    //! call fails the test loudly instead of silently succeeding.
    use super::*;
    use chrono::NaiveDate;
    use domain::{
        errors::DomainError,
        events::DomainEvent,
        models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated},
        ports::{MetadataSearchCriteria, MovieRepository},
        value_objects::{
            ExternalMetadataId, MovieId, MovieTitle, PosterUrl, ReleaseYear, ReviewId,
        },
    };

    // Builds a LogReviewCommand with fixed rating/date and the given
    // resolution inputs (external id, manual title, manual release year).
    fn make_cmd(ext_id: Option<&str>, title: Option<&str>, year: Option<u16>) -> LogReviewCommand {
        LogReviewCommand {
            external_metadata_id: ext_id.map(String::from),
            manual_title: title.map(String::from),
            manual_release_year: year,
            manual_director: None,
            user_id: uuid::Uuid::new_v4(),
            rating: 4,
            comment: None,
            watched_at: NaiveDate::from_ymd_opt(2024, 1, 1)
                .unwrap()
                .and_hms_opt(0, 0, 0)
                .unwrap(),
        }
    }

    // Minimal valid movie ("Inception", 2010) shared by the stubs below.
    fn make_movie() -> Movie {
        Movie::new(
            None,
            MovieTitle::new("Inception".to_string()).unwrap(),
            ReleaseYear::new(2010).unwrap(),
            None,
            None,
        )
    }

    // Repository stubs — each implements only the lookups its scenario uses:
    struct RepoWithExternalMovie(Movie); // external-id lookup hits
    struct RepoEmpty; // every lookup misses
    struct RepoWithTitleMatch(Movie); // title/year search hits

    #[async_trait]
    impl MovieRepository for RepoWithExternalMovie {
        // The one method this stub supports: the cached-movie hit.
        async fn get_movie_by_external_id(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<Movie>, DomainError> {
            Ok(Some(self.0.clone()))
        }
        async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
            panic!("unexpected")
        }
        async fn get_movies_by_title_and_year(
            &self,
            _: &MovieTitle,
            _: &ReleaseYear,
        ) -> Result<Vec<Movie>, DomainError> {
            panic!("unexpected")
        }
        async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
            panic!("unexpected")
        }
        async fn query_diary(
            &self,
            _: &DiaryFilter,
        ) -> Result<Paginated<DiaryEntry>, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_by_id(
            &self,
            _: &ReviewId,
        ) -> Result<Option<Review>, DomainError> {
            panic!("unexpected")
        }
        async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
        async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
    }

    #[async_trait]
    impl MovieRepository for RepoEmpty {
        // Cache miss: forces strategies to hit the metadata client.
        async fn get_movie_by_external_id(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<Movie>, DomainError> {
            Ok(None)
        }
        async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
            panic!("unexpected")
        }
        // No title/year candidates either — manual entry must create a movie.
        async fn get_movies_by_title_and_year(
            &self,
            _: &MovieTitle,
            _: &ReleaseYear,
        ) -> Result<Vec<Movie>, DomainError> {
            Ok(vec![])
        }
        async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
            panic!("unexpected")
        }
        async fn query_diary(
            &self,
            _: &DiaryFilter,
        ) -> Result<Paginated<DiaryEntry>, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_by_id(
            &self,
            _: &ReviewId,
        ) -> Result<Option<Review>, DomainError> {
            panic!("unexpected")
        }
        async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
        async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
    }

    #[async_trait]
    impl MovieRepository for RepoWithTitleMatch {
        async fn get_movie_by_external_id(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<Movie>, DomainError> {
            panic!("unexpected")
        }
        async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> {
            panic!("unexpected")
        }
        // Exactly one candidate, letting ManualMovieStrategy match it.
        async fn get_movies_by_title_and_year(
            &self,
            _: &MovieTitle,
            _: &ReleaseYear,
        ) -> Result<Vec<Movie>, DomainError> {
            Ok(vec![self.0.clone()])
        }
        async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> {
            panic!("unexpected")
        }
        async fn query_diary(
            &self,
            _: &DiaryFilter,
        ) -> Result<Paginated<DiaryEntry>, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> {
            panic!("unexpected")
        }
        async fn get_review_by_id(
            &self,
            _: &ReviewId,
        ) -> Result<Option<Review>, DomainError> {
            panic!("unexpected")
        }
        async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> {
            panic!("unexpected")
        }
        async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
        async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, DomainError> { panic!("unexpected") }
        async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
    }

    // Metadata-client stubs: one always succeeds, one always fails.
    struct MetaReturnsMovie(Movie);
    struct MetaErrors;

    #[async_trait]
    impl MetadataClient for MetaReturnsMovie {
        async fn fetch_movie_metadata(
            &self,
            _: &MetadataSearchCriteria,
        ) -> Result<Movie, DomainError> {
            Ok(self.0.clone())
        }
        async fn get_poster_url(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<PosterUrl>, DomainError> {
            panic!("unexpected")
        }
    }

    #[async_trait]
    impl MetadataClient for MetaErrors {
        // Simulates a provider outage — strategies should soft-fail on this.
        async fn fetch_movie_metadata(
            &self,
            _: &MetadataSearchCriteria,
        ) -> Result<Movie, DomainError> {
            Err(DomainError::InfrastructureError("metadata unavailable".into()))
        }
        async fn get_poster_url(
            &self,
            _: &ExternalMetadataId,
        ) -> Result<Option<PosterUrl>, DomainError> {
            panic!("unexpected")
        }
    }

    // --- ExternalIdStrategy ---

    #[test]
    fn external_id_strategy_can_handle_cmd_with_id() {
        let cmd = make_cmd(Some("tt123"), None, None);
        assert!(ExternalIdStrategy.can_handle(&cmd));
    }

    #[test]
    fn external_id_strategy_cannot_handle_cmd_without_id() {
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        assert!(!ExternalIdStrategy.can_handle(&cmd));
    }

    #[tokio::test]
    async fn external_id_strategy_returns_cached_movie() {
        let movie = make_movie();
        let repo = RepoWithExternalMovie(movie.clone());
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(Some("tt123"), None, None);
        let result = ExternalIdStrategy.resolve(&cmd, &deps).await.unwrap();
        // Cached movie ⇒ is_new must be false.
        assert!(matches!(result, Some((_, false))));
    }

    #[tokio::test]
    async fn external_id_strategy_fetches_from_metadata_when_not_cached() {
        let movie = make_movie();
        let repo = RepoEmpty;
        let meta = MetaReturnsMovie(movie);
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(Some("tt123"), None, None);
        let result = ExternalIdStrategy.resolve(&cmd, &deps).await.unwrap();
        // Freshly fetched movie ⇒ is_new must be true.
        assert!(matches!(result, Some((_, true))));
    }

    #[tokio::test]
    async fn external_id_strategy_falls_through_on_metadata_error() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(Some("tt123"), None, None);
        let result = ExternalIdStrategy.resolve(&cmd, &deps).await.unwrap();
        // Provider error is a soft miss, not a hard error.
        assert!(result.is_none());
    }

    // --- TitleSearchStrategy ---

    #[test]
    fn title_strategy_can_handle_cmd_with_title() {
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        assert!(TitleSearchStrategy.can_handle(&cmd));
    }

    #[test]
    fn title_strategy_cannot_handle_cmd_without_title() {
        let cmd = make_cmd(Some("tt123"), None, None);
        assert!(!TitleSearchStrategy.can_handle(&cmd));
    }

    #[tokio::test]
    async fn title_strategy_fetches_from_metadata() {
        let movie = make_movie();
        let repo = RepoEmpty;
        let meta = MetaReturnsMovie(movie);
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        let result = TitleSearchStrategy.resolve(&cmd, &deps).await.unwrap();
        assert!(matches!(result, Some((_, true))));
    }

    #[tokio::test]
    async fn title_strategy_falls_through_on_metadata_error() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        let result = TitleSearchStrategy.resolve(&cmd, &deps).await.unwrap();
        assert!(result.is_none());
    }

    // --- ManualMovieStrategy ---

    #[test]
    fn manual_strategy_can_handle_cmd_with_title() {
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        assert!(ManualMovieStrategy.can_handle(&cmd));
    }

    #[test]
    fn manual_strategy_cannot_handle_cmd_without_title() {
        let cmd = make_cmd(Some("tt123"), None, None);
        assert!(!ManualMovieStrategy.can_handle(&cmd));
    }

    #[tokio::test]
    async fn manual_strategy_returns_existing_movie() {
        let movie = make_movie();
        let repo = RepoWithTitleMatch(movie.clone());
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        let result = ManualMovieStrategy.resolve(&cmd, &deps).await.unwrap();
        assert!(matches!(result, Some((_, false))));
    }

    #[tokio::test]
    async fn manual_strategy_creates_new_movie_when_no_match() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(None, Some("Inception"), Some(2010));
        let result = ManualMovieStrategy.resolve(&cmd, &deps).await.unwrap();
        assert!(matches!(result, Some((_, true))));
    }

    #[tokio::test]
    async fn manual_strategy_errors_without_year() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        // Title present but year missing ⇒ hard validation error.
        let cmd = make_cmd(None, Some("Inception"), None);
        assert!(ManualMovieStrategy.resolve(&cmd, &deps).await.is_err());
    }

    // --- MovieResolver pipeline ---

    #[tokio::test]
    async fn resolver_returns_error_when_no_strategy_matches() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        // No external id, no title: nothing in the chain can handle this.
        let cmd = make_cmd(None, None, None);
        let result = MovieResolver::default_pipeline().resolve(&cmd, &deps).await;
        assert!(result.is_err());
    }

    #[tokio::test]
    async fn resolver_uses_cached_movie_when_external_id_matches() {
        let movie = make_movie();
        let repo = RepoWithExternalMovie(movie.clone());
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        let cmd = make_cmd(Some("tt123"), None, None);
        let (_, is_new) = MovieResolver::default_pipeline()
            .resolve(&cmd, &deps)
            .await
            .unwrap();
        assert!(!is_new);
    }

    #[tokio::test]
    async fn resolver_falls_through_to_manual_when_external_and_title_both_fail() {
        let repo = RepoEmpty;
        let meta = MetaErrors;
        let deps = MovieResolverDeps {
            repository: &repo,
            metadata_client: &meta,
        };
        // Both remote paths fail (MetaErrors); manual entry must create one.
        let cmd = make_cmd(Some("tt123"), Some("Inception"), Some(2010));
        let (_, is_new) = MovieResolver::default_pipeline()
            .resolve(&cmd, &deps)
            .await
            .unwrap();
        assert!(is_new);
    }
}
|
||||
@@ -1,5 +1,74 @@
|
||||
use domain::models::{DiaryEntry, collections::Paginated};
|
||||
use uuid::Uuid;
|
||||
|
||||
use domain::models::{DiaryEntry, FeedEntry, MonthActivity, UserStats, UserSummary, UserTrends, collections::Paginated};
|
||||
|
||||
/// Per-request data shared by every rendered HTML page (auth state + chrome).
pub struct HtmlPageContext {
    // Email of the logged-in user, if any; None means anonymous.
    pub user_email: Option<String>,
    pub user_id: Option<Uuid>,
    // Whether the register link/form should be shown at all.
    pub register_enabled: bool,
    pub rss_url: String,
    pub page_title: String,
    // Absolute URL of this page, for the canonical <link> tag.
    pub canonical_url: String,
}

impl HtmlPageContext {
    /// True when `id` is the currently logged-in user; false for anonymous
    /// visitors (user_id is None) or a different user.
    pub fn is_current_user(&self, id: Uuid) -> bool {
        self.user_id == Some(id)
    }
}
|
||||
|
||||
/// Data for the login page; `error` holds a message from a failed attempt.
pub struct LoginPageData<'a> {
    pub ctx: HtmlPageContext,
    pub error: Option<&'a str>,
}

/// Data for the registration page; `error` mirrors `LoginPageData`.
pub struct RegisterPageData<'a> {
    pub ctx: HtmlPageContext,
    pub error: Option<&'a str>,
}

/// Data for the "log a new review" form page.
pub struct NewReviewPageData<'a> {
    pub ctx: HtmlPageContext,
    pub error: Option<&'a str>,
}

/// Data for one page of the global activity feed.
pub struct ActivityFeedPageData {
    pub ctx: HtmlPageContext,
    pub entries: Paginated<FeedEntry>,
    // Pagination state for building Prev/Next links.
    pub current_offset: u32,
    pub has_more: bool,
    pub limit: u32,
}

/// Data for the members directory page.
pub struct UsersPageData {
    pub ctx: HtmlPageContext,
    pub users: Vec<UserSummary>,
}

/// Data for a user profile page. Exactly one of `entries` / `history` /
/// `trends` is populated depending on `view` ("recent"/"ratings",
/// "history", "trends"); the others stay None.
pub struct ProfilePageData {
    pub ctx: HtmlPageContext,
    pub profile_user_id: Uuid,
    pub profile_user_email: String,
    pub stats: UserStats,
    pub view: String,
    pub entries: Option<Paginated<DiaryEntry>>,
    pub current_offset: u32,
    pub has_more: bool,
    pub limit: u32,
    pub history: Option<Vec<MonthActivity>>,
    pub trends: Option<UserTrends>,
}
|
||||
|
||||
pub trait HtmlRenderer: Send + Sync {
|
||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String>;
|
||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String>;
|
||||
fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String>;
|
||||
fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String>;
|
||||
fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String>;
|
||||
fn render_activity_feed_page(&self, data: ActivityFeedPageData) -> Result<String, String>;
|
||||
fn render_users_page(&self, data: UsersPageData) -> Result<String, String>;
|
||||
fn render_profile_page(&self, data: ProfilePageData) -> Result<String, String>;
|
||||
}
|
||||
|
||||
/// Renders a list of diary entries as an RSS/XML feed document.
pub trait RssFeedRenderer: Send + Sync {
    // Returns the feed XML as a String, or a renderer error message.
    fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String>;
}
|
||||
|
||||
@@ -6,8 +6,23 @@ pub struct GetDiaryQuery {
|
||||
pub offset: Option<u32>,
|
||||
pub sort_by: Option<SortDirection>,
|
||||
pub movie_id: Option<Uuid>,
|
||||
pub user_id: Option<Uuid>,
|
||||
}
|
||||
|
||||
/// Fetch every logged viewing of a single movie.
pub struct GetReviewHistoryQuery {
    pub movie_id: Uuid,
}

/// One page of the global activity feed; None fields use repository defaults.
pub struct GetActivityFeedQuery {
    pub limit: Option<u32>,
    pub offset: Option<u32>,
}

/// Marker query for listing all members (no parameters yet).
pub struct GetUsersQuery;

/// Fetch one view of a user's profile.
pub struct GetUserProfileQuery {
    pub user_id: Uuid,
    // Which profile tab to load: "recent", "ratings", "history", or "trends".
    pub view: String,
    // Pagination only applies to the paginated views ("recent"/"ratings").
    pub limit: Option<u32>,
    pub offset: Option<u32>,
}
|
||||
|
||||
27
crates/application/src/use_cases/delete_review.rs
Normal file
27
crates/application/src/use_cases/delete_review.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use domain::{errors::DomainError, value_objects::{ReviewId, UserId}};
|
||||
use crate::{commands::DeleteReviewCommand, context::AppContext};
|
||||
|
||||
/// Deletes a review after verifying the caller owns it, then garbage-collects
/// the movie if that was its last remaining viewing.
///
/// Errors with `NotFound` when the review id does not exist and
/// `Unauthorized` when the caller is not the review's author.
pub async fn execute(ctx: &AppContext, cmd: DeleteReviewCommand) -> Result<(), DomainError> {
    let review_id = ReviewId::from_uuid(cmd.review_id);
    let requesting_user_id = UserId::from_uuid(cmd.requesting_user_id);

    let review = ctx
        .repository
        .get_review_by_id(&review_id)
        .await?
        .ok_or_else(|| DomainError::NotFound(format!("review {}", cmd.review_id)))?;

    // Ownership check: only the author may delete their own review.
    if review.user_id() != &requesting_user_id {
        return Err(DomainError::Unauthorized("not your review".into()));
    }

    // Capture the movie id before the review row disappears.
    let movie_id = review.movie_id().clone();
    ctx.repository.delete_review(&review_id).await?;

    // Orphan cleanup: drop the movie once no viewings reference it.
    // NOTE(review): delete → history check → delete are three separate repo
    // calls, not one transaction; a concurrent review insert could race the
    // movie deletion — confirm the repository's guarantees.
    let history = ctx.repository.get_review_history(&movie_id).await?;
    if history.viewings().is_empty() {
        ctx.repository.delete_movie(&movie_id).await?;
    }

    Ok(())
}
|
||||
13
crates/application/src/use_cases/get_activity_feed.rs
Normal file
13
crates/application/src/use_cases/get_activity_feed.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::{FeedEntry, collections::{PageParams, Paginated}},
|
||||
};
|
||||
use crate::{context::AppContext, queries::GetActivityFeedQuery};
|
||||
|
||||
/// Returns one page of the global activity feed.
///
/// Errors when the pagination parameters are invalid or the repository fails.
pub async fn execute(
    ctx: &AppContext,
    query: GetActivityFeedQuery,
) -> Result<Paginated<FeedEntry>, DomainError> {
    // PageParams validates/normalizes limit & offset before touching the repo.
    let page = PageParams::new(query.limit, query.offset)?;
    ctx.repository.query_activity_feed(&page).await
}
|
||||
@@ -4,7 +4,7 @@ use domain::{
|
||||
DiaryEntry, DiaryFilter, SortDirection,
|
||||
collections::{PageParams, Paginated},
|
||||
},
|
||||
value_objects::MovieId,
|
||||
value_objects::{MovieId, UserId},
|
||||
};
|
||||
|
||||
use crate::{context::AppContext, queries::GetDiaryQuery};
|
||||
@@ -14,16 +14,15 @@ pub async fn execute(
|
||||
query: GetDiaryQuery,
|
||||
) -> Result<Paginated<DiaryEntry>, DomainError> {
|
||||
let page = PageParams::new(query.limit, query.offset)?;
|
||||
|
||||
let movie_id = query.movie_id.map(MovieId::from_uuid);
|
||||
let user_id = query.user_id.map(UserId::from_uuid);
|
||||
|
||||
let filter = DiaryFilter {
|
||||
sort_by: query.sort_by.unwrap_or(SortDirection::Descending),
|
||||
page,
|
||||
movie_id,
|
||||
user_id,
|
||||
};
|
||||
|
||||
let paginated_results = ctx.repository.query_diary(&filter).await?;
|
||||
|
||||
Ok(paginated_results)
|
||||
ctx.repository.query_diary(&filter).await
|
||||
}
|
||||
|
||||
93
crates/application/src/use_cases/get_user_profile.rs
Normal file
93
crates/application/src/use_cases/get_user_profile.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
models::{
|
||||
DiaryEntry, DiaryFilter, MonthActivity, SortDirection, UserStats, UserTrends,
|
||||
collections::{PageParams, Paginated},
|
||||
},
|
||||
value_objects::UserId,
|
||||
};
|
||||
use crate::{context::AppContext, queries::GetUserProfileQuery};
|
||||
|
||||
/// Aggregated result of a profile query. `stats` is always present; exactly
/// one of `entries` / `history` / `trends` is Some, chosen by the requested
/// view, and the other two are None.
pub struct UserProfileData {
    pub stats: UserStats,
    // Paginated diary entries ("recent" and "ratings" views).
    pub entries: Option<Paginated<DiaryEntry>>,
    // Month-bucketed full history ("history" view).
    pub history: Option<Vec<MonthActivity>>,
    // Rating/genre trends ("trends" view).
    pub trends: Option<UserTrends>,
}
||||
|
||||
pub async fn execute(
|
||||
ctx: &AppContext,
|
||||
query: GetUserProfileQuery,
|
||||
) -> Result<UserProfileData, DomainError> {
|
||||
let user_id = UserId::from_uuid(query.user_id);
|
||||
let stats = ctx.repository.get_user_stats(&user_id).await?;
|
||||
|
||||
match query.view.as_str() {
|
||||
"history" => {
|
||||
// V1: loads all entries into memory. Personal diaries are bounded in size;
|
||||
// spec calls for showing every movie grouped by month, so full load is intentional.
|
||||
let all_entries = ctx.repository.get_user_history(&user_id).await?;
|
||||
let history = group_by_month(all_entries);
|
||||
Ok(UserProfileData { stats, entries: None, history: Some(history), trends: None })
|
||||
}
|
||||
"trends" => {
|
||||
let trends = ctx.repository.get_user_trends(&user_id).await?;
|
||||
Ok(UserProfileData { stats, entries: None, history: None, trends: Some(trends) })
|
||||
}
|
||||
"ratings" => {
|
||||
let page = PageParams::new(query.limit, query.offset)?;
|
||||
let filter = DiaryFilter {
|
||||
sort_by: SortDirection::ByRatingDesc,
|
||||
page,
|
||||
movie_id: None,
|
||||
user_id: Some(user_id),
|
||||
};
|
||||
let entries = ctx.repository.query_diary(&filter).await?;
|
||||
Ok(UserProfileData { stats, entries: Some(entries), history: None, trends: None })
|
||||
}
|
||||
"recent" => {
|
||||
let page = PageParams::new(query.limit, query.offset)?;
|
||||
let filter = DiaryFilter {
|
||||
sort_by: SortDirection::Descending,
|
||||
page,
|
||||
movie_id: None,
|
||||
user_id: Some(user_id),
|
||||
};
|
||||
let entries = ctx.repository.query_diary(&filter).await?;
|
||||
Ok(UserProfileData { stats, entries: Some(entries), history: None, trends: None })
|
||||
}
|
||||
other => Err(DomainError::ValidationError(format!("unknown view: {}", other))),
|
||||
}
|
||||
}
|
||||
|
||||
fn group_by_month(entries: Vec<DiaryEntry>) -> Vec<MonthActivity> {
|
||||
use std::collections::BTreeMap;
|
||||
let mut map: BTreeMap<String, Vec<DiaryEntry>> = BTreeMap::new();
|
||||
for entry in entries {
|
||||
let ym = entry.review().watched_at().format("%Y-%m").to_string();
|
||||
map.entry(ym).or_default().push(entry);
|
||||
}
|
||||
let mut result: Vec<MonthActivity> = map
|
||||
.into_iter()
|
||||
.map(|(ym, entries)| MonthActivity {
|
||||
month_label: format_year_month_long(&ym),
|
||||
count: entries.len() as i64,
|
||||
entries,
|
||||
year_month: ym,
|
||||
})
|
||||
.collect();
|
||||
result.reverse();
|
||||
result
|
||||
}
|
||||
|
||||
/// Expands "YYYY-MM" into "MonthName YYYY" (e.g. "2024-03" -> "March 2024").
/// Inputs without a '-' are returned unchanged; an unrecognized month
/// segment is kept verbatim (e.g. "2024-13" -> "13 2024").
fn format_year_month_long(ym: &str) -> String {
    const MONTHS: [(&str, &str); 12] = [
        ("01", "January"), ("02", "February"), ("03", "March"), ("04", "April"),
        ("05", "May"), ("06", "June"), ("07", "July"), ("08", "August"),
        ("09", "September"), ("10", "October"), ("11", "November"), ("12", "December"),
    ];

    match ym.split_once('-') {
        Some((year, month)) => {
            let name = MONTHS
                .iter()
                .find(|(num, _)| *num == month)
                .map(|(_, long)| *long)
                .unwrap_or(month);
            format!("{} {}", name, year)
        }
        None => ym.to_string(),
    }
}
|
||||
9
crates/application/src/use_cases/get_users.rs
Normal file
9
crates/application/src/use_cases/get_users.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
use domain::{errors::DomainError, models::UserSummary};
|
||||
use crate::{context::AppContext, queries::GetUsersQuery};
|
||||
|
||||
/// Lists all users together with their aggregate stats.
///
/// `_query` is currently unused: the listing takes no filters or paging,
/// but the parameter keeps this use case's signature shaped like the
/// other query handlers.
///
/// # Errors
/// Propagates any `DomainError` from the user repository.
pub async fn execute(
    ctx: &AppContext,
    _query: GetUsersQuery,
) -> Result<Vec<UserSummary>, DomainError> {
    ctx.user_repository.list_with_stats().await
}
|
||||
@@ -2,17 +2,25 @@ use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
models::{Movie, Review},
|
||||
value_objects::{Comment, ExternalMetadataId, MovieTitle, Rating, ReleaseYear, UserId},
|
||||
value_objects::{Comment, Rating, UserId},
|
||||
};
|
||||
|
||||
use crate::{commands::LogReviewCommand, context::AppContext};
|
||||
use crate::{
|
||||
commands::LogReviewCommand,
|
||||
context::AppContext,
|
||||
movie_resolver::{MovieResolver, MovieResolverDeps},
|
||||
};
|
||||
|
||||
pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), DomainError> {
|
||||
let rating = Rating::new(cmd.rating)?;
|
||||
let user_id = UserId::from_uuid(cmd.user_id);
|
||||
let comment = cmd.comment.clone().map(Comment::new).transpose()?;
|
||||
|
||||
let (movie, is_new_movie) = resolve_movie(ctx, &cmd).await?;
|
||||
let deps = MovieResolverDeps {
|
||||
repository: ctx.repository.as_ref(),
|
||||
metadata_client: ctx.metadata_client.as_ref(),
|
||||
};
|
||||
let (movie, is_new_movie) = MovieResolver::default_pipeline().resolve(&cmd, &deps).await?;
|
||||
|
||||
ctx.repository.upsert_movie(&movie).await?;
|
||||
|
||||
@@ -24,76 +32,6 @@ pub async fn execute(ctx: &AppContext, cmd: LogReviewCommand) -> Result<(), Doma
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn resolve_movie(
|
||||
ctx: &AppContext,
|
||||
cmd: &LogReviewCommand,
|
||||
) -> Result<(Movie, bool), DomainError> {
|
||||
if let Some(ext_id_str) = &cmd.external_metadata_id {
|
||||
if let Some(resolved) = resolve_external_movie(ctx, ext_id_str).await? {
|
||||
return Ok(resolved);
|
||||
}
|
||||
}
|
||||
|
||||
resolve_manual_movie(ctx, cmd).await
|
||||
}
|
||||
|
||||
async fn resolve_external_movie(
|
||||
ctx: &AppContext,
|
||||
ext_id_str: &str,
|
||||
) -> Result<Option<(Movie, bool)>, DomainError> {
|
||||
let tmdb_id = ExternalMetadataId::new(ext_id_str.to_string())?;
|
||||
|
||||
if let Some(m) = ctx.repository.get_movie_by_external_id(&tmdb_id).await? {
|
||||
return Ok(Some((m, false)));
|
||||
}
|
||||
|
||||
match ctx.metadata_client.fetch_movie_metadata(&tmdb_id).await {
|
||||
Ok(m) => Ok(Some((m, true))),
|
||||
Err(e) => {
|
||||
tracing::warn!(
|
||||
"Failed to fetch from TMDB, falling back to manual entry: {:?}",
|
||||
e
|
||||
);
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn resolve_manual_movie(
|
||||
ctx: &AppContext,
|
||||
cmd: &LogReviewCommand,
|
||||
) -> Result<(Movie, bool), DomainError> {
|
||||
let title_str = cmd.manual_title.as_ref().ok_or_else(|| {
|
||||
DomainError::ValidationError(
|
||||
"Manual title required if TMDB fetch fails or is omitted".into(),
|
||||
)
|
||||
})?;
|
||||
let year_val = cmd.manual_release_year.ok_or_else(|| {
|
||||
DomainError::ValidationError(
|
||||
"Manual release year required if TMDB fetch fails or is omitted".into(),
|
||||
)
|
||||
})?;
|
||||
|
||||
let title = MovieTitle::new(title_str.clone())?;
|
||||
let release_year = ReleaseYear::new(year_val)?;
|
||||
|
||||
let candidates = ctx
|
||||
.repository
|
||||
.get_movies_by_title_and_year(&title, &release_year)
|
||||
.await?;
|
||||
|
||||
let matched_movie = candidates
|
||||
.into_iter()
|
||||
.find(|m| m.is_manual_match(&title, &release_year, cmd.manual_director.as_deref()));
|
||||
|
||||
if let Some(existing_movie) = matched_movie {
|
||||
Ok((existing_movie, false))
|
||||
} else {
|
||||
let new_movie = Movie::new(None, title, release_year, cmd.manual_director.clone(), None);
|
||||
Ok((new_movie, true))
|
||||
}
|
||||
}
|
||||
|
||||
async fn publish_events(
|
||||
ctx: &AppContext,
|
||||
movie: &Movie,
|
||||
|
||||
39
crates/application/src/use_cases/login.rs
Normal file
39
crates/application/src/use_cases/login.rs
Normal file
@@ -0,0 +1,39 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use uuid::Uuid;
|
||||
|
||||
use domain::{errors::DomainError, value_objects::Email};
|
||||
|
||||
use crate::{commands::LoginCommand, context::AppContext};
|
||||
|
||||
/// Successful-login payload handed back to the presentation layer.
pub struct LoginResult {
    /// Bearer token generated by the auth service.
    pub token: String,
    /// Id of the authenticated user.
    pub user_id: Uuid,
    /// Email the user logged in with.
    pub email: String,
    /// Expiry timestamp of `token`.
    pub expires_at: DateTime<Utc>,
}
|
||||
|
||||
/// Authenticates a user by email + password and issues an auth token.
///
/// Both "unknown email" and "wrong password" map to the same
/// `Unauthorized("Invalid credentials")` error so the response does not
/// reveal whether an account exists.
///
/// NOTE(review): when the email is unknown we return before running the
/// password hasher, so the two failure paths differ in timing; if user
/// enumeration resistance matters, consider verifying against a dummy
/// hash on the not-found path — confirm whether that is a requirement.
///
/// # Errors
/// `Unauthorized` on bad credentials; validation/infrastructure errors
/// from `Email::new`, the repository, the hasher, or the auth service
/// are propagated as-is.
pub async fn execute(ctx: &AppContext, cmd: LoginCommand) -> Result<LoginResult, DomainError> {
    // Validate/normalize the email before hitting the repository.
    let email = Email::new(cmd.email)?;
    let user = ctx
        .user_repository
        .find_by_email(&email)
        .await?
        .ok_or_else(|| DomainError::Unauthorized("Invalid credentials".into()))?;

    let valid = ctx
        .password_hasher
        .verify(&cmd.password, user.password_hash())
        .await?;
    if !valid {
        return Err(DomainError::Unauthorized("Invalid credentials".into()));
    }

    let generated = ctx.auth_service.generate_token(user.id()).await?;

    Ok(LoginResult {
        token: generated.token,
        user_id: user.id().value(),
        email: user.email().value().to_string(),
        expires_at: generated.expires_at,
    })
}
|
||||
@@ -1,4 +1,10 @@
|
||||
pub mod delete_review;
|
||||
pub mod get_activity_feed;
|
||||
pub mod get_diary;
|
||||
pub mod get_review_history;
|
||||
pub mod get_user_profile;
|
||||
pub mod get_users;
|
||||
pub mod log_review;
|
||||
pub mod login;
|
||||
pub mod register;
|
||||
pub mod sync_poster;
|
||||
|
||||
28
crates/application/src/use_cases/register.rs
Normal file
28
crates/application/src/use_cases/register.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
use domain::{errors::DomainError, models::User, value_objects::Email};
|
||||
|
||||
use crate::{commands::RegisterCommand, context::AppContext};
|
||||
|
||||
const MIN_PASSWORD_LENGTH: usize = 8;
|
||||
|
||||
pub async fn execute(ctx: &AppContext, cmd: RegisterCommand) -> Result<(), DomainError> {
|
||||
if !ctx.config.allow_registration {
|
||||
return Err(DomainError::Unauthorized("Registration is disabled".into()));
|
||||
}
|
||||
|
||||
if cmd.password.len() < MIN_PASSWORD_LENGTH {
|
||||
return Err(DomainError::ValidationError(
|
||||
"Password must be at least 8 characters".into(),
|
||||
));
|
||||
}
|
||||
|
||||
let email = Email::new(cmd.email)?;
|
||||
|
||||
if ctx.user_repository.find_by_email(&email).await?.is_some() {
|
||||
return Err(DomainError::ValidationError(
|
||||
"Email already registered".into(),
|
||||
));
|
||||
}
|
||||
|
||||
let hash = ctx.password_hasher.hash(&cmd.password).await?;
|
||||
ctx.user_repository.save(&User::new(email, hash)).await
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
[package]
|
||||
name = "common"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
thiserror = { workspace = true }
|
||||
@@ -1 +0,0 @@
|
||||
pub mod errors;
|
||||
@@ -10,5 +10,4 @@ async-trait = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
|
||||
common = { workspace = true }
|
||||
email_address = "0.2.9"
|
||||
|
||||
@@ -13,4 +13,7 @@ pub enum DomainError {
|
||||
|
||||
#[error("Infrastructure failure: {0}")]
|
||||
InfrastructureError(String),
|
||||
|
||||
#[error("Unauthorized: {0}")]
|
||||
Unauthorized(String),
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ pub struct PageParams {
|
||||
|
||||
impl PageParams {
|
||||
const MAX_LIMIT: u32 = 100;
|
||||
const DEFAULT_LIMIT: u32 = 20;
|
||||
const DEFAULT_LIMIT: u32 = 5;
|
||||
|
||||
pub fn new(limit: Option<u32>, offset: Option<u32>) -> Result<Self, DomainError> {
|
||||
let l = limit.unwrap_or(Self::DEFAULT_LIMIT);
|
||||
|
||||
@@ -15,6 +15,7 @@ pub enum SortDirection {
|
||||
#[default]
|
||||
Descending,
|
||||
Ascending,
|
||||
ByRatingDesc,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
@@ -22,6 +23,7 @@ pub struct DiaryFilter {
|
||||
pub sort_by: SortDirection,
|
||||
pub page: PageParams,
|
||||
pub movie_id: Option<MovieId>,
|
||||
pub user_id: Option<UserId>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
@@ -131,14 +133,6 @@ impl Review {
|
||||
comment: Option<Comment>,
|
||||
watched_at: NaiveDateTime,
|
||||
) -> Result<Self, DomainError> {
|
||||
let now = Utc::now().naive_utc();
|
||||
|
||||
if watched_at > now {
|
||||
return Err(DomainError::ValidationError(
|
||||
"watched_at cannot be in the future".into(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
id: ReviewId::generate(),
|
||||
movie_id,
|
||||
@@ -146,7 +140,7 @@ impl Review {
|
||||
rating,
|
||||
comment,
|
||||
watched_at,
|
||||
created_at: now,
|
||||
created_at: Utc::now().naive_utc(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -191,6 +185,11 @@ impl Review {
|
||||
pub fn created_at(&self) -> &NaiveDateTime {
|
||||
&self.created_at
|
||||
}
|
||||
/// Returns [star1_filled, star2_filled, ..., star5_filled]
|
||||
pub fn stars(&self) -> [bool; 5] {
|
||||
let r = self.rating.value();
|
||||
[r >= 1, r >= 2, r >= 3, r >= 4, r >= 5]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
@@ -250,6 +249,10 @@ impl User {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_persistence(id: UserId, email: Email, password_hash: PasswordHash) -> Self {
|
||||
Self { id, email, password_hash }
|
||||
}
|
||||
|
||||
pub fn update_password(&mut self, new_hash: PasswordHash) {
|
||||
self.password_hash = new_hash;
|
||||
}
|
||||
@@ -266,3 +269,90 @@ impl User {
|
||||
&self.password_hash
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FeedEntry {
|
||||
entry: DiaryEntry,
|
||||
user_email: String,
|
||||
}
|
||||
|
||||
impl FeedEntry {
    /// Wraps a diary entry with the email of the user who logged it.
    pub fn new(entry: DiaryEntry, user_email: String) -> Self {
        Self { entry, user_email }
    }
    pub fn movie(&self) -> &Movie { self.entry.movie() }
    pub fn review(&self) -> &Review { self.entry.review() }
    pub fn user_email(&self) -> &str { &self.user_email }
    /// Local part of the email (text before '@'); falls back to the
    /// whole string when there is no '@'.
    pub fn user_display_name(&self) -> &str {
        self.user_email.split('@').next().unwrap_or(&self.user_email)
    }
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct UserSummary {
|
||||
pub user_id: UserId,
|
||||
pub email: String,
|
||||
pub total_movies: i64,
|
||||
pub avg_rating: Option<f64>,
|
||||
}
|
||||
|
||||
impl UserSummary {
|
||||
pub fn display_name(&self) -> &str {
|
||||
self.email.split('@').next().unwrap_or(&self.email)
|
||||
}
|
||||
pub fn avg_rating_display(&self) -> String {
|
||||
self.avg_rating.map(|r| format!("{:.1}", r)).unwrap_or_else(|| "—".to_string())
|
||||
}
|
||||
pub fn initial(&self) -> char {
|
||||
self.display_name().chars().next().unwrap_or('?').to_ascii_uppercase()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct UserStats {
|
||||
pub total_movies: i64,
|
||||
pub avg_rating: Option<f64>,
|
||||
pub favorite_director: Option<String>,
|
||||
pub most_active_month: Option<String>,
|
||||
}
|
||||
|
||||
impl UserStats {
    /// Average rating rendered to one decimal place, or an em dash when
    /// there are no ratings.
    pub fn avg_rating_display(&self) -> String {
        self.avg_rating.map(|r| format!("{:.1}", r)).unwrap_or_else(|| "—".to_string())
    }
    /// Favorite director name, or an em dash placeholder.
    pub fn favorite_director_display(&self) -> &str {
        self.favorite_director.as_deref().unwrap_or("—")
    }
    /// Most active month label, or an em dash placeholder.
    pub fn most_active_month_display(&self) -> &str {
        self.most_active_month.as_deref().unwrap_or("—")
    }
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MonthActivity {
|
||||
pub year_month: String,
|
||||
pub month_label: String,
|
||||
pub count: i64,
|
||||
pub entries: Vec<DiaryEntry>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MonthlyRating {
|
||||
pub year_month: String,
|
||||
pub month_label: String,
|
||||
pub avg_rating: f64,
|
||||
pub count: i64,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct DirectorStat {
|
||||
pub director: String,
|
||||
pub count: i64,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct UserTrends {
|
||||
pub monthly_ratings: Vec<MonthlyRating>,
|
||||
pub top_directors: Vec<DirectorStat>,
|
||||
pub max_director_count: i64,
|
||||
}
|
||||
|
||||
@@ -1,12 +1,17 @@
|
||||
use async_trait::async_trait;
|
||||
use chrono::{DateTime, Utc};
|
||||
|
||||
use crate::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated},
|
||||
models::{
|
||||
DiaryEntry, DiaryFilter, FeedEntry, Movie, Review, ReviewHistory, User, UserStats,
|
||||
UserTrends, UserSummary,
|
||||
collections::{PageParams, Paginated},
|
||||
},
|
||||
value_objects::{
|
||||
ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl, ReleaseYear,
|
||||
UserId,
|
||||
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
|
||||
ReleaseYear, ReviewId, UserId,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -31,13 +36,35 @@ pub trait MovieRepository: Send + Sync {
|
||||
-> Result<Paginated<DiaryEntry>, DomainError>;
|
||||
|
||||
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError>;
|
||||
|
||||
async fn get_review_by_id(&self, review_id: &ReviewId) -> Result<Option<Review>, DomainError>;
|
||||
|
||||
async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError>;
|
||||
|
||||
async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError>;
|
||||
|
||||
async fn query_activity_feed(
|
||||
&self,
|
||||
page: &PageParams,
|
||||
) -> Result<Paginated<FeedEntry>, DomainError>;
|
||||
|
||||
async fn get_user_stats(&self, user_id: &UserId) -> Result<UserStats, DomainError>;
|
||||
|
||||
async fn get_user_history(&self, user_id: &UserId) -> Result<Vec<DiaryEntry>, DomainError>;
|
||||
|
||||
async fn get_user_trends(&self, user_id: &UserId) -> Result<UserTrends, DomainError>;
|
||||
}
|
||||
|
||||
pub enum MetadataSearchCriteria {
|
||||
ImdbId(ExternalMetadataId),
|
||||
Title { title: String, year: Option<u16> },
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait MetadataClient: Send + Sync {
|
||||
async fn fetch_movie_metadata(
|
||||
&self,
|
||||
external_metadata_id: &ExternalMetadataId,
|
||||
criteria: &MetadataSearchCriteria,
|
||||
) -> Result<Movie, DomainError>;
|
||||
async fn get_poster_url(
|
||||
&self,
|
||||
@@ -61,11 +88,26 @@ pub trait PosterStorage: Send + Sync {
|
||||
async fn get_poster(&self, poster_path: &PosterPath) -> Result<Vec<u8>, DomainError>;
|
||||
}
|
||||
|
||||
pub struct GeneratedToken {
|
||||
pub token: String,
|
||||
pub expires_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait AuthService: Send + Sync {
|
||||
async fn generate_token(&self, user_id: &UserId) -> Result<GeneratedToken, DomainError>;
|
||||
async fn validate_token(&self, token: &str) -> Result<UserId, DomainError>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait UserRepository: Send + Sync {
|
||||
async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError>;
|
||||
async fn save(&self, user: &User) -> Result<(), DomainError>;
|
||||
async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError>;
|
||||
|
||||
async fn list_with_stats(&self) -> Result<Vec<UserSummary>, DomainError>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait EventPublisher: Send + Sync {
|
||||
async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
||||
|
||||
@@ -14,6 +14,7 @@ thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
dotenvy = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
@@ -21,9 +22,15 @@ async-trait = { workspace = true }
|
||||
domain = { workspace = true }
|
||||
application = { workspace = true }
|
||||
auth = { workspace = true }
|
||||
metadata = { workspace = true }
|
||||
poster-fetcher = { workspace = true }
|
||||
poster-storage = { workspace = true }
|
||||
sqlite = { workspace = true }
|
||||
sqlx = { workspace = true }
|
||||
template-askama = { workspace = true }
|
||||
event-publisher = { workspace = true }
|
||||
rss = { workspace = true }
|
||||
infer = "0.19.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tower = { version = "0.5", features = ["util"] }
|
||||
|
||||
@@ -1,6 +1,23 @@
|
||||
use chrono::NaiveDateTime;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use uuid::Uuid;
|
||||
|
||||
use application::{commands::LogReviewCommand, queries::GetDiaryQuery};
|
||||
use domain::{errors::DomainError, models::SortDirection};
|
||||
|
||||
fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
T: std::str::FromStr,
|
||||
T::Err: std::fmt::Display,
|
||||
{
|
||||
let s = Option::<String>::deserialize(de)?;
|
||||
match s.as_deref() {
|
||||
None | Some("") => Ok(None),
|
||||
Some(s) => s.parse::<T>().map(Some).map_err(serde::de::Error::custom),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct DiaryQueryParams {
|
||||
pub limit: Option<u32>,
|
||||
@@ -11,15 +28,37 @@ pub struct DiaryQueryParams {
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LogReviewForm {
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub external_metadata_id: Option<String>,
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub manual_title: Option<String>,
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub manual_release_year: Option<u16>,
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub manual_director: Option<String>,
|
||||
pub rating: u8,
|
||||
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||
pub comment: Option<String>,
|
||||
pub watched_at: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LoginForm {
|
||||
pub email: String,
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct RegisterForm {
|
||||
pub email: String,
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct ErrorQuery {
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LogReviewRequest {
|
||||
pub external_metadata_id: Option<String>,
|
||||
@@ -78,12 +117,229 @@ pub struct LoginRequest {
|
||||
#[derive(Serialize)]
|
||||
pub struct LoginResponse {
|
||||
pub token: String,
|
||||
pub user_id: Uuid,
|
||||
pub email: String,
|
||||
pub expires_at: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct RegisterRequest {
|
||||
pub email: String,
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
pub struct LogReviewData {
|
||||
pub external_metadata_id: Option<String>,
|
||||
pub manual_title: Option<String>,
|
||||
pub manual_release_year: Option<u16>,
|
||||
pub manual_director: Option<String>,
|
||||
pub rating: u8,
|
||||
pub comment: Option<String>,
|
||||
pub watched_at: NaiveDateTime,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ParseReviewError {
|
||||
pub field: &'static str,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
impl TryFrom<LogReviewForm> for LogReviewData {
    type Error = ParseReviewError;

    /// Validates an HTML-form submission into `LogReviewData`.
    ///
    /// `watched_at` accepts both `YYYY-MM-DDTHH:MM:SS` and the
    /// seconds-less `YYYY-MM-DDTHH:MM` form that browser
    /// `datetime-local` inputs typically submit.
    fn try_from(form: LogReviewForm) -> Result<Self, Self::Error> {
        let watched_at = NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M:%S")
            .or_else(|_| NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M"))
            .map_err(|_| ParseReviewError {
                field: "watched_at",
                message: format!(
                    "invalid date '{}'; expected YYYY-MM-DDTHH:MM[:SS]",
                    form.watched_at
                ),
            })?;
        Ok(Self {
            // Whitespace-only ids count as absent; the deserializer only
            // turns the fully-empty string into None.
            external_metadata_id: form.external_metadata_id.filter(|s| !s.trim().is_empty()),
            manual_title: form.manual_title,
            manual_release_year: form.manual_release_year,
            manual_director: form.manual_director,
            rating: form.rating,
            comment: form.comment,
            watched_at,
        })
    }
}
|
||||
|
||||
impl TryFrom<LogReviewRequest> for LogReviewData {
    type Error = DomainError;

    /// Validates a JSON API request into `LogReviewData`.
    ///
    /// Unlike the form variant, the API requires the full
    /// `YYYY-MM-DDTHH:MM:SS` timestamp — no seconds-less fallback.
    fn try_from(req: LogReviewRequest) -> Result<Self, Self::Error> {
        let watched_at = NaiveDateTime::parse_from_str(&req.watched_at, "%Y-%m-%dT%H:%M:%S")
            .map_err(|_| {
                DomainError::ValidationError(
                    "invalid watched_at; expected YYYY-MM-DDTHH:MM:SS".into(),
                )
            })?;
        Ok(Self {
            // Whitespace-only ids count as absent.
            external_metadata_id: req.external_metadata_id.filter(|s| !s.trim().is_empty()),
            manual_title: req.manual_title,
            manual_release_year: req.manual_release_year,
            manual_director: req.manual_director,
            rating: req.rating,
            comment: req.comment,
            watched_at,
        })
    }
}
|
||||
|
||||
impl LogReviewData {
|
||||
pub fn into_command(self, user_id: Uuid) -> LogReviewCommand {
|
||||
LogReviewCommand {
|
||||
external_metadata_id: self.external_metadata_id,
|
||||
manual_title: self.manual_title,
|
||||
manual_release_year: self.manual_release_year,
|
||||
manual_director: self.manual_director,
|
||||
rating: self.rating,
|
||||
comment: self.comment,
|
||||
watched_at: self.watched_at,
|
||||
user_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DiaryQueryParams> for GetDiaryQuery {
|
||||
fn from(p: DiaryQueryParams) -> Self {
|
||||
GetDiaryQuery {
|
||||
limit: p.limit,
|
||||
offset: p.offset,
|
||||
sort_by: p.sort_by.as_deref().map(|s| {
|
||||
if s == "asc" {
|
||||
SortDirection::Ascending
|
||||
} else {
|
||||
SortDirection::Descending
|
||||
}
|
||||
}),
|
||||
movie_id: p.movie_id,
|
||||
user_id: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Query-string parameters for the user-profile page.
#[derive(serde::Deserialize, Default)]
pub struct ProfileQueryParams {
    // Profile tab selector; the profile use case matches on
    // "history" / "trends" / "ratings" / "recent".
    pub view: Option<String>,
    // Page size for the paged views.
    pub limit: Option<u32>,
    // Page offset for the paged views.
    pub offset: Option<u32>,
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn make_form(watched_at: &str) -> LogReviewForm {
|
||||
LogReviewForm {
|
||||
external_metadata_id: None,
|
||||
manual_title: None,
|
||||
manual_release_year: None,
|
||||
manual_director: None,
|
||||
rating: 4,
|
||||
comment: None,
|
||||
watched_at: watched_at.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn make_request(watched_at: &str) -> LogReviewRequest {
|
||||
LogReviewRequest {
|
||||
external_metadata_id: None,
|
||||
manual_title: None,
|
||||
manual_release_year: None,
|
||||
manual_director: None,
|
||||
rating: 4,
|
||||
comment: None,
|
||||
watched_at: watched_at.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn form_accepts_datetime_with_seconds() {
|
||||
let data = LogReviewData::try_from(make_form("2024-03-15T20:30:00")).unwrap();
|
||||
assert_eq!(data.watched_at.format("%H:%M:%S").to_string(), "20:30:00");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn form_accepts_datetime_without_seconds() {
|
||||
let data = LogReviewData::try_from(make_form("2024-03-15T20:30")).unwrap();
|
||||
assert_eq!(data.watched_at.format("%H:%M").to_string(), "20:30");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn form_rejects_invalid_datetime() {
|
||||
assert!(LogReviewData::try_from(make_form("not-a-date")).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn api_accepts_datetime_with_seconds() {
|
||||
let data = LogReviewData::try_from(make_request("2024-03-15T20:30:00")).unwrap();
|
||||
assert_eq!(data.watched_at.format("%H:%M:%S").to_string(), "20:30:00");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn api_rejects_datetime_without_seconds() {
|
||||
assert!(LogReviewData::try_from(make_request("2024-03-15T20:30")).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn api_rejects_invalid_datetime() {
|
||||
assert!(LogReviewData::try_from(make_request("garbage")).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn whitespace_external_id_becomes_none_in_form() {
|
||||
let mut form = make_form("2024-03-15T20:30:00");
|
||||
form.external_metadata_id = Some(" ".to_string());
|
||||
let data = LogReviewData::try_from(form).unwrap();
|
||||
assert!(data.external_metadata_id.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn whitespace_external_id_becomes_none_in_request() {
|
||||
let mut req = make_request("2024-03-15T20:30:00");
|
||||
req.external_metadata_id = Some(" ".to_string());
|
||||
let data = LogReviewData::try_from(req).unwrap();
|
||||
assert!(data.external_metadata_id.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn into_command_sets_user_id() {
|
||||
let data = LogReviewData::try_from(make_form("2024-03-15T20:30:00")).unwrap();
|
||||
let user_id = Uuid::new_v4();
|
||||
let cmd = data.into_command(user_id);
|
||||
assert_eq!(cmd.user_id, user_id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sort_by_asc_string_becomes_ascending() {
|
||||
let params = DiaryQueryParams {
|
||||
sort_by: Some("asc".to_string()),
|
||||
limit: None,
|
||||
offset: None,
|
||||
movie_id: None,
|
||||
};
|
||||
let query = GetDiaryQuery::from(params);
|
||||
assert!(matches!(query.sort_by, Some(domain::models::SortDirection::Ascending)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sort_by_other_string_becomes_descending() {
|
||||
let params = DiaryQueryParams {
|
||||
sort_by: Some("desc".to_string()),
|
||||
limit: None,
|
||||
offset: None,
|
||||
movie_id: None,
|
||||
};
|
||||
let query = GetDiaryQuery::from(params);
|
||||
assert!(matches!(query.sort_by, Some(domain::models::SortDirection::Descending)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn diary_response_serializes_correctly() {
|
||||
let resp = DiaryResponse {
|
||||
|
||||
@@ -18,6 +18,7 @@ impl IntoResponse for ApiError {
|
||||
DomainError::InvalidRating { .. } => (StatusCode::BAD_REQUEST, self.0.to_string()),
|
||||
DomainError::ValidationError(msg) => (StatusCode::BAD_REQUEST, msg),
|
||||
DomainError::NotFound(msg) => (StatusCode::NOT_FOUND, msg),
|
||||
DomainError::Unauthorized(msg) => (StatusCode::UNAUTHORIZED, msg),
|
||||
DomainError::InfrastructureError(_) => {
|
||||
tracing::error!("Internal Infrastructure Error: {:?}", self.0);
|
||||
(
|
||||
|
||||
179
crates/presentation/src/event_handlers.rs
Normal file
179
crates/presentation/src/event_handlers.rs
Normal file
@@ -0,0 +1,179 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use application::{commands::SyncPosterCommand, context::AppContext, use_cases::sync_poster};
|
||||
use async_trait::async_trait;
|
||||
use domain::{errors::DomainError, events::DomainEvent};
|
||||
use event_publisher::EventHandler;
|
||||
|
||||
/// Reacts to `MovieDiscovered` events by running the poster-sync use
/// case, retrying with exponential backoff on failure.
pub struct PosterSyncHandler {
    // Application context the sync_poster use case is dispatched through.
    ctx: AppContext,
    // Retries after the first attempt (total attempts = max_retries + 1).
    max_retries: u32,
}
|
||||
|
||||
impl PosterSyncHandler {
    /// Creates a handler; `max_retries` is the number of additional
    /// attempts after the initial one.
    pub fn new(ctx: AppContext, max_retries: u32) -> Self {
        Self { ctx, max_retries }
    }
}
|
||||
|
||||
#[async_trait]
impl EventHandler for PosterSyncHandler {
    /// Handles `MovieDiscovered` by syncing the movie's poster; every
    /// other event variant is ignored (returns Ok immediately).
    ///
    /// Retries up to `max_retries` extra times with exponential backoff
    /// (2^attempt seconds: 1s, 2s, 4s, ...). There is deliberately no
    /// sleep after the final failed attempt. Returns the last error when
    /// all attempts fail.
    async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
        // Extract owned copies of the ids so the command can be rebuilt
        // per attempt.
        let (movie_id, external_metadata_id) = match event {
            DomainEvent::MovieDiscovered {
                movie_id,
                external_metadata_id,
            } => (movie_id.value(), external_metadata_id.value().to_owned()),
            _ => return Ok(()),
        };

        let mut last_err: Option<DomainError> = None;
        for attempt in 0..=self.max_retries {
            let cmd = SyncPosterCommand {
                movie_id,
                external_metadata_id: external_metadata_id.clone(),
            };
            match sync_poster::execute(&self.ctx, cmd).await {
                Ok(()) => return Ok(()),
                Err(e) => {
                    // Only back off when another attempt remains.
                    if attempt < self.max_retries {
                        let delay = Duration::from_secs(2u64.pow(attempt));
                        tracing::warn!(
                            attempt = attempt + 1,
                            max_attempts = self.max_retries + 1,
                            delay_secs = delay.as_secs(),
                            "poster sync failed, retrying: {e}"
                        );
                        tokio::time::sleep(delay).await;
                    }
                    last_err = Some(e);
                }
            }
        }

        // The loop body runs at least once (0..=max_retries is never
        // empty) and every Err sets last_err, so this cannot panic.
        let err = last_err.expect("loop runs at least once and always sets last_err on Err");
        tracing::error!(
            attempts = self.max_retries + 1,
            "poster sync failed after all attempts: {err}"
        );
        Err(err)
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::sync::Arc;
|
||||
use application::config::AppConfig;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
|
||||
ports::{
|
||||
AuthService, EventPublisher, GeneratedToken, MetadataClient, MetadataSearchCriteria,
|
||||
MovieRepository, PasswordHasher, PosterFetcherClient, PosterStorage, UserRepository,
|
||||
},
|
||||
value_objects::{
|
||||
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
|
||||
Rating, ReleaseYear, ReviewId, UserId,
|
||||
},
|
||||
};
|
||||
|
||||
// Panic-stub ports: each method panics so any accidental dispatch into a service
|
||||
// fails the test loudly rather than silently succeeding.
|
||||
struct PanicRepo;
|
||||
struct PanicMetadata;
|
||||
struct PanicFetcher;
|
||||
struct PanicStorage;
|
||||
struct PanicAuth;
|
||||
struct PanicHasher;
|
||||
struct PanicUserRepo;
|
||||
struct NoopPublisher;
|
||||
|
||||
#[async_trait]
|
||||
impl MovieRepository for PanicRepo {
|
||||
async fn get_movie_by_external_id(&self, _: &ExternalMetadataId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
||||
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
||||
async fn get_movies_by_title_and_year(&self, _: &MovieTitle, _: &ReleaseYear) -> Result<Vec<Movie>, DomainError> { panic!("unexpected") }
|
||||
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
|
||||
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
|
||||
async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
|
||||
async fn get_review_by_id(&self, _: &ReviewId) -> Result<Option<Review>, DomainError> { panic!("unexpected") }
|
||||
async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> { panic!("unexpected") }
|
||||
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { panic!("unexpected") }
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_stats(&self, _: &UserId) -> Result<domain::models::UserStats, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_history(&self, _: &UserId) -> Result<Vec<DiaryEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_user_trends(&self, _: &UserId) -> Result<domain::models::UserTrends, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl MetadataClient for PanicMetadata {
|
||||
async fn fetch_movie_metadata(&self, _: &MetadataSearchCriteria) -> Result<Movie, DomainError> { panic!("unexpected") }
|
||||
async fn get_poster_url(&self, _: &ExternalMetadataId) -> Result<Option<PosterUrl>, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl PosterFetcherClient for PanicFetcher {
|
||||
async fn fetch_poster_bytes(&self, _: &PosterUrl) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl PosterStorage for PanicStorage {
|
||||
async fn store_poster(&self, _: &MovieId, _: &[u8]) -> Result<PosterPath, DomainError> { panic!("unexpected") }
|
||||
async fn get_poster(&self, _: &PosterPath) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl AuthService for PanicAuth {
|
||||
async fn generate_token(&self, _: &UserId) -> Result<GeneratedToken, DomainError> { panic!("unexpected") }
|
||||
async fn validate_token(&self, _: &str) -> Result<UserId, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl PasswordHasher for PanicHasher {
|
||||
async fn hash(&self, _: &str) -> Result<PasswordHash, DomainError> { panic!("unexpected") }
|
||||
async fn verify(&self, _: &str, _: &PasswordHash) -> Result<bool, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl UserRepository for PanicUserRepo {
|
||||
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") }
|
||||
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") }
|
||||
async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<User>, DomainError> { panic!("unexpected") }
|
||||
async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventPublisher for NoopPublisher {
|
||||
async fn publish(&self, _: &DomainEvent) -> Result<(), DomainError> { Ok(()) }
|
||||
}
|
||||
|
||||
fn panic_ctx() -> AppContext {
|
||||
AppContext {
|
||||
repository: Arc::new(PanicRepo),
|
||||
metadata_client: Arc::new(PanicMetadata),
|
||||
poster_fetcher: Arc::new(PanicFetcher),
|
||||
poster_storage: Arc::new(PanicStorage),
|
||||
event_publisher: Arc::new(NoopPublisher),
|
||||
auth_service: Arc::new(PanicAuth),
|
||||
password_hasher: Arc::new(PanicHasher),
|
||||
user_repository: Arc::new(PanicUserRepo),
|
||||
config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn review_logged_is_ignored() {
|
||||
let handler = PosterSyncHandler::new(panic_ctx(), 3);
|
||||
let event = DomainEvent::ReviewLogged {
|
||||
review_id: ReviewId::generate(),
|
||||
movie_id: MovieId::generate(),
|
||||
user_id: UserId::generate(),
|
||||
rating: Rating::new(4).unwrap(),
|
||||
watched_at: chrono::NaiveDate::from_ymd_opt(2024, 1, 1).unwrap().and_hms_opt(0, 0, 0).unwrap(),
|
||||
};
|
||||
assert!(handler.handle(&event).await.is_ok());
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
use axum::{
|
||||
extract::{FromRef, FromRequestParts},
|
||||
http::{header::AUTHORIZATION, request::Parts},
|
||||
http::{header, header::AUTHORIZATION, request::Parts},
|
||||
response::{IntoResponse, Redirect},
|
||||
};
|
||||
use domain::{errors::DomainError, value_objects::UserId};
|
||||
|
||||
@@ -23,8 +24,8 @@ where
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|v| v.strip_prefix("Bearer "))
|
||||
.ok_or_else(|| {
|
||||
ApiError(DomainError::ValidationError(
|
||||
"Missing auth token".into(),
|
||||
ApiError(DomainError::Unauthorized(
|
||||
"Missing or invalid auth token".into(),
|
||||
))
|
||||
})?;
|
||||
let user_id = app_state
|
||||
@@ -36,6 +37,64 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
pub struct OptionalCookieUser(pub Option<UserId>);
|
||||
pub struct RequiredCookieUser(pub UserId);
|
||||
|
||||
fn extract_token_from_cookie(parts: &Parts) -> Option<String> {
|
||||
parts
|
||||
.headers
|
||||
.get(header::COOKIE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|cookies| {
|
||||
cookies
|
||||
.split(';')
|
||||
.find_map(|c| c.trim().strip_prefix("token=").map(str::to_string))
|
||||
})
|
||||
}
|
||||
|
||||
impl<S> FromRequestParts<S> for OptionalCookieUser
|
||||
where
|
||||
AppState: FromRef<S>,
|
||||
S: Send + Sync,
|
||||
{
|
||||
type Rejection = std::convert::Infallible;
|
||||
|
||||
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
|
||||
let app_state = AppState::from_ref(state);
|
||||
let Some(token) = extract_token_from_cookie(parts) else {
|
||||
return Ok(OptionalCookieUser(None));
|
||||
};
|
||||
let user_id = app_state
|
||||
.app_ctx
|
||||
.auth_service
|
||||
.validate_token(&token)
|
||||
.await
|
||||
.ok();
|
||||
Ok(OptionalCookieUser(user_id))
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> FromRequestParts<S> for RequiredCookieUser
|
||||
where
|
||||
AppState: FromRef<S>,
|
||||
S: Send + Sync,
|
||||
{
|
||||
type Rejection = axum::response::Response;
|
||||
|
||||
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
|
||||
let app_state = AppState::from_ref(state);
|
||||
let token = extract_token_from_cookie(parts)
|
||||
.ok_or_else(|| Redirect::to("/login").into_response())?;
|
||||
let user_id = app_state
|
||||
.app_ctx
|
||||
.auth_service
|
||||
.validate_token(&token)
|
||||
.await
|
||||
.map_err(|_| Redirect::to("/login").into_response())?;
|
||||
Ok(RequiredCookieUser(user_id))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -58,10 +117,9 @@ mod tests {
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn missing_auth_header_returns_400() {
|
||||
async fn missing_auth_header_returns_401() {
|
||||
use std::sync::Arc;
|
||||
use application::context::AppContext;
|
||||
use auth::StubAuthService;
|
||||
|
||||
struct PanicRepo;
|
||||
#[async_trait::async_trait]
|
||||
@@ -73,19 +131,39 @@ mod tests {
|
||||
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
||||
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
|
||||
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
|
||||
}
|
||||
|
||||
struct PanicRenderer;
|
||||
impl crate::ports::HtmlRenderer for PanicRenderer {
|
||||
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>) -> Result<String, String> { panic!() }
|
||||
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
|
||||
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
|
||||
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
|
||||
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
|
||||
}
|
||||
|
||||
struct PanicMeta; struct PanicFetcher; struct PanicStorage; struct PanicEvent; struct PanicHasher;
|
||||
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta { async fn fetch_movie_metadata(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
|
||||
struct PanicRssRenderer;
|
||||
impl crate::ports::RssFeedRenderer for PanicRssRenderer {
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
|
||||
}
|
||||
|
||||
struct PanicMeta; struct PanicFetcher; struct PanicStorage; struct PanicEvent; struct PanicHasher; struct PanicAuth; struct PanicUserRepo;
|
||||
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
|
||||
|
||||
let state = crate::state::AppState {
|
||||
app_ctx: AppContext {
|
||||
@@ -94,10 +172,13 @@ mod tests {
|
||||
poster_fetcher: Arc::new(PanicFetcher),
|
||||
poster_storage: Arc::new(PanicStorage),
|
||||
event_publisher: Arc::new(PanicEvent),
|
||||
auth_service: Arc::new(StubAuthService),
|
||||
auth_service: Arc::new(PanicAuth),
|
||||
password_hasher: Arc::new(PanicHasher),
|
||||
user_repository: Arc::new(PanicUserRepo),
|
||||
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(PanicRenderer),
|
||||
rss_renderer: Arc::new(PanicRssRenderer),
|
||||
};
|
||||
|
||||
let app = test_router(state);
|
||||
@@ -111,6 +192,217 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
|
||||
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
|
||||
}
|
||||
|
||||
// Reusable helpers for cookie extractor tests
|
||||
async fn optional_cookie_handler(user: OptionalCookieUser) -> String {
|
||||
match user.0 {
|
||||
Some(id) => id.value().to_string(),
|
||||
None => "none".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn required_cookie_handler(user: RequiredCookieUser) -> String {
|
||||
user.0.value().to_string()
|
||||
}
|
||||
|
||||
fn test_router_optional(state: crate::state::AppState) -> Router {
|
||||
Router::new()
|
||||
.route("/optional", get(optional_cookie_handler))
|
||||
.with_state(state)
|
||||
}
|
||||
|
||||
fn test_router_required(state: crate::state::AppState) -> Router {
|
||||
Router::new()
|
||||
.route("/required", get(required_cookie_handler))
|
||||
.with_state(state)
|
||||
}
|
||||
|
||||
struct RejectingAuth;
|
||||
#[async_trait::async_trait]
|
||||
impl domain::ports::AuthService for RejectingAuth {
|
||||
async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() }
|
||||
async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> {
|
||||
Err(domain::errors::DomainError::Unauthorized("bad token".into()))
|
||||
}
|
||||
}
|
||||
|
||||
fn panic_state() -> crate::state::AppState {
|
||||
use std::sync::Arc;
|
||||
use application::context::AppContext;
|
||||
struct PanicRepo2;
|
||||
#[async_trait::async_trait]
|
||||
impl domain::ports::MovieRepository for PanicRepo2 {
|
||||
async fn get_movie_by_external_id(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_movie_by_id(&self, _: &domain::value_objects::MovieId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_movies_by_title_and_year(&self, _: &domain::value_objects::MovieTitle, _: &domain::value_objects::ReleaseYear) -> Result<Vec<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn upsert_movie(&self, _: &domain::models::Movie) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
||||
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
|
||||
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
|
||||
}
|
||||
struct PanicMeta2; struct PanicFetcher2; struct PanicStorage2; struct PanicEvent2; struct PanicHasher2; struct PanicUserRepo2;
|
||||
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta2 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher2 { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage2 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent2 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher2 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth2 { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo2 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
|
||||
struct PanicRenderer2;
|
||||
impl crate::ports::HtmlRenderer for PanicRenderer2 {
|
||||
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
|
||||
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
|
||||
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
|
||||
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
|
||||
}
|
||||
struct PanicRssRenderer2;
|
||||
impl crate::ports::RssFeedRenderer for PanicRssRenderer2 {
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
|
||||
}
|
||||
struct PanicAuth2;
|
||||
crate::state::AppState {
|
||||
app_ctx: AppContext {
|
||||
repository: Arc::new(PanicRepo2),
|
||||
metadata_client: Arc::new(PanicMeta2),
|
||||
poster_fetcher: Arc::new(PanicFetcher2),
|
||||
poster_storage: Arc::new(PanicStorage2),
|
||||
event_publisher: Arc::new(PanicEvent2),
|
||||
auth_service: Arc::new(PanicAuth2),
|
||||
password_hasher: Arc::new(PanicHasher2),
|
||||
user_repository: Arc::new(PanicUserRepo2),
|
||||
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(PanicRenderer2),
|
||||
rss_renderer: Arc::new(PanicRssRenderer2),
|
||||
}
|
||||
}
|
||||
|
||||
fn rejecting_state() -> crate::state::AppState {
|
||||
use std::sync::Arc;
|
||||
use application::context::AppContext;
|
||||
struct PanicRepo3;
|
||||
#[async_trait::async_trait]
|
||||
impl domain::ports::MovieRepository for PanicRepo3 {
|
||||
async fn get_movie_by_external_id(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_movie_by_id(&self, _: &domain::value_objects::MovieId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_movies_by_title_and_year(&self, _: &domain::value_objects::MovieTitle, _: &domain::value_objects::ReleaseYear) -> Result<Vec<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||
async fn upsert_movie(&self, _: &domain::models::Movie) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
||||
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
||||
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
|
||||
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||
async fn query_activity_feed(&self, _: &domain::models::collections::PageParams) -> Result<domain::models::collections::Paginated<domain::models::FeedEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_stats(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserStats, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_history(&self, _: &domain::value_objects::UserId) -> Result<Vec<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||
async fn get_user_trends(&self, _: &domain::value_objects::UserId) -> Result<domain::models::UserTrends, domain::errors::DomainError> { panic!() }
|
||||
}
|
||||
struct PanicMeta3; struct PanicFetcher3; struct PanicStorage3; struct PanicEvent3; struct PanicHasher3; struct PanicUserRepo3;
|
||||
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta3 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher3 { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage3 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent3 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher3 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
||||
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo3 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn list_with_stats(&self) -> Result<Vec<domain::models::UserSummary>, domain::errors::DomainError> { panic!() } }
|
||||
struct PanicRenderer3;
|
||||
impl crate::ports::HtmlRenderer for PanicRenderer3 {
|
||||
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
|
||||
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
|
||||
fn render_activity_feed_page(&self, _: application::ports::ActivityFeedPageData) -> Result<String, String> { panic!() }
|
||||
fn render_users_page(&self, _: application::ports::UsersPageData) -> Result<String, String> { panic!() }
|
||||
fn render_profile_page(&self, _: application::ports::ProfilePageData) -> Result<String, String> { panic!() }
|
||||
}
|
||||
struct PanicRssRenderer3;
|
||||
impl crate::ports::RssFeedRenderer for PanicRssRenderer3 {
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
|
||||
}
|
||||
crate::state::AppState {
|
||||
app_ctx: AppContext {
|
||||
repository: Arc::new(PanicRepo3),
|
||||
metadata_client: Arc::new(PanicMeta3),
|
||||
poster_fetcher: Arc::new(PanicFetcher3),
|
||||
poster_storage: Arc::new(PanicStorage3),
|
||||
event_publisher: Arc::new(PanicEvent3),
|
||||
auth_service: Arc::new(RejectingAuth),
|
||||
password_hasher: Arc::new(PanicHasher3),
|
||||
user_repository: Arc::new(PanicUserRepo3),
|
||||
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(PanicRenderer3),
|
||||
rss_renderer: Arc::new(PanicRssRenderer3),
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn optional_cookie_user_returns_none_without_cookie() {
|
||||
let app = test_router_optional(panic_state());
|
||||
let response = app
|
||||
.oneshot(Request::builder().uri("/optional").body(Body::empty()).unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(response.status(), StatusCode::OK);
|
||||
let body = axum::body::to_bytes(response.into_body(), usize::MAX).await.unwrap();
|
||||
assert_eq!(&body[..], b"none");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn optional_cookie_user_returns_none_with_invalid_token() {
|
||||
let app = test_router_optional(rejecting_state());
|
||||
let response = app
|
||||
.oneshot(
|
||||
Request::builder()
|
||||
.uri("/optional")
|
||||
.header("cookie", "token=bad.token.here")
|
||||
.body(Body::empty())
|
||||
.unwrap(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(response.status(), StatusCode::OK);
|
||||
let body = axum::body::to_bytes(response.into_body(), usize::MAX).await.unwrap();
|
||||
assert_eq!(&body[..], b"none");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn required_cookie_user_redirects_without_cookie() {
|
||||
let app = test_router_required(panic_state());
|
||||
let response = app
|
||||
.oneshot(Request::builder().uri("/required").body(Body::empty()).unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(response.status(), StatusCode::SEE_OTHER);
|
||||
assert_eq!(response.headers().get("location").unwrap(), "/login");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn required_cookie_user_redirects_with_invalid_token() {
|
||||
let app = test_router_required(rejecting_state());
|
||||
let response = app
|
||||
.oneshot(
|
||||
Request::builder()
|
||||
.uri("/required")
|
||||
.header("cookie", "token=bad.token.here")
|
||||
.body(Body::empty())
|
||||
.unwrap(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(response.status(), StatusCode::SEE_OTHER);
|
||||
assert_eq!(response.headers().get("location").unwrap(), "/login");
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user