Compare commits

..

7 Commits

Author SHA1 Message Date
819332522a cargo lock 2026-05-04 09:30:58 +02:00
79a06e6844 presentation wiring
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-05-04 09:30:20 +02:00
97a496553a chore: ignore .worktrees/ directory 2026-05-04 02:43:32 +02:00
5a58625265 feat(presentation): add initial structure with dtos, errors, extractors, handlers, and routes modules 2026-05-04 02:11:33 +02:00
6d9ac07dfc refactor(template-askama): clean up comments and improve code readability 2026-05-04 02:05:13 +02:00
b6a7cf9417 feat(template-askama): add Askama template adapter for diary entries 2026-05-04 02:04:52 +02:00
c4b39c9410 feat(sqlite): implement movie and review management with migrations
- Added SQL migrations for movies and reviews tables.
- Implemented SqliteMovieRepository with methods for upserting movies, saving reviews, and querying diary entries.
- Introduced models for database rows and conversion to domain models.
- Integrated async migration handling in the repository.
- Updated Cargo.toml files to include necessary dependencies for async operations and HTTP handling.
2026-05-04 01:59:52 +02:00
38 changed files with 2635 additions and 27 deletions

6
.gitignore vendored
View File

@@ -5,4 +5,8 @@
.vscode
.env
.env.prod
*.db
.worktrees/

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "01a08873b7fa815ad98a56a0902b60414cfcdc2c7a8570351320c4bc425347c6"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "026e2afeb573707cb360fcdab8f6137aabfaf603b5ed57b98ac2888b4a0389ff"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false
]
},
"hash": "0963b9661182e139cd760bbabb0d6ea3a301a2a3adbdfdda4a88f333a1144c77"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE title = ? AND release_year = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "3047579c6ed13ce87aad9b9ce6300c02f0df3516979518976e13f9d9abc6a403"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "33d0dae7d16b0635c1c7eb5afd10824bb55af7cc7a854f590d326622863759d1"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "47f7cf95ce3450635b643ab710cadba96f40319140834d510bc5207b2552e055"
}

View File

@@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) FROM reviews WHERE movie_id = ?",
"describe": {
"columns": [
{
"name": "COUNT(*)",
"ordinal": 0,
"type_info": "Integer"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "4b3074b532342c6356ee0e8e4d8c4a830f016234bb690e1f6240f02824d6d84f"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO reviews (id, movie_id, user_id, rating, comment, watched_at, created_at)\n VALUES (?, ?, ?, ?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 7
},
"nullable": []
},
"hash": "630e092fcd33bc312befef352a98225e6e18e6079644b949258a39bf4b0fe3e5"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE external_metadata_id = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true,
false,
false,
true,
true
]
},
"hash": "7bc4aebcb94547976d3d7e063e4e908fc22b977b3cbf063ee93ffe4648c42011"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO movies (id, external_metadata_id, title, release_year, director, poster_path)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(id) DO UPDATE SET\n external_metadata_id = excluded.external_metadata_id,\n title = excluded.title,\n release_year = excluded.release_year,\n director = excluded.director,\n poster_path = excluded.poster_path",
"describe": {
"columns": [],
"parameters": {
"Right": 6
},
"nullable": []
},
"hash": "7d7e23355ee0e442f2aa27e898dcfa40bdc4b09391afe04325f076157d9d84aa"
}

View File

@@ -0,0 +1,56 @@
{
"db_name": "SQLite",
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE movie_id = ? ORDER BY watched_at ASC",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "af883f8b78f185077e2d3dcfaa0a6e62fbdfbf00c97c9b33b699dc631476181d"
}

View File

@@ -0,0 +1,92 @@
{
"db_name": "SQLite",
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "external_metadata_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "title",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "release_year",
"ordinal": 3,
"type_info": "Integer"
},
{
"name": "director",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "poster_path",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "review_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "movie_id",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "user_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "rating",
"ordinal": 9,
"type_info": "Integer"
},
{
"name": "comment",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "watched_at",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
true,
false,
false,
true,
true,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "affe1eb261283c09d4b1ce6e684681755f079a044ffec8ff2bd79cfd8efe16b8"
}

520
Cargo.lock generated
View File

@@ -2,6 +2,15 @@
# It is not intended for manual editing.
version = 4
[[package]]
name = "aho-corasick"
version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
dependencies = [
"memchr",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -33,6 +42,59 @@ dependencies = [
"uuid", "uuid",
] ]
[[package]]
name = "askama"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bf825125edd887a019d0a3a837dcc5499a68b0d034cc3eb594070c3e18addc"
dependencies = [
"askama_macros",
"itoa",
"percent-encoding",
"serde",
"serde_json",
]
[[package]]
name = "askama_derive"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1c7065972a130eafa84215f21352ae15b4a7393da48c1f5e103904490736738"
dependencies = [
"askama_parser",
"basic-toml",
"glob",
"memchr",
"proc-macro2",
"quote",
"rustc-hash",
"serde",
"serde_derive",
"syn",
]
[[package]]
name = "askama_macros"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e23b1d2c4bd39a41971f6124cef4cc6fd0540913ecb90919b69ab3bbe44ae1a"
dependencies = [
"askama_derive",
]
[[package]]
name = "askama_parser"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7db09fde9143e7ac4513358fb32ee32847125b63b18ea715afd487956da715da"
dependencies = [
"rustc-hash",
"serde",
"serde_derive",
"unicode-ident",
"winnow",
]
[[package]] [[package]]
name = "async-trait" name = "async-trait"
version = "0.1.89" version = "0.1.89"
@@ -53,9 +115,19 @@ dependencies = [
"num-traits", "num-traits",
] ]
[[package]]
name = "atomic-waker"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
[[package]]
name = "auth"
version = "0.1.0"
dependencies = [
"async-trait",
"domain",
]
[[package]] [[package]]
name = "autocfg" name = "autocfg"
@@ -63,6 +135,70 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
name = "axum"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31b698c5f9a010f6573133b09e0de5408834d0c82f8d7475a89fc1867a71cd90"
dependencies = [
"axum-core",
"axum-macros",
"bytes",
"form_urlencoded",
"futures-util",
"http",
"http-body",
"http-body-util",
"hyper",
"hyper-util",
"itoa",
"matchit",
"memchr",
"mime",
"percent-encoding",
"pin-project-lite",
"serde_core",
"serde_json",
"serde_path_to_error",
"serde_urlencoded",
"sync_wrapper",
"tokio",
"tower",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "axum-core"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
dependencies = [
"bytes",
"futures-core",
"http",
"http-body",
"http-body-util",
"mime",
"pin-project-lite",
"sync_wrapper",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "axum-macros"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7aa268c23bfbbd2c4363b9cd302a4f504fb2a9dfe7e3451d66f35dd392e20aca"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "base64" name = "base64"
version = "0.22.1" version = "0.22.1"
@@ -75,6 +211,15 @@ version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06"
[[package]]
name = "basic-toml"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba62675e8242a4c4e806d12f11d136e626e6c8361d6b829310732241652a178a"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "bitflags" name = "bitflags"
version = "2.11.1" version = "2.11.1"
@@ -295,6 +440,16 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "errno"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "etcetera" name = "etcetera"
version = "0.8.0" version = "0.8.0"
@@ -454,6 +609,12 @@ dependencies = [
"wasip3", "wasip3",
] ]
[[package]]
name = "glob"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.15.5" version = "0.15.5"
@@ -519,6 +680,92 @@ dependencies = [
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "http"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
dependencies = [
"bytes",
"itoa",
]
[[package]]
name = "http-body"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
"bytes",
"http",
]
[[package]]
name = "http-body-util"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
dependencies = [
"bytes",
"futures-core",
"http",
"http-body",
"pin-project-lite",
]
[[package]]
name = "http-range-header"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c"
[[package]]
name = "httparse"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "httpdate"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "hyper"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6299f016b246a94207e63da54dbe807655bf9e00044f73ded42c3ac5305fbcca"
dependencies = [
"atomic-waker",
"bytes",
"futures-channel",
"futures-core",
"http",
"http-body",
"httparse",
"httpdate",
"itoa",
"pin-project-lite",
"smallvec",
"tokio",
]
[[package]]
name = "hyper-util"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
dependencies = [
"bytes",
"http",
"http-body",
"hyper",
"pin-project-lite",
"tokio",
"tower-service",
]
[[package]] [[package]]
name = "iana-time-zone" name = "iana-time-zone"
version = "0.1.65" version = "0.1.65"
@@ -753,6 +1000,21 @@ version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
name = "matchers"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
dependencies = [
"regex-automata",
]
[[package]]
name = "matchit"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
[[package]] [[package]]
name = "md-5" name = "md-5"
version = "0.10.6" version = "0.10.6"
@@ -773,6 +1035,22 @@ checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
name = "metadata" name = "metadata"
version = "0.1.0" version = "0.1.0"
[[package]]
name = "mime"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "mime_guess"
version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e"
dependencies = [
"mime",
"unicase",
]
[[package]] [[package]]
name = "mio" name = "mio"
version = "1.2.0" version = "1.2.0"
@@ -784,6 +1062,15 @@ dependencies = [
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "nu-ansi-term"
version = "0.50.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
dependencies = [
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "num-bigint-dig" name = "num-bigint-dig"
version = "0.8.6" version = "0.8.6"
@@ -940,6 +1227,28 @@ dependencies = [
[[package]]
name = "presentation"
version = "0.1.0"
dependencies = [
"anyhow",
"application",
"async-trait",
"auth",
"axum",
"chrono",
"domain",
"http-body-util",
"serde",
"serde_json",
"sqlite",
"sqlx",
"template-askama",
"thiserror",
"tokio",
"tower",
"tower-http",
"tracing",
"tracing-subscriber",
"uuid",
]
[[package]] [[package]]
name = "prettyplease" name = "prettyplease"
@@ -1023,6 +1332,23 @@ dependencies = [
"bitflags", "bitflags",
] ]
[[package]]
name = "regex-automata"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a"
[[package]] [[package]]
name = "ring" name = "ring"
version = "0.17.14" version = "0.17.14"
@@ -1061,6 +1387,12 @@ dependencies = [
name = "rss" name = "rss"
version = "0.1.0" version = "0.1.0"
[[package]]
name = "rustc-hash"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94300abf3f1ae2e2b8ffb7b58043de3d399c73fa6f4b73826402a5c457614dbe"
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.23.40" version = "0.23.40"
@@ -1162,6 +1494,17 @@ dependencies = [
"zmij", "zmij",
] ]
[[package]]
name = "serde_path_to_error"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457"
dependencies = [
"itoa",
"serde",
"serde_core",
]
[[package]] [[package]]
name = "serde_urlencoded" name = "serde_urlencoded"
version = "0.7.1" version = "0.7.1"
@@ -1196,12 +1539,31 @@ dependencies = [
"digest", "digest",
] ]
[[package]]
name = "sharded-slab"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
dependencies = [
"lazy_static",
]
[[package]] [[package]]
name = "shlex" name = "shlex"
version = "1.3.0" version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signal-hook-registry"
version = "1.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b"
dependencies = [
"errno",
"libc",
]
[[package]] [[package]]
name = "signature" name = "signature"
version = "2.2.0" version = "2.2.0"
@@ -1264,6 +1626,7 @@ dependencies = [
"chrono", "chrono",
"domain", "domain",
"sqlx", "sqlx",
"tokio",
"tracing", "tracing",
"uuid", "uuid",
] ]
@@ -1496,6 +1859,12 @@ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "sync_wrapper"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
[[package]] [[package]]
name = "synstructure" name = "synstructure"
version = "0.13.2" version = "0.13.2"
@@ -1507,6 +1876,16 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "template-askama"
version = "0.1.0"
dependencies = [
"application",
"askama",
"domain",
"serde",
]
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "2.0.18" version = "2.0.18"
@@ -1527,6 +1906,15 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "thread_local"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
dependencies = [
"cfg-if",
]
[[package]] [[package]]
name = "tinystr" name = "tinystr"
version = "0.8.3" version = "0.8.3"
@@ -1561,11 +1949,25 @@ dependencies = [
"bytes", "bytes",
"libc", "libc",
"mio", "mio",
"parking_lot",
"pin-project-lite", "pin-project-lite",
"signal-hook-registry",
"socket2", "socket2",
"tokio-macros",
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "tokio-macros"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "385a6cb71ab9ab790c5fe8d67f1645e6c450a7ce006a33de03daa956cf70a496"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "tokio-stream" name = "tokio-stream"
version = "0.1.18" version = "0.1.18"
@@ -1577,6 +1979,73 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "tokio-util"
version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098"
dependencies = [
"bytes",
"futures-core",
"futures-sink",
"pin-project-lite",
"tokio",
]
[[package]]
name = "tower"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4"
dependencies = [
"futures-core",
"futures-util",
"pin-project-lite",
"sync_wrapper",
"tokio",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "tower-http"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
dependencies = [
"bitflags",
"bytes",
"futures-core",
"futures-util",
"http",
"http-body",
"http-body-util",
"http-range-header",
"httpdate",
"mime",
"mime_guess",
"percent-encoding",
"pin-project-lite",
"tokio",
"tokio-util",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "tower-layer"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
[[package]]
name = "tower-service"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]] [[package]]
name = "tracing" name = "tracing"
version = "0.1.44" version = "0.1.44"
@@ -1607,6 +2076,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
dependencies = [ dependencies = [
"once_cell", "once_cell",
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
dependencies = [
"log",
"once_cell",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex-automata",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
] ]
[[package]] [[package]]
@@ -1615,6 +2114,12 @@ version = "1.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40ce102ab67701b8526c123c1bab5cbe42d7040ccfd0f64af1a385808d2f43de" checksum = "40ce102ab67701b8526c123c1bab5cbe42d7040ccfd0f64af1a385808d2f43de"
[[package]]
name = "unicase"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142"
[[package]] [[package]]
name = "unicode-bidi" name = "unicode-bidi"
version = "0.3.18" version = "0.3.18"
@@ -1684,6 +2189,12 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "valuable"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
[[package]] [[package]]
name = "vcpkg" name = "vcpkg"
version = "0.2.15" version = "0.2.15"
@@ -2040,6 +2551,15 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ee1708bef14716a11bae175f579062d4554d95be2c6829f518df847b7b3fdd0"
dependencies = [
"memchr",
]
[[package]] [[package]]
name = "wit-bindgen" name = "wit-bindgen"
version = "0.51.0" version = "0.51.0"

View File

@@ -3,7 +3,7 @@ members = [
     "crates/adapters/auth",
     "crates/adapters/metadata",
     "crates/adapters/rss",
-    "crates/adapters/sqlite",
+    "crates/adapters/sqlite", "crates/adapters/template-askama",
     "crates/application",
     "crates/common",
     "crates/domain",
@@ -22,6 +22,8 @@ tracing-subscriber = { version = "0.3.23", features = ["env-filter"] }
 async-trait = "0.1"
 uuid = { version = "1.23.0", features = ["v4", "serde"] }
 chrono = { version = "0.4", features = ["serde"] }
+sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "sqlite", "uuid", "macros"] }
+template-askama = { path = "crates/adapters/template-askama" }
 domain = { path = "crates/domain" }
 common = { path = "crates/common" }

View File

@@ -4,3 +4,5 @@ version = "0.1.0"
 edition = "2024"

 [dependencies]
+async-trait = { workspace = true }
+domain = { workspace = true }

View File

@@ -1,14 +1,13 @@
-pub fn add(left: u64, right: u64) -> u64 {
-    left + right
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn it_works() {
-        let result = add(2, 2);
-        assert_eq!(result, 4);
-    }
-}
+use async_trait::async_trait;
+use domain::{errors::DomainError, ports::AuthService, value_objects::UserId};
+
+pub struct StubAuthService;
+
+#[async_trait]
+impl AuthService for StubAuthService {
+    async fn validate_token(&self, _token: &str) -> Result<UserId, DomainError> {
+        Err(DomainError::InfrastructureError(
+            "auth service not implemented".into(),
+        ))
+    }
+}

View File

@@ -16,3 +16,4 @@ uuid = { workspace = true }
 chrono = { workspace = true }
 tracing = { workspace = true }
 async-trait = { workspace = true }
+tokio = { workspace = true }

View File

@@ -0,0 +1,24 @@
-- Initial schema: movies and the diary reviews logged against them.
-- NOTE(review): sqlx checksums migration files once applied; only edit this
-- file while no database has already run it.

-- One row per distinct film. `id` is a UUID stored as TEXT.
CREATE TABLE IF NOT EXISTS movies (
    id TEXT PRIMARY KEY NOT NULL,
    external_metadata_id TEXT UNIQUE,
    title TEXT NOT NULL,
    release_year INTEGER NOT NULL,
    director TEXT,
    poster_path TEXT
);

-- Supports manual-logging lookups by (title, year) when no external id is given.
CREATE INDEX IF NOT EXISTS idx_movies_title_year
ON movies (title, release_year);

-- One row per logged viewing. Timestamps are stored as
-- "YYYY-MM-DD HH:MM:SS" TEXT (see datetime_to_str in the repository models).
CREATE TABLE IF NOT EXISTS reviews (
    id TEXT PRIMARY KEY NOT NULL,
    movie_id TEXT NOT NULL REFERENCES movies(id),
    user_id TEXT NOT NULL,
    rating INTEGER NOT NULL,
    comment TEXT,
    watched_at TEXT NOT NULL,
    created_at TEXT NOT NULL
);

-- Diary queries filter by movie and sort by watch date; back both paths.
CREATE INDEX IF NOT EXISTS idx_reviews_movie_id ON reviews (movie_id);
CREATE INDEX IF NOT EXISTS idx_reviews_watched_at ON reviews (watched_at);

View File

@@ -1,12 +1,21 @@
use async_trait::async_trait;
use domain::{ use domain::{
errors::DomainError, errors::DomainError,
events::DomainEvent, events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated}, models::{
DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, SortDirection,
collections::Paginated,
},
ports::MovieRepository, ports::MovieRepository,
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear}, value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear},
}; };
use sqlx::SqlitePool; use sqlx::SqlitePool;
mod migrations;
mod models;
use models::{DiaryRow, MovieRow, ReviewRow, datetime_to_str};
pub struct SqliteMovieRepository { pub struct SqliteMovieRepository {
pool: SqlitePool, pool: SqlitePool,
} }
@@ -16,23 +25,140 @@ impl SqliteMovieRepository {
Self { pool } Self { pool }
} }
pub async fn migrate(&self) -> Result<(), DomainError> {
migrations::run(&self.pool).await
}
fn map_err(e: sqlx::Error) -> DomainError { fn map_err(e: sqlx::Error) -> DomainError {
tracing::error!("Database error: {:?}", e); tracing::error!("Database error: {:?}", e);
DomainError::InfrastructureError("Database operation failed".into()) DomainError::InfrastructureError("Database operation failed".into())
} }
    /// Total number of reviews, optionally restricted to a single movie.
    ///
    /// Two separate queries because the sqlx `query_scalar!` macro requires
    /// literal SQL — the WHERE clause cannot be spliced in at runtime.
    async fn count_diary_entries(&self, movie_id: Option<&str>) -> Result<i64, DomainError> {
        match movie_id {
            None => sqlx::query_scalar!("SELECT COUNT(*) FROM reviews")
                .fetch_one(&self.pool)
                .await
                .map_err(Self::map_err),
            Some(id) => {
                sqlx::query_scalar!("SELECT COUNT(*) FROM reviews WHERE movie_id = ?", id)
                    .fetch_one(&self.pool)
                    .await
                    .map_err(Self::map_err)
            }
        }
    }
    /// Fetch one page of joined movie+review rows for the diary.
    ///
    /// Four explicit branches — one per (filter, sort) combination — because
    /// sqlx macros require literal ORDER BY values; separate branches also let
    /// the query planner use the movie_id index instead of falling back to a
    /// filtered scan.
    async fn fetch_diary_rows(
        &self,
        movie_id: Option<&str>,
        sort: &SortDirection,
        limit: i64,
        offset: i64,
    ) -> Result<Vec<DiaryRow>, DomainError> {
        match (movie_id, sort) {
            (None, SortDirection::Descending) => sqlx::query_as!(
                DiaryRow,
                "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
                r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
                FROM reviews r
                INNER JOIN movies m ON m.id = r.movie_id
                ORDER BY r.watched_at DESC
                LIMIT ? OFFSET ?",
                limit,
                offset
            )
            .fetch_all(&self.pool)
            .await
            .map_err(Self::map_err),
            (None, SortDirection::Ascending) => sqlx::query_as!(
                DiaryRow,
                "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
                r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
                FROM reviews r
                INNER JOIN movies m ON m.id = r.movie_id
                ORDER BY r.watched_at ASC
                LIMIT ? OFFSET ?",
                limit,
                offset
            )
            .fetch_all(&self.pool)
            .await
            .map_err(Self::map_err),
            (Some(id), SortDirection::Descending) => sqlx::query_as!(
                DiaryRow,
                "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
                r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
                FROM reviews r
                INNER JOIN movies m ON m.id = r.movie_id
                WHERE r.movie_id = ?
                ORDER BY r.watched_at DESC
                LIMIT ? OFFSET ?",
                id,
                limit,
                offset
            )
            .fetch_all(&self.pool)
            .await
            .map_err(Self::map_err),
            (Some(id), SortDirection::Ascending) => sqlx::query_as!(
                DiaryRow,
                "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,
                r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at
                FROM reviews r
                INNER JOIN movies m ON m.id = r.movie_id
                WHERE r.movie_id = ?
                ORDER BY r.watched_at ASC
                LIMIT ? OFFSET ?",
                id,
                limit,
                offset
            )
            .fetch_all(&self.pool)
            .await
            .map_err(Self::map_err),
        }
    }
} }
#[async_trait::async_trait] #[async_trait]
impl MovieRepository for SqliteMovieRepository { impl MovieRepository for SqliteMovieRepository {
async fn get_movie_by_external_id( async fn get_movie_by_external_id(
&self, &self,
external_metadata_id: &ExternalMetadataId, external_metadata_id: &ExternalMetadataId,
) -> Result<Option<Movie>, DomainError> { ) -> Result<Option<Movie>, DomainError> {
todo!() let id = external_metadata_id.value();
sqlx::query_as!(
MovieRow,
"SELECT id, external_metadata_id, title, release_year, director, poster_path
FROM movies WHERE external_metadata_id = ?",
id
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?
.map(MovieRow::to_domain)
.transpose()
} }
async fn get_movie_by_id(&self, movie_id: &MovieId) -> Result<Option<Movie>, DomainError> { async fn get_movie_by_id(&self, movie_id: &MovieId) -> Result<Option<Movie>, DomainError> {
todo!() let id = movie_id.value().to_string();
sqlx::query_as!(
MovieRow,
"SELECT id, external_metadata_id, title, release_year, director, poster_path
FROM movies WHERE id = ?",
id
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?
.map(MovieRow::to_domain)
.transpose()
} }
async fn get_movies_by_title_and_year( async fn get_movies_by_title_and_year(
@@ -40,25 +166,138 @@ impl MovieRepository for SqliteMovieRepository {
title: &MovieTitle, title: &MovieTitle,
year: &ReleaseYear, year: &ReleaseYear,
) -> Result<Vec<Movie>, DomainError> { ) -> Result<Vec<Movie>, DomainError> {
todo!() let title = title.value();
let year = year.value() as i64;
sqlx::query_as!(
MovieRow,
"SELECT id, external_metadata_id, title, release_year, director, poster_path
FROM movies WHERE title = ? AND release_year = ?",
title,
year
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)?
.into_iter()
.map(MovieRow::to_domain)
.collect()
} }
async fn upsert_movie(&self, movie: &Movie) -> Result<(), DomainError> { async fn upsert_movie(&self, movie: &Movie) -> Result<(), DomainError> {
todo!() let id = movie.id().value().to_string();
let external_metadata_id = movie.external_metadata_id().map(|e| e.value().to_string());
let title = movie.title().value();
let release_year = movie.release_year().value() as i64;
let director = movie.director();
let poster_path = movie.poster_path().map(|p| p.value().to_string());
sqlx::query!(
"INSERT INTO movies (id, external_metadata_id, title, release_year, director, poster_path)
VALUES (?, ?, ?, ?, ?, ?)
ON CONFLICT(id) DO UPDATE SET
external_metadata_id = excluded.external_metadata_id,
title = excluded.title,
release_year = excluded.release_year,
director = excluded.director,
poster_path = excluded.poster_path",
id,
external_metadata_id,
title,
release_year,
director,
poster_path
)
.execute(&self.pool)
.await
.map_err(Self::map_err)?;
Ok(())
} }
async fn save_review(&self, review: &Review) -> Result<DomainEvent, DomainError> { async fn save_review(&self, review: &Review) -> Result<DomainEvent, DomainError> {
todo!() let id = review.id().value().to_string();
let movie_id = review.movie_id().value().to_string();
let user_id = review.user_id().value().to_string();
let rating = review.rating().value() as i64;
let comment = review.comment().map(|c| c.value().to_string());
let watched_at = datetime_to_str(review.watched_at());
let created_at = datetime_to_str(review.created_at());
sqlx::query!(
"INSERT INTO reviews (id, movie_id, user_id, rating, comment, watched_at, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?)",
id,
movie_id,
user_id,
rating,
comment,
watched_at,
created_at
)
.execute(&self.pool)
.await
.map_err(Self::map_err)?;
Ok(DomainEvent::ReviewLogged {
review_id: review.id().clone(),
movie_id: review.movie_id().clone(),
user_id: review.user_id().clone(),
rating: review.rating().clone(),
watched_at: *review.watched_at(),
})
} }
async fn query_diary( async fn query_diary(&self, filter: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> {
&self, let movie_id: Option<String> = filter.movie_id.as_ref().map(|id| id.value().to_string());
filter: &DiaryFilter, let limit = filter.page.limit as i64;
) -> Result<Paginated<DiaryEntry>, DomainError> { let offset = filter.page.offset as i64;
todo!()
let (total, rows) = tokio::try_join!(
self.count_diary_entries(movie_id.as_deref()),
self.fetch_diary_rows(movie_id.as_deref(), &filter.sort_by, limit, offset)
)?;
let items = rows
.into_iter()
.map(DiaryRow::to_domain)
.collect::<Result<Vec<_>, _>>()?;
Ok(Paginated {
items,
total_count: total as u64,
limit: filter.page.limit,
offset: filter.page.offset,
})
} }
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError> { async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError> {
todo!() let id_str = movie_id.value().to_string();
let movie = sqlx::query_as!(
MovieRow,
"SELECT id, external_metadata_id, title, release_year, director, poster_path
FROM movies WHERE id = ?",
id_str
)
.fetch_optional(&self.pool)
.await
.map_err(Self::map_err)?
.ok_or_else(|| DomainError::NotFound(format!("Movie {}", id_str)))?
.to_domain()?;
let viewings = sqlx::query_as!(
ReviewRow,
"SELECT id, movie_id, user_id, rating, comment, watched_at, created_at
FROM reviews WHERE movie_id = ? ORDER BY watched_at ASC",
id_str
)
.fetch_all(&self.pool)
.await
.map_err(Self::map_err)?
.into_iter()
.map(ReviewRow::to_domain)
.collect::<Result<Vec<_>, _>>()?;
Ok(ReviewHistory::new(movie, viewings))
} }
} }

View File

@@ -0,0 +1,9 @@
use domain::errors::DomainError;
use sqlx::SqlitePool;
/// Apply any pending migrations from `./migrations` (embedded at compile
/// time by the `sqlx::migrate!` macro) against the given pool.
pub(crate) async fn run(pool: &SqlitePool) -> Result<(), DomainError> {
    sqlx::migrate!("./migrations")
        .run(pool)
        .await
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))
}

View File

@@ -0,0 +1,126 @@
use chrono::NaiveDateTime;
use domain::{
errors::DomainError,
models::{DiaryEntry, Movie, Review},
value_objects::{
Comment, ExternalMetadataId, MovieId, MovieTitle, PosterPath, Rating, ReleaseYear,
ReviewId, UserId,
},
};
use uuid::Uuid;
/// Raw `movies` table row; all identifiers are UUIDs stored as TEXT.
#[derive(sqlx::FromRow)]
pub(crate) struct MovieRow {
    pub id: String,
    pub external_metadata_id: Option<String>,
    pub title: String,
    pub release_year: i64, // SQLite INTEGER is i64; narrowed on conversion
    pub director: Option<String>,
    pub poster_path: Option<String>,
}
impl MovieRow {
pub fn to_domain(self) -> Result<Movie, DomainError> {
let id = MovieId::from_uuid(parse_uuid(&self.id)?);
let external_metadata_id = self
.external_metadata_id
.map(ExternalMetadataId::new)
.transpose()?;
let title = MovieTitle::new(self.title)?;
let release_year = ReleaseYear::new(self.release_year as u16)?;
let poster_path = self.poster_path.map(PosterPath::new).transpose()?;
Ok(Movie::from_persistence(
id,
external_metadata_id,
title,
release_year,
self.director,
poster_path,
))
}
}
/// Raw `reviews` table row; timestamps are "YYYY-MM-DD HH:MM:SS" TEXT.
#[derive(sqlx::FromRow)]
pub(crate) struct ReviewRow {
    pub id: String,
    pub movie_id: String,
    pub user_id: String,
    pub rating: i64, // SQLite INTEGER is i64; narrowed on conversion
    pub comment: Option<String>,
    pub watched_at: String,
    pub created_at: String,
}
impl ReviewRow {
pub fn to_domain(self) -> Result<Review, DomainError> {
let id = ReviewId::from_uuid(parse_uuid(&self.id)?);
let movie_id = MovieId::from_uuid(parse_uuid(&self.movie_id)?);
let user_id = UserId::from_uuid(parse_uuid(&self.user_id)?);
let rating = Rating::new(self.rating as u8)?;
let comment = self.comment.map(Comment::new).transpose()?;
let watched_at = parse_datetime(&self.watched_at)?;
let created_at = parse_datetime(&self.created_at)?;
Ok(Review::from_persistence(
id, movie_id, user_id, rating, comment, watched_at, created_at,
))
}
}
// Used by query_diary JOIN — r.id aliased to review_id to avoid ambiguity with m.id.
// Column order mirrors the SELECT list: movie columns first, review columns after.
#[derive(sqlx::FromRow)]
pub(crate) struct DiaryRow {
    pub id: String,                           // m.id (movie UUID as TEXT)
    pub external_metadata_id: Option<String>, // m.external_metadata_id
    pub title: String,
    pub release_year: i64,
    pub director: Option<String>,
    pub poster_path: Option<String>,
    pub review_id: String, // r.id
    pub movie_id: String,  // r.movie_id (same movie, from the review side)
    pub user_id: String,
    pub rating: i64,
    pub comment: Option<String>,
    pub watched_at: String,
    pub created_at: String,
}
impl DiaryRow {
    /// Split the joined row into its movie and review halves, then convert
    /// each half through the corresponding row type's validation.
    pub fn to_domain(self) -> Result<DiaryEntry, DomainError> {
        let movie_half = MovieRow {
            id: self.id,
            external_metadata_id: self.external_metadata_id,
            title: self.title,
            release_year: self.release_year,
            director: self.director,
            poster_path: self.poster_path,
        };
        let review_half = ReviewRow {
            id: self.review_id,
            movie_id: self.movie_id,
            user_id: self.user_id,
            rating: self.rating,
            comment: self.comment,
            watched_at: self.watched_at,
            created_at: self.created_at,
        };
        // Arguments evaluate left-to-right, so a bad movie row still fails
        // before the review row is converted — same order as before.
        Ok(DiaryEntry::new(movie_half.to_domain()?, review_half.to_domain()?))
    }
}
/// Parse a TEXT column into a `Uuid`, mapping failure to an infrastructure
/// error that names the offending value.
pub(crate) fn parse_uuid(s: &str) -> Result<Uuid, DomainError> {
    match Uuid::parse_str(s) {
        Ok(uuid) => Ok(uuid),
        Err(e) => Err(DomainError::InfrastructureError(format!(
            "Invalid UUID '{}': {}",
            s, e
        ))),
    }
}
/// Serialize a timestamp in the canonical "YYYY-MM-DD HH:MM:SS" form used by
/// the `reviews.watched_at` / `created_at` TEXT columns. Sub-second precision
/// is dropped so every stored value round-trips through `parse_datetime`.
pub(crate) fn datetime_to_str(dt: &NaiveDateTime) -> String {
    dt.format("%Y-%m-%d %H:%M:%S").to_string()
}
/// Parse the "YYYY-MM-DD HH:MM:SS" TEXT format written by `datetime_to_str`.
/// Any other shape (including fractional seconds) is treated as corrupt data.
pub(crate) fn parse_datetime(s: &str) -> Result<NaiveDateTime, DomainError> {
    NaiveDateTime::parse_from_str(s, "%Y-%m-%d %H:%M:%S")
        .map_err(|e| DomainError::InfrastructureError(format!("Invalid datetime '{}': {}", s, e)))
}

View File

@@ -0,0 +1,12 @@
[package]
name = "template-askama"
version = "0.1.0"
edition = "2024"
[dependencies]
askama = { version = "0.16.0" }
serde = { workspace = true }
domain = { workspace = true }
application = { workspace = true }

View File

@@ -0,0 +1,35 @@
use askama::Template;
use application::ports::HtmlRenderer;
use domain::models::{DiaryEntry, collections::Paginated};
/// View-model for `templates/diary.html`; borrows from the
/// `Paginated<DiaryEntry>` page so rendering never copies entries.
#[derive(Template)]
#[template(path = "diary.html")]
struct DiaryTemplate<'a> {
    entries: &'a [DiaryEntry],
    current_offset: u32, // offset of this page (drives the "Previous" link)
    limit: u32,          // page size (drives next/prev offset arithmetic)
    has_more: bool,      // whether a "Next Page" link should render
}
/// `HtmlRenderer` implementation backed by compile-time Askama templates.
pub struct AskamaHtmlRenderer;

impl AskamaHtmlRenderer {
    pub fn new() -> Self {
        Self {}
    }
}

// `new()` with no arguments should come with `Default`
// (clippy::new_without_default); lets callers use `AskamaHtmlRenderer::default()`.
impl Default for AskamaHtmlRenderer {
    fn default() -> Self {
        Self::new()
    }
}
impl HtmlRenderer for AskamaHtmlRenderer {
    /// Render one page of the diary to a full HTML document.
    /// Template failures are surfaced as their display string.
    fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String> {
        // Compare in u64: the previous `data.total_count as u32` cast
        // truncated large counts, and `offset + limit` could overflow u32.
        let has_more = u64::from(data.offset) + u64::from(data.limit) < data.total_count;
        let template = DiaryTemplate {
            entries: &data.items,
            current_offset: data.offset,
            limit: data.limit,
            has_more,
        };
        template.render().map_err(|e| e.to_string())
    }
}

View File

@@ -0,0 +1,76 @@
<!-- crates/presentation/templates/diary.html -->
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>My Movie Diary</title>
    <style>
        /* Minimalist old-school styling */
        body { font-family: monospace; max-width: 800px; margin: 0 auto; padding: 20px; }
        .entry { border-bottom: 1px solid #ccc; padding: 10px 0; }
        .poster { max-width: 100px; float: left; margin-right: 15px; }
        .clear { clear: both; }
        .error { color: red; }
    </style>
</head>
<body>
    <h1>Movie Diary</h1>

    <!-- Zero-JS Form Submission -->
    <form action="/reviews" method="POST">
        <fieldset>
            <legend>Log a Movie</legend>

            <label for="tmdb_id">TMDB ID (Optional):</label>
            <input type="text" name="external_metadata_id" id="tmdb_id"><br><br>

            <label for="title">Title (Fallback):</label>
            <input type="text" name="manual_title" id="title"><br><br>

            <label for="year">Year (Fallback):</label>
            <input type="number" name="manual_release_year" id="year" min="1888"><br><br>

            <label for="rating">Rating (0-5):</label>
            <input type="number" name="rating" id="rating" min="0" max="5" required><br><br>

            <!-- LogReviewForm requires watched_at; without this field every
                 form submission fails deserialization with a 4xx. -->
            <label for="watched_at">Watched At:</label>
            <input type="datetime-local" name="watched_at" id="watched_at" required><br><br>

            <label for="comment">Comment (Optional):</label>
            <textarea name="comment" id="comment" rows="3" cols="40"></textarea><br><br>

            <button type="submit">Log Movie</button>
        </fieldset>
    </form>

    <hr>

    <!-- Rendering the Domain Models -->
    <div class="diary-entries">
        {% for entry in entries %}
        <div class="entry">
            {% if let Some(poster) = entry.movie().poster_path() %}
            <!-- Assuming you have a route to serve the raw images -->
            <img src="/static/posters/{{ poster.value() }}" class="poster" alt="Poster">
            {% endif %}

            <h3>{{ entry.movie().title().value() }} ({{ entry.movie().release_year().value() }})</h3>
            <p><strong>Rating:</strong> {{ entry.review().rating().value() }} / 5</p>

            {% if let Some(comment) = entry.review().comment() %}
            <p><em>"{{ comment.value() }}"</em></p>
            {% endif %}

            <p><small>Watched on: {{ entry.review().watched_at().format("%Y-%m-%d") }}</small></p>
            <div class="clear"></div>
        </div>
        {% else %}
        <p>No movies logged yet. Go watch something!</p>
        {% endfor %}
    </div>

    <!-- Simple Pagination -->
    <div>
        {% if current_offset > 0 %}
        <a href="/diary?offset={{ current_offset - limit }}">Previous Page</a>
        {% endif %}
        {% if has_more %}
        <a href="/diary?offset={{ current_offset + limit }}">Next Page</a>
        {% endif %}
    </div>
</body>
</html>

View File

@@ -1,4 +1,5 @@
pub mod commands; pub mod commands;
pub mod context; pub mod context;
pub mod ports;
pub mod queries; pub mod queries;
pub mod use_cases; pub mod use_cases;

View File

@@ -0,0 +1,5 @@
use domain::models::{DiaryEntry, collections::Paginated};
/// Port for turning a page of diary entries into a complete HTML document.
/// Keeping the trait here lets the presentation layer depend on an
/// abstraction rather than the Askama adapter directly.
pub trait HtmlRenderer: Send + Sync {
    /// Render one page of the diary; `Err` carries a human-readable
    /// template-engine message for the caller to map to an HTTP error.
    fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String>;
}

View File

@@ -4,3 +4,27 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
axum = { version = "0.8.8", features = ["macros"] }
tower-http = { version = "0.6.8", features = ["fs", "trace", "tracing"] }
serde = { workspace = true }
serde_json = { workspace = true }
anyhow = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tokio = { workspace = true }
uuid = { workspace = true }
chrono = { workspace = true }
async-trait = { workspace = true }
domain = { workspace = true }
application = { workspace = true }
auth = { workspace = true }
sqlite = { workspace = true }
sqlx = { workspace = true }
template-askama = { workspace = true }
[dev-dependencies]
tower = { version = "0.5", features = ["util"] }
http-body-util = "0.1"

View File

@@ -0,0 +1,118 @@
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Query-string parameters shared by the HTML and JSON diary endpoints.
/// All fields are optional.
#[derive(Deserialize)]
pub struct DiaryQueryParams {
    pub limit: Option<u32>,
    pub offset: Option<u32>,
    pub sort_by: Option<String>, // handlers treat "asc" as ascending, anything else as descending
    pub movie_id: Option<Uuid>,
}
/// Body of the HTML form POST /reviews. Either `external_metadata_id` or the
/// `manual_*` fallback fields identify the movie; `watched_at` is required and
/// parsed by the handler as an ISO-8601-style local timestamp.
#[derive(Deserialize)]
pub struct LogReviewForm {
    pub external_metadata_id: Option<String>,
    pub manual_title: Option<String>,
    pub manual_release_year: Option<u16>,
    pub manual_director: Option<String>,
    pub rating: u8,
    pub comment: Option<String>,
    pub watched_at: String,
}

/// JSON body of POST /api/reviews.
/// NOTE(review): field-for-field identical to `LogReviewForm` — consider
/// sharing one type unless the two payloads are expected to diverge.
#[derive(Deserialize)]
pub struct LogReviewRequest {
    pub external_metadata_id: Option<String>,
    pub manual_title: Option<String>,
    pub manual_release_year: Option<u16>,
    pub manual_director: Option<String>,
    pub rating: u8,
    pub comment: Option<String>,
    pub watched_at: String,
}
/// Movie as exposed over the JSON API.
#[derive(Serialize)]
pub struct MovieDto {
    pub id: Uuid,
    pub title: String,
    pub release_year: u16,
    pub director: Option<String>,
    pub poster_path: Option<String>,
}

/// Single logged viewing as exposed over the JSON API.
#[derive(Serialize)]
pub struct ReviewDto {
    pub id: Uuid,
    pub rating: u8,
    pub comment: Option<String>,
    pub watched_at: String, // stringified timestamp (see handlers::api::review_to_dto)
}

/// One diary line: the movie plus the review logged against it.
#[derive(Serialize)]
pub struct DiaryEntryDto {
    pub movie: MovieDto,
    pub review: ReviewDto,
}

/// Paginated diary page returned by GET /api/diary.
#[derive(Serialize)]
pub struct DiaryResponse {
    pub items: Vec<DiaryEntryDto>,
    pub total_count: u64,
    pub limit: u32,
    pub offset: u32,
}

/// All viewings of one movie plus a rating trend ("improved"/"declined"/"neutral").
#[derive(Serialize)]
pub struct ReviewHistoryResponse {
    pub movie: MovieDto,
    pub viewings: Vec<ReviewDto>,
    pub trend: String,
}

/// Credentials for POST /api/login (currently answered by a stub handler).
#[derive(Deserialize)]
pub struct LoginRequest {
    pub email: String,
    pub password: String,
}

/// Bearer token returned by a successful login.
#[derive(Serialize)]
pub struct LoginResponse {
    pub token: String,
}
#[cfg(test)]
mod tests {
    use super::*;

    // Pins the serde field names the API contract depends on.
    #[test]
    fn diary_response_serializes_correctly() {
        let resp = DiaryResponse {
            items: vec![],
            total_count: 0,
            limit: 20,
            offset: 0,
        };
        let json = serde_json::to_string(&resp).unwrap();
        assert!(json.contains("\"total_count\":0"));
        assert!(json.contains("\"items\":[]"));
    }

    #[test]
    fn diary_query_params_fields_are_optional() {
        let params = DiaryQueryParams {
            limit: None,
            offset: None,
            sort_by: None,
            movie_id: None,
        };
        assert!(params.limit.is_none());
        assert!(params.sort_by.is_none());
    }

    #[test]
    fn login_request_deserializes() {
        let json = r#"{"email":"a@b.com","password":"secret"}"#;
        let req: LoginRequest = serde_json::from_str(json).unwrap();
        assert_eq!(req.email, "a@b.com");
    }
}

View File

@@ -0,0 +1,32 @@
use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use domain::errors::DomainError;
/// Newtype that bridges `DomainError` into an axum HTTP response.
pub struct ApiError(pub DomainError);

impl From<DomainError> for ApiError {
    fn from(err: DomainError) -> Self {
        Self(err)
    }
}

impl IntoResponse for ApiError {
    /// Map each domain error variant to an HTTP status.
    /// Infrastructure details are logged server-side but never leaked
    /// to the client — only a generic 500 message goes out.
    fn into_response(self) -> Response {
        let (status, error_message) = match self.0 {
            DomainError::InvalidRating { .. } => (StatusCode::BAD_REQUEST, self.0.to_string()),
            DomainError::ValidationError(msg) => (StatusCode::BAD_REQUEST, msg),
            DomainError::NotFound(msg) => (StatusCode::NOT_FOUND, msg),
            DomainError::InfrastructureError(_) => {
                tracing::error!("Internal Infrastructure Error: {:?}", self.0);
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    "Internal server error".to_string(),
                )
            }
        };
        (status, error_message).into_response()
    }
}

View File

@@ -0,0 +1,116 @@
use axum::{
extract::{FromRef, FromRequestParts},
http::{header::AUTHORIZATION, request::Parts},
};
use domain::{errors::DomainError, value_objects::UserId};
use crate::{errors::ApiError, state::AppState};
/// Extractor that authenticates a request from its `Authorization: Bearer`
/// header and yields the validated `UserId`.
pub struct AuthenticatedUser(pub UserId);

impl<S> FromRequestParts<S> for AuthenticatedUser
where
    AppState: FromRef<S>,
    S: Send + Sync,
{
    type Rejection = ApiError;

    async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        let app_state = AppState::from_ref(state);

        // NOTE(review): the scheme match is case-sensitive ("Bearer " only),
        // and a missing/malformed header maps to 400 via ValidationError
        // rather than 401 — the test below pins 400, so confirm that is the
        // intended contract before changing either.
        let token = parts
            .headers
            .get(AUTHORIZATION)
            .and_then(|v| v.to_str().ok())
            .and_then(|v| v.strip_prefix("Bearer "))
            .ok_or_else(|| {
                ApiError(DomainError::ValidationError(
                    "Missing auth token".into(),
                ))
            })?;

        let user_id = app_state
            .app_ctx
            .auth_service
            .validate_token(token)
            .await?;

        Ok(AuthenticatedUser(user_id))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use axum::{
        body::Body,
        http::{Request, StatusCode},
        routing::get,
        Router,
    };
    use tower::ServiceExt;

    // Handler only reachable when the extractor succeeds.
    async fn protected_handler(user: AuthenticatedUser) -> String {
        user.0.value().to_string()
    }

    fn test_router(state: crate::state::AppState) -> Router {
        Router::new()
            .route("/protected", get(protected_handler))
            .with_state(state)
    }

    // Exercises only the extractor's rejection path: every collaborator in
    // AppState panics if touched, proving the request is refused before any
    // port is called.
    #[tokio::test]
    async fn missing_auth_header_returns_400() {
        use std::sync::Arc;
        use application::context::AppContext;
        use auth::StubAuthService;

        struct PanicRepo;
        #[async_trait::async_trait]
        impl domain::ports::MovieRepository for PanicRepo {
            async fn get_movie_by_external_id(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
            async fn get_movie_by_id(&self, _: &domain::value_objects::MovieId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
            async fn get_movies_by_title_and_year(&self, _: &domain::value_objects::MovieTitle, _: &domain::value_objects::ReleaseYear) -> Result<Vec<domain::models::Movie>, domain::errors::DomainError> { panic!() }
            async fn upsert_movie(&self, _: &domain::models::Movie) -> Result<(), domain::errors::DomainError> { panic!() }
            async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
            async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
            async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
        }

        struct PanicRenderer;
        impl crate::ports::HtmlRenderer for PanicRenderer {
            fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>) -> Result<String, String> { panic!() }
        }

        // One panicking stub per remaining port, compressed to a line each.
        struct PanicMeta; struct PanicFetcher; struct PanicStorage; struct PanicEvent; struct PanicHasher;
        #[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta { async fn fetch_movie_metadata(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
        #[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
        #[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
        #[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
        #[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }

        let state = crate::state::AppState {
            app_ctx: AppContext {
                repository: Arc::new(PanicRepo),
                metadata_client: Arc::new(PanicMeta),
                poster_fetcher: Arc::new(PanicFetcher),
                poster_storage: Arc::new(PanicStorage),
                event_publisher: Arc::new(PanicEvent),
                auth_service: Arc::new(StubAuthService),
                password_hasher: Arc::new(PanicHasher),
            },
            html_renderer: Arc::new(PanicRenderer),
        };

        let app = test_router(state);
        let response = app
            .oneshot(
                Request::builder()
                    .uri("/protected")
                    .body(Body::empty())
                    .unwrap(),
            )
            .await
            .unwrap();

        // ValidationError ("Missing auth token") maps to 400 in ApiError.
        assert_eq!(response.status(), StatusCode::BAD_REQUEST);
    }
}

View File

@@ -0,0 +1,255 @@
pub mod html {
    //! Server-rendered HTML endpoints (zero-JS diary page + form posts).

    use axum::{
        extract::{Query, State},
        response::{Html, IntoResponse, Redirect},
        Form,
    };
    use chrono::NaiveDateTime;

    use application::{
        commands::LogReviewCommand,
        queries::GetDiaryQuery,
        use_cases::{get_diary, log_review},
    };
    use domain::{errors::DomainError, models::SortDirection};

    use crate::{
        dtos::{DiaryQueryParams, LogReviewForm},
        errors::ApiError,
        extractors::AuthenticatedUser,
        state::AppState,
    };

    /// GET /diary — render the paginated diary as HTML.
    pub async fn get_diary_page(
        State(state): State<AppState>,
        Query(params): Query<DiaryQueryParams>,
    ) -> Result<impl IntoResponse, ApiError> {
        let query = GetDiaryQuery {
            limit: params.limit,
            offset: params.offset,
            // Anything other than an explicit "asc" sorts newest-first.
            sort_by: params.sort_by.as_deref().map(|s| {
                if s == "asc" {
                    SortDirection::Ascending
                } else {
                    SortDirection::Descending
                }
            }),
            movie_id: params.movie_id,
        };

        let page = get_diary::execute(&state.app_ctx, query).await?;
        let html = state
            .html_renderer
            .render_diary_page(&page)
            .map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
        Ok(Html(html))
    }

    /// POST /reviews — log a review from the HTML form, then redirect back
    /// to the diary (POST/redirect/GET).
    pub async fn post_review(
        State(state): State<AppState>,
        user: AuthenticatedUser,
        Form(form): Form<LogReviewForm>,
    ) -> Result<impl IntoResponse, ApiError> {
        // HTML <input type="datetime-local"> submits without seconds
        // ("YYYY-MM-DDTHH:MM"), so accept both that and the full form.
        let watched_at = NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M:%S")
            .or_else(|_| NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M"))
            .map_err(|_| {
                ApiError(DomainError::ValidationError(
                    "Invalid watched_at format, expected YYYY-MM-DDTHH:MM[:SS]".into(),
                ))
            })?;

        let cmd = LogReviewCommand {
            external_metadata_id: form.external_metadata_id,
            manual_title: form.manual_title,
            manual_release_year: form.manual_release_year,
            manual_director: form.manual_director,
            user_id: user.0.value(),
            rating: form.rating,
            comment: form.comment,
            watched_at,
        };

        log_review::execute(&state.app_ctx, cmd).await?;
        Ok(Redirect::to("/diary"))
    }
}
pub mod api {
    //! JSON API endpoints mirroring the HTML routes.

    use axum::{
        Json,
        extract::{Path, Query, State},
        http::StatusCode,
        response::IntoResponse,
    };
    use chrono::NaiveDateTime;
    use uuid::Uuid;

    use application::{
        commands::{LogReviewCommand, SyncPosterCommand},
        queries::{GetDiaryQuery, GetReviewHistoryQuery},
        use_cases::{get_diary, get_review_history, log_review, sync_poster},
    };
    use domain::{
        errors::DomainError,
        models::{DiaryEntry, Movie, Review, SortDirection},
        services::review_history::Trend,
        value_objects::MovieId,
    };

    use crate::{
        dtos::{
            DiaryEntryDto, DiaryQueryParams, DiaryResponse, LoginRequest, LoginResponse,
            LogReviewRequest, MovieDto, ReviewDto, ReviewHistoryResponse,
        },
        errors::ApiError,
        extractors::AuthenticatedUser,
        state::AppState,
    };

    /// GET /api/diary — one page of the diary as JSON.
    pub async fn get_diary(
        State(state): State<AppState>,
        Query(params): Query<DiaryQueryParams>,
    ) -> Result<Json<DiaryResponse>, ApiError> {
        let query = GetDiaryQuery {
            limit: params.limit,
            offset: params.offset,
            // Anything other than an explicit "asc" sorts newest-first.
            sort_by: params.sort_by.as_deref().map(|s| {
                if s == "asc" {
                    SortDirection::Ascending
                } else {
                    SortDirection::Descending
                }
            }),
            movie_id: params.movie_id,
        };

        let page = get_diary::execute(&state.app_ctx, query).await?;
        Ok(Json(DiaryResponse {
            items: page.items.iter().map(entry_to_dto).collect(),
            total_count: page.total_count,
            limit: page.limit,
            offset: page.offset,
        }))
    }

    /// GET review history — every viewing of one movie plus a rating trend.
    pub async fn get_review_history(
        State(state): State<AppState>,
        Path(movie_id): Path<Uuid>,
    ) -> Result<Json<ReviewHistoryResponse>, ApiError> {
        let (history, trend) = get_review_history::execute(
            &state.app_ctx,
            GetReviewHistoryQuery { movie_id },
        )
        .await?;

        Ok(Json(ReviewHistoryResponse {
            movie: movie_to_dto(history.movie()),
            viewings: history.viewings().iter().map(review_to_dto).collect(),
            trend: match trend {
                Trend::Improved => "improved",
                Trend::Declined => "declined",
                Trend::Neutral => "neutral",
            }
            .to_string(),
        }))
    }

    /// POST /api/reviews — log a review; 201 Created on success.
    pub async fn post_review(
        State(state): State<AppState>,
        user: AuthenticatedUser,
        Json(req): Json<LogReviewRequest>,
    ) -> Result<impl IntoResponse, ApiError> {
        // Accept both "YYYY-MM-DDTHH:MM:SS" and the seconds-less
        // "YYYY-MM-DDTHH:MM" emitted by HTML datetime-local pickers, so
        // clients can relay form values unchanged (matches the HTML route).
        let watched_at = NaiveDateTime::parse_from_str(&req.watched_at, "%Y-%m-%dT%H:%M:%S")
            .or_else(|_| NaiveDateTime::parse_from_str(&req.watched_at, "%Y-%m-%dT%H:%M"))
            .map_err(|_| {
                ApiError(DomainError::ValidationError(
                    "Invalid watched_at format, expected YYYY-MM-DDTHH:MM[:SS]".into(),
                ))
            })?;

        let cmd = LogReviewCommand {
            external_metadata_id: req.external_metadata_id,
            manual_title: req.manual_title,
            manual_release_year: req.manual_release_year,
            manual_director: req.manual_director,
            user_id: user.0.value(),
            rating: req.rating,
            comment: req.comment,
            watched_at,
        };

        log_review::execute(&state.app_ctx, cmd).await?;
        Ok(StatusCode::CREATED)
    }

    /// POST poster sync — fetch and store the poster for a movie that has an
    /// external metadata id; 204 No Content on success.
    pub async fn sync_poster(
        State(state): State<AppState>,
        _user: AuthenticatedUser,
        Path(movie_id): Path<Uuid>,
    ) -> Result<impl IntoResponse, ApiError> {
        let movie = state
            .app_ctx
            .repository
            .get_movie_by_id(&MovieId::from_uuid(movie_id))
            .await?
            .ok_or_else(|| ApiError(DomainError::NotFound(format!("Movie {movie_id}"))))?;

        // Manually-logged movies have no external id and cannot be synced.
        let external_id = movie
            .external_metadata_id()
            .ok_or_else(|| {
                ApiError(DomainError::ValidationError(
                    "Movie has no external metadata ID, cannot sync poster".into(),
                ))
            })?
            .value()
            .to_string();

        sync_poster::execute(
            &state.app_ctx,
            SyncPosterCommand {
                movie_id,
                external_metadata_id: external_id,
            },
        )
        .await?;

        Ok(StatusCode::NO_CONTENT)
    }

    /// POST /api/login — stub: always answers with a placeholder token.
    pub async fn login(
        State(_state): State<AppState>,
        Json(_req): Json<LoginRequest>,
    ) -> Json<LoginResponse> {
        Json(LoginResponse {
            token: "stub-token".to_string(),
        })
    }

    // Domain -> wire-type mappers, kept private to this module.

    fn movie_to_dto(movie: &Movie) -> MovieDto {
        MovieDto {
            id: movie.id().value(),
            title: movie.title().value().to_string(),
            release_year: movie.release_year().value(),
            director: movie.director().map(|d| d.to_string()),
            poster_path: movie.poster_path().map(|p| p.value().to_string()),
        }
    }

    fn review_to_dto(review: &Review) -> ReviewDto {
        ReviewDto {
            id: review.id().value(),
            rating: review.rating().value(),
            comment: review.comment().map(|c| c.value().to_string()),
            watched_at: review.watched_at().to_string(),
        }
    }

    fn entry_to_dto(entry: &DiaryEntry) -> DiaryEntryDto {
        DiaryEntryDto {
            movie: movie_to_dto(entry.movie()),
            review: review_to_dto(entry.review()),
        }
    }
}

View File

@@ -0,0 +1,7 @@
//! HTTP presentation layer: axum routes, handlers, DTOs and extractors.
pub mod dtos;       // request/response wire types
pub mod errors;     // DomainError -> HTTP status mapping
pub mod extractors; // AuthenticatedUser bearer-token extractor
pub mod handlers;   // `html` and `api` handler modules
pub mod ports;      // presentation-local ports (HtmlRenderer)
pub mod routes;     // router assembly
pub mod state;      // shared AppState

View File

@@ -1,3 +1,153 @@
fn main() { use std::sync::Arc;
println!("Hello, world!");
use anyhow::Context;
use async_trait::async_trait;
use domain::{
errors::DomainError,
events::DomainEvent,
models::Movie,
ports::{EventPublisher, MetadataClient, PasswordHasher, PosterFetcherClient, PosterStorage},
value_objects::{ExternalMetadataId, MovieId, PasswordHash, PosterPath, PosterUrl},
};
use sqlx::SqlitePool;
use tokio::net::TcpListener;
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
use application::context::AppContext;
use auth::StubAuthService;
use sqlite::SqliteMovieRepository;
use template_askama::AskamaHtmlRenderer;
use presentation::{routes, state::AppState};
/// Placeholder metadata client; every operation reports an infrastructure error.
struct StubMetadataClient;
#[async_trait]
impl MetadataClient for StubMetadataClient {
    async fn fetch_movie_metadata(&self, _id: &ExternalMetadataId) -> Result<Movie, DomainError> {
        let reason = "metadata client not implemented";
        Err(DomainError::InfrastructureError(reason.into()))
    }
    async fn get_poster_url(
        &self,
        _id: &ExternalMetadataId,
    ) -> Result<Option<PosterUrl>, DomainError> {
        let reason = "metadata client not implemented";
        Err(DomainError::InfrastructureError(reason.into()))
    }
}
/// Placeholder poster fetcher; always reports an infrastructure error.
struct StubPosterFetcher;
#[async_trait]
impl PosterFetcherClient for StubPosterFetcher {
    async fn fetch_poster_bytes(&self, _url: &PosterUrl) -> Result<Vec<u8>, DomainError> {
        let reason = "poster fetcher not implemented";
        Err(DomainError::InfrastructureError(reason.into()))
    }
}
/// Placeholder poster storage; both read and write report an infrastructure error.
struct StubPosterStorage;
#[async_trait]
impl PosterStorage for StubPosterStorage {
    async fn store_poster(
        &self,
        _movie_id: &MovieId,
        _bytes: &[u8],
    ) -> Result<PosterPath, DomainError> {
        let reason = "poster storage not implemented";
        Err(DomainError::InfrastructureError(reason.into()))
    }
    async fn get_poster(&self, _path: &PosterPath) -> Result<Vec<u8>, DomainError> {
        let reason = "poster storage not implemented";
        Err(DomainError::InfrastructureError(reason.into()))
    }
}
/// No-op event publisher: accepts and discards every domain event until a
/// real message transport is wired in.
struct StubEventPublisher;
#[async_trait]
impl EventPublisher for StubEventPublisher {
    // Always succeeds; the event is intentionally dropped.
    async fn publish(&self, _event: &DomainEvent) -> Result<(), DomainError> {
        Ok(())
    }
}
/// Placeholder password hasher; both operations report an infrastructure error.
struct StubPasswordHasher;
impl StubPasswordHasher {
    /// Shared error for every unimplemented operation.
    fn not_implemented() -> DomainError {
        DomainError::InfrastructureError("password hasher not implemented".into())
    }
}
#[async_trait]
impl PasswordHasher for StubPasswordHasher {
    async fn hash(&self, _plain: &str) -> Result<PasswordHash, DomainError> {
        Err(Self::not_implemented())
    }
    async fn verify(&self, _plain: &str, _hash: &PasswordHash) -> Result<bool, DomainError> {
        Err(Self::not_implemented())
    }
}
/// Application entry point: initialise tracing, wire the dependency graph,
/// build the router, and serve HTTP until the process is stopped.
///
/// The bind address can be overridden via the `BIND_ADDR` environment
/// variable; the default preserves the previous hard-coded `0.0.0.0:3000`.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    init_tracing();
    let state = wire_dependencies()
        .await
        .context("Failed to wire dependencies")?;
    let app = routes::build_router(state);
    let addr = std::env::var("BIND_ADDR").unwrap_or_else(|_| "0.0.0.0:3000".to_string());
    // Name the address in the error so a failed bind is diagnosable.
    let listener = TcpListener::bind(&addr)
        .await
        .with_context(|| format!("Failed to bind {addr}"))?;
    tracing::info!("Listening on {addr}");
    axum::serve(listener, app).await?;
    Ok(())
}
/// Construct the application state: open the database, run migrations, and
/// assemble the `AppContext` with stub implementations for the ports that
/// are not implemented yet.
///
/// The database location can be overridden via the `DATABASE_URL`
/// environment variable; the default preserves the previous hard-coded
/// `sqlite://reviews.db`.
async fn wire_dependencies() -> anyhow::Result<AppState> {
    let db_url =
        std::env::var("DATABASE_URL").unwrap_or_else(|_| "sqlite://reviews.db".to_string());
    // Name the URL in the error so a failed connection is diagnosable.
    let pool = SqlitePool::connect(&db_url)
        .await
        .with_context(|| format!("Failed to connect to SQLite database at {db_url}"))?;
    let repo = SqliteMovieRepository::new(pool);
    // Migration errors are converted to anyhow so context can be attached.
    repo.migrate()
        .await
        .map_err(|e| anyhow::anyhow!("{e}"))
        .context("Database migration failed")?;
    let app_ctx = AppContext {
        repository: Arc::new(repo),
        metadata_client: Arc::new(StubMetadataClient),
        poster_fetcher: Arc::new(StubPosterFetcher),
        poster_storage: Arc::new(StubPosterStorage),
        event_publisher: Arc::new(StubEventPublisher),
        auth_service: Arc::new(StubAuthService),
        password_hasher: Arc::new(StubPasswordHasher),
    };
    Ok(AppState {
        app_ctx,
        html_renderer: Arc::new(AskamaHtmlRenderer::new()),
    })
}
/// Install the global tracing subscriber.
///
/// The filter comes from `RUST_LOG` when set; otherwise a default keeping
/// `presentation` and `tower_http` at debug level is used.
fn init_tracing() {
    tracing_subscriber::registry()
        .with(tracing_subscriber::EnvFilter::new(
            std::env::var("RUST_LOG")
                .unwrap_or_else(|_| "presentation=debug,tower_http=debug".into()),
        ))
        .with(tracing_subscriber::fmt::layer())
        .init();
} }

View File

@@ -0,0 +1 @@
pub use application::ports::HtmlRenderer;

View File

@@ -0,0 +1,37 @@
use axum::{Router, routing};
use tower_http::{services::ServeDir, trace::TraceLayer};
use crate::{handlers, state::AppState};
/// Assemble the full application router: server-rendered HTML pages, the
/// JSON API, static asset serving, and per-request tracing.
pub fn build_router(state: AppState) -> Router {
    let app = Router::new().merge(html_routes()).merge(api_routes());
    app.nest_service("/static", ServeDir::new("static"))
        .layer(TraceLayer::new_for_http())
        .with_state(state)
}
/// Routes that render server-side HTML.
fn html_routes() -> Router<AppState> {
    let diary = routing::get(handlers::html::get_diary_page);
    let review = routing::post(handlers::html::post_review);
    Router::new().route("/diary", diary).route("/reviews", review)
}
/// JSON API routes, all nested under `/api`.
fn api_routes() -> Router<AppState> {
    let api = Router::new()
        .route("/diary", routing::get(handlers::api::get_diary))
        .route(
            "/movies/{id}/history",
            routing::get(handlers::api::get_review_history),
        )
        .route("/reviews", routing::post(handlers::api::post_review))
        .route(
            "/movies/{id}/sync-poster",
            routing::post(handlers::api::sync_poster),
        )
        .route("/auth/login", routing::post(handlers::api::login));
    Router::new().nest("/api", api)
}

View File

@@ -0,0 +1,11 @@
use std::sync::Arc;
use application::context::AppContext;
use crate::ports::HtmlRenderer;
/// Shared state handed to every handler by axum.
#[derive(Clone)]
pub struct AppState {
    // Use-case context: repository plus the outbound ports.
    pub app_ctx: AppContext,
    // Renderer used by HTML handlers; a trait object so templates stay swappable.
    pub html_renderer: Arc<dyn HtmlRenderer>,
}

View File

@@ -0,0 +1,164 @@
use std::sync::Arc;
use application::context::AppContext;
use async_trait::async_trait;
use auth::StubAuthService;
use axum::{
Router,
body::Body,
http::{Request, StatusCode},
};
use domain::{
errors::DomainError,
events::DomainEvent,
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, collections::Paginated},
ports::{EventPublisher, MetadataClient, PasswordHasher, PosterFetcherClient, PosterStorage},
value_objects::{ExternalMetadataId, MovieId, PasswordHash, PosterPath, PosterUrl},
};
use http_body_util::BodyExt;
use presentation::{routes, state::AppState};
use sqlite::SqliteMovieRepository;
use sqlx::SqlitePool;
use template_askama::AskamaHtmlRenderer;
use tower::ServiceExt;
/// Event publisher that silently accepts every event; used where events
/// are irrelevant to the scenario under test.
struct NoopEventPublisher;
#[async_trait]
impl EventPublisher for NoopEventPublisher {
    async fn publish(&self, _event: &DomainEvent) -> Result<(), DomainError> {
        Ok(())
    }
}
/// Metadata client that must never be reached by the tested endpoints.
/// Every panic carries a message so an accidental call is easy to diagnose
/// (a bare `panic!()` only prints "explicit panic").
struct PanicMeta;
#[async_trait]
impl MetadataClient for PanicMeta {
    async fn fetch_movie_metadata(&self, _: &ExternalMetadataId) -> Result<Movie, DomainError> {
        panic!("metadata not wired in tests")
    }
    async fn get_poster_url(
        &self,
        _: &ExternalMetadataId,
    ) -> Result<Option<PosterUrl>, DomainError> {
        panic!("get_poster_url must not be called in these tests")
    }
}
/// Poster fetcher that must never be reached by the tested endpoints;
/// the panic message names the offending call for easier diagnosis.
struct PanicFetcher;
#[async_trait]
impl PosterFetcherClient for PanicFetcher {
    async fn fetch_poster_bytes(&self, _: &PosterUrl) -> Result<Vec<u8>, DomainError> {
        panic!("fetch_poster_bytes must not be called in these tests")
    }
}
/// Poster storage that must never be reached by the tested endpoints;
/// panic messages name the offending call for easier diagnosis.
struct PanicStorage;
#[async_trait]
impl PosterStorage for PanicStorage {
    async fn store_poster(&self, _: &MovieId, _: &[u8]) -> Result<PosterPath, DomainError> {
        panic!("store_poster must not be called in these tests")
    }
    async fn get_poster(&self, _: &PosterPath) -> Result<Vec<u8>, DomainError> {
        panic!("get_poster must not be called in these tests")
    }
}
/// Password hasher that must never be reached by the tested endpoints;
/// panic messages name the offending call for easier diagnosis.
struct PanicHasher;
#[async_trait]
impl PasswordHasher for PanicHasher {
    async fn hash(&self, _: &str) -> Result<PasswordHash, DomainError> {
        panic!("hash must not be called in these tests")
    }
    async fn verify(&self, _: &str, _: &PasswordHash) -> Result<bool, DomainError> {
        panic!("verify must not be called in these tests")
    }
}
/// Build a router backed by a fresh in-memory SQLite database, with panic
/// doubles for every port the exercised endpoints are not expected to touch.
async fn test_app() -> Router {
    let pool = SqlitePool::connect("sqlite::memory:")
        .await
        .expect("in-memory SQLite failed");
    let repo = SqliteMovieRepository::new(pool);
    repo.migrate().await.expect("migration failed");
    let app_ctx = AppContext {
        repository: Arc::new(repo),
        metadata_client: Arc::new(PanicMeta),
        poster_fetcher: Arc::new(PanicFetcher),
        poster_storage: Arc::new(PanicStorage),
        event_publisher: Arc::new(NoopEventPublisher),
        auth_service: Arc::new(StubAuthService),
        password_hasher: Arc::new(PanicHasher),
    };
    let state = AppState {
        app_ctx,
        html_renderer: Arc::new(AskamaHtmlRenderer::new()),
    };
    routes::build_router(state)
}
/// An empty database must yield an empty, zero-count diary page with the
/// default pagination values.
#[tokio::test]
async fn get_api_diary_returns_empty_list() {
    let app = test_app().await;
    let request = Request::builder()
        .uri("/api/diary")
        .body(Body::empty())
        .unwrap();
    let response = app.oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let json: serde_json::Value = serde_json::from_slice(&body).unwrap();
    assert_eq!(json["total_count"], 0);
    assert_eq!(json["items"], serde_json::json!([]));
    assert_eq!(json["limit"], 20);
    assert_eq!(json["offset"], 0);
}
/// A review POST without authentication must be rejected with 400 Bad Request.
#[tokio::test]
async fn post_api_reviews_without_auth_returns_400() {
    let app = test_app().await;
    let payload = r#"{"rating":4,"watched_at":"2026-01-01T20:00:00","manual_title":"Dune","manual_release_year":2021}"#;
    let request = Request::builder()
        .method("POST")
        .uri("/api/reviews")
        .header("content-type", "application/json")
        .body(Body::from(payload))
        .unwrap();
    let response = app.oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::BAD_REQUEST);
}
/// The stubbed login endpoint must answer 200 with the fixed placeholder token.
#[tokio::test]
async fn post_api_auth_login_returns_stub_token() {
    let app = test_app().await;
    let request = Request::builder()
        .method("POST")
        .uri("/api/auth/login")
        .header("content-type", "application/json")
        .body(Body::from(r#"{"email":"a@b.com","password":"x"}"#))
        .unwrap();
    let response = app.oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let json: serde_json::Value = serde_json::from_slice(&body).unwrap();
    assert_eq!(json["token"], "stub-token");
}