Compare commits
31 Commits
f790fa2a0f
...
e31d99a240
| Author | SHA1 | Date | |
|---|---|---|---|
| e31d99a240 | |||
| 41fec1efa5 | |||
| 160c08d1c4 | |||
| 7aa6d7bf4d | |||
| 144f2f8e0c | |||
| cff64f7a6b | |||
| 5baff54cb9 | |||
| f94d2db8b1 | |||
| 48875a6e86 | |||
| 9387ae705b | |||
| 9871e21bc0 | |||
| fa8efbaa23 | |||
| d769a5b55c | |||
| 8e1fb1a974 | |||
| 6145b873f5 | |||
| cc668ae44d | |||
| e5097c22dd | |||
| 450468ef3d | |||
| 6e7c6467a7 | |||
| 7f815f8207 | |||
| 5df89200d4 | |||
| eb273dc277 | |||
| 5689db0ad7 | |||
| 5c70b8b8be | |||
| 4c547df04e | |||
| 602df8df22 | |||
| 5b69a3a7c0 | |||
| a38f78d261 | |||
| 17f90726e8 | |||
| 563f33212e | |||
| 8e5ac9f433 |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -10,3 +10,5 @@
|
|||||||
*.db
|
*.db
|
||||||
|
|
||||||
.worktrees/
|
.worktrees/
|
||||||
|
.superpowers/
|
||||||
|
docs/
|
||||||
32
.sqlx/query-1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399.json
generated
Normal file
32
.sqlx/query-1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399.json
generated
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "email",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "password_hash",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
|
||||||
|
}
|
||||||
56
.sqlx/query-70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2.json
generated
Normal file
56
.sqlx/query-70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2.json
generated
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE id = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "movie_id",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "user_id",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rating",
|
||||||
|
"ordinal": 3,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "comment",
|
||||||
|
"ordinal": 4,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "watched_at",
|
||||||
|
"ordinal": 5,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "created_at",
|
||||||
|
"ordinal": 6,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "70ee6050284475b5641af712e5923ba2091b8b70b1885ca6518dfa4bb01fdac2"
|
||||||
|
}
|
||||||
12
.sqlx/query-e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883.json
generated
Normal file
12
.sqlx/query-e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883.json
generated
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "DELETE FROM movies WHERE id = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": []
|
||||||
|
},
|
||||||
|
"hash": "e431381ad41c1c2f7b9c89509d5e3f4c19cb52dcfff66772145cd80c53c16883"
|
||||||
|
}
|
||||||
12
.sqlx/query-f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc.json
generated
Normal file
12
.sqlx/query-f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc.json
generated
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "DELETE FROM reviews WHERE id = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": []
|
||||||
|
},
|
||||||
|
"hash": "f84e5483ca4210aec67b38cc1a9de4a42c12891025236abc48ea4f175292a6cc"
|
||||||
|
}
|
||||||
33
Cargo.lock
generated
33
Cargo.lock
generated
@@ -331,6 +331,17 @@ dependencies = [
|
|||||||
"shlex",
|
"shlex",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cfb"
|
||||||
|
version = "0.7.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d38f2da7a0a2c4ccf0065be06397cc26a81f4e528be095826eee9d4adbb8c60f"
|
||||||
|
dependencies = [
|
||||||
|
"byteorder",
|
||||||
|
"fnv",
|
||||||
|
"uuid",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cfg-if"
|
name = "cfg-if"
|
||||||
version = "1.0.4"
|
version = "1.0.4"
|
||||||
@@ -681,6 +692,16 @@ dependencies = [
|
|||||||
"pin-project-lite",
|
"pin-project-lite",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "event-publisher"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"async-trait",
|
||||||
|
"domain",
|
||||||
|
"tokio",
|
||||||
|
"tracing",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "find-msvc-tools"
|
name = "find-msvc-tools"
|
||||||
version = "0.1.9"
|
version = "0.1.9"
|
||||||
@@ -1235,6 +1256,15 @@ dependencies = [
|
|||||||
"serde_core",
|
"serde_core",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "infer"
|
||||||
|
version = "0.19.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a588916bfdfd92e71cacef98a63d9b1f0d74d6599980d11894290e7ddefffcf7"
|
||||||
|
dependencies = [
|
||||||
|
"cfb",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ipnet"
|
name = "ipnet"
|
||||||
version = "2.12.0"
|
version = "2.12.0"
|
||||||
@@ -1737,6 +1767,7 @@ dependencies = [
|
|||||||
"anyhow",
|
"anyhow",
|
||||||
"async-trait",
|
"async-trait",
|
||||||
"domain",
|
"domain",
|
||||||
|
"infer",
|
||||||
"object_store",
|
"object_store",
|
||||||
"tokio",
|
"tokio",
|
||||||
"tracing",
|
"tracing",
|
||||||
@@ -1779,7 +1810,9 @@ dependencies = [
|
|||||||
"chrono",
|
"chrono",
|
||||||
"domain",
|
"domain",
|
||||||
"dotenvy",
|
"dotenvy",
|
||||||
|
"event-publisher",
|
||||||
"http-body-util",
|
"http-body-util",
|
||||||
|
"infer",
|
||||||
"metadata",
|
"metadata",
|
||||||
"poster-fetcher",
|
"poster-fetcher",
|
||||||
"poster-storage",
|
"poster-storage",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
members = [
|
members = [
|
||||||
"crates/adapters/auth",
|
"crates/adapters/auth", "crates/adapters/event-publisher",
|
||||||
"crates/adapters/metadata", "crates/adapters/poster-fetcher", "crates/adapters/poster-storage",
|
"crates/adapters/metadata", "crates/adapters/poster-fetcher", "crates/adapters/poster-storage",
|
||||||
"crates/adapters/rss",
|
"crates/adapters/rss",
|
||||||
"crates/adapters/sqlite",
|
"crates/adapters/sqlite",
|
||||||
@@ -41,6 +41,7 @@ auth = { path = "crates/adapters/auth" }
|
|||||||
metadata = { path = "crates/adapters/metadata" }
|
metadata = { path = "crates/adapters/metadata" }
|
||||||
poster-fetcher = { path = "crates/adapters/poster-fetcher" }
|
poster-fetcher = { path = "crates/adapters/poster-fetcher" }
|
||||||
poster-storage = { path = "crates/adapters/poster-storage" }
|
poster-storage = { path = "crates/adapters/poster-storage" }
|
||||||
|
event-publisher = { path = "crates/adapters/event-publisher" }
|
||||||
rss = { path = "crates/adapters/rss" }
|
rss = { path = "crates/adapters/rss" }
|
||||||
sqlite = { path = "crates/adapters/sqlite" }
|
sqlite = { path = "crates/adapters/sqlite" }
|
||||||
template-askama = { path = "crates/adapters/template-askama" }
|
template-askama = { path = "crates/adapters/template-askama" }
|
||||||
|
|||||||
10
crates/adapters/event-publisher/Cargo.toml
Normal file
10
crates/adapters/event-publisher/Cargo.toml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
[package]
|
||||||
|
name = "event-publisher"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
domain = { workspace = true }
|
||||||
|
async-trait = { workspace = true }
|
||||||
|
tokio = { workspace = true }
|
||||||
|
tracing = { workspace = true }
|
||||||
209
crates/adapters/event-publisher/src/lib.rs
Normal file
209
crates/adapters/event-publisher/src/lib.rs
Normal file
@@ -0,0 +1,209 @@
|
|||||||
|
use async_trait::async_trait;
|
||||||
|
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
|
||||||
|
pub struct EventPublisherConfig {
|
||||||
|
pub channel_buffer: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EventPublisherConfig {
|
||||||
|
pub fn from_env() -> Self {
|
||||||
|
let channel_buffer = std::env::var("EVENT_CHANNEL_BUFFER")
|
||||||
|
.ok()
|
||||||
|
.and_then(|v| v.parse().ok())
|
||||||
|
.unwrap_or(128);
|
||||||
|
Self { channel_buffer }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
pub trait EventHandler: Send + Sync {
|
||||||
|
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct ChannelEventPublisher {
|
||||||
|
sender: mpsc::Sender<DomainEvent>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl EventPublisher for ChannelEventPublisher {
|
||||||
|
async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
self.sender
|
||||||
|
.send(event.clone())
|
||||||
|
.await
|
||||||
|
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct EventWorker {
|
||||||
|
receiver: mpsc::Receiver<DomainEvent>,
|
||||||
|
handlers: Vec<Box<dyn EventHandler>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EventWorker {
|
||||||
|
pub async fn run(mut self) {
|
||||||
|
while let Some(event) = self.receiver.recv().await {
|
||||||
|
match &event {
|
||||||
|
DomainEvent::ReviewLogged {
|
||||||
|
review_id,
|
||||||
|
movie_id,
|
||||||
|
user_id,
|
||||||
|
rating,
|
||||||
|
watched_at,
|
||||||
|
} => {
|
||||||
|
tracing::info!(
|
||||||
|
review_id = %review_id.value(),
|
||||||
|
movie_id = %movie_id.value(),
|
||||||
|
user_id = %user_id.value(),
|
||||||
|
rating = rating.value(),
|
||||||
|
watched_at = %watched_at,
|
||||||
|
"event: review_logged"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
DomainEvent::MovieDiscovered {
|
||||||
|
movie_id,
|
||||||
|
external_metadata_id,
|
||||||
|
} => {
|
||||||
|
tracing::info!(
|
||||||
|
movie_id = %movie_id.value(),
|
||||||
|
external_id = external_metadata_id.value(),
|
||||||
|
"event: movie_discovered"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for handler in &self.handlers {
|
||||||
|
if let Err(e) = handler.handle(&event).await {
|
||||||
|
tracing::error!("event handler error: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tracing::info!("event worker shut down");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct NoopEventPublisher;
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl EventPublisher for NoopEventPublisher {
|
||||||
|
async fn publish(&self, _event: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_event_channel(
|
||||||
|
config: EventPublisherConfig,
|
||||||
|
handlers: Vec<Box<dyn EventHandler>>,
|
||||||
|
) -> (ChannelEventPublisher, EventWorker) {
|
||||||
|
let (tx, rx) = mpsc::channel(config.channel_buffer);
|
||||||
|
(
|
||||||
|
ChannelEventPublisher { sender: tx },
|
||||||
|
EventWorker {
|
||||||
|
receiver: rx,
|
||||||
|
handlers,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use std::sync::{Arc, Mutex};
|
||||||
|
use domain::{
|
||||||
|
errors::DomainError,
|
||||||
|
events::DomainEvent,
|
||||||
|
value_objects::{ExternalMetadataId, MovieId},
|
||||||
|
};
|
||||||
|
|
||||||
|
struct RecordingHandler {
|
||||||
|
calls: Arc<Mutex<Vec<String>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl EventHandler for RecordingHandler {
|
||||||
|
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
let label = match event {
|
||||||
|
DomainEvent::MovieDiscovered { .. } => "movie_discovered",
|
||||||
|
DomainEvent::ReviewLogged { .. } => "review_logged",
|
||||||
|
};
|
||||||
|
self.calls.lock().unwrap().push(label.to_string());
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn single_handler_receives_event() {
|
||||||
|
let calls = Arc::new(Mutex::new(vec![]));
|
||||||
|
let handler = RecordingHandler { calls: Arc::clone(&calls) };
|
||||||
|
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||||
|
let (publisher, worker) = create_event_channel(config, vec![Box::new(handler)]);
|
||||||
|
|
||||||
|
let handle = tokio::spawn(worker.run());
|
||||||
|
|
||||||
|
let event = DomainEvent::MovieDiscovered {
|
||||||
|
movie_id: MovieId::generate(),
|
||||||
|
external_metadata_id: ExternalMetadataId::new("tt1234567".into()).unwrap(),
|
||||||
|
};
|
||||||
|
publisher.publish(&event).await.unwrap();
|
||||||
|
drop(publisher);
|
||||||
|
handle.await.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(*calls.lock().unwrap(), vec!["movie_discovered"]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn multiple_handlers_all_receive_event() {
|
||||||
|
let calls1 = Arc::new(Mutex::new(vec![]));
|
||||||
|
let calls2 = Arc::new(Mutex::new(vec![]));
|
||||||
|
let handler1 = RecordingHandler { calls: Arc::clone(&calls1) };
|
||||||
|
let handler2 = RecordingHandler { calls: Arc::clone(&calls2) };
|
||||||
|
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||||
|
let (publisher, worker) = create_event_channel(
|
||||||
|
config,
|
||||||
|
vec![Box::new(handler1), Box::new(handler2)],
|
||||||
|
);
|
||||||
|
|
||||||
|
let handle = tokio::spawn(worker.run());
|
||||||
|
|
||||||
|
let event = DomainEvent::MovieDiscovered {
|
||||||
|
movie_id: MovieId::generate(),
|
||||||
|
external_metadata_id: ExternalMetadataId::new("tt9999999".into()).unwrap(),
|
||||||
|
};
|
||||||
|
publisher.publish(&event).await.unwrap();
|
||||||
|
drop(publisher);
|
||||||
|
handle.await.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(calls1.lock().unwrap().len(), 1);
|
||||||
|
assert_eq!(calls2.lock().unwrap().len(), 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn handler_error_does_not_stop_worker() {
|
||||||
|
struct FailingHandler;
|
||||||
|
#[async_trait]
|
||||||
|
impl EventHandler for FailingHandler {
|
||||||
|
async fn handle(&self, _: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
Err(DomainError::InfrastructureError("boom".into()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let calls = Arc::new(Mutex::new(vec![]));
|
||||||
|
let good = RecordingHandler { calls: Arc::clone(&calls) };
|
||||||
|
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||||
|
let (publisher, worker) = create_event_channel(
|
||||||
|
config,
|
||||||
|
vec![Box::new(FailingHandler), Box::new(good)],
|
||||||
|
);
|
||||||
|
|
||||||
|
let handle = tokio::spawn(worker.run());
|
||||||
|
|
||||||
|
let event = DomainEvent::MovieDiscovered {
|
||||||
|
movie_id: MovieId::generate(),
|
||||||
|
external_metadata_id: ExternalMetadataId::new("tt0000001".into()).unwrap(),
|
||||||
|
};
|
||||||
|
publisher.publish(&event).await.unwrap();
|
||||||
|
drop(publisher);
|
||||||
|
handle.await.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(calls.lock().unwrap().len(), 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,6 +9,7 @@ anyhow = { workspace = true }
|
|||||||
async-trait = { workspace = true }
|
async-trait = { workspace = true }
|
||||||
tracing = { workspace = true }
|
tracing = { workspace = true }
|
||||||
object_store = { workspace = true }
|
object_store = { workspace = true }
|
||||||
|
infer = "0.19.0"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tokio = { workspace = true }
|
tokio = { workspace = true }
|
||||||
|
|||||||
@@ -7,9 +7,15 @@ use domain::{
|
|||||||
ports::PosterStorage,
|
ports::PosterStorage,
|
||||||
value_objects::{MovieId, PosterPath},
|
value_objects::{MovieId, PosterPath},
|
||||||
};
|
};
|
||||||
use object_store::{path::Path, ObjectStore};
|
use object_store::{Attribute, Attributes, PutOptions, path::Path, ObjectStore};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
fn detect_mime(bytes: &[u8]) -> &'static str {
|
||||||
|
infer::get(bytes)
|
||||||
|
.map(|t| t.mime_type())
|
||||||
|
.unwrap_or("application/octet-stream")
|
||||||
|
}
|
||||||
|
|
||||||
pub struct PosterStorageAdapter {
|
pub struct PosterStorageAdapter {
|
||||||
store: Arc<dyn ObjectStore>,
|
store: Arc<dyn ObjectStore>,
|
||||||
}
|
}
|
||||||
@@ -32,8 +38,12 @@ impl PosterStorage for PosterStorageAdapter {
|
|||||||
image_bytes: &[u8],
|
image_bytes: &[u8],
|
||||||
) -> Result<PosterPath, DomainError> {
|
) -> Result<PosterPath, DomainError> {
|
||||||
let path = Path::from(movie_id.value().to_string());
|
let path = Path::from(movie_id.value().to_string());
|
||||||
|
let mime = detect_mime(image_bytes);
|
||||||
|
let mut attributes = Attributes::new();
|
||||||
|
attributes.insert(Attribute::ContentType, mime.into());
|
||||||
|
let opts = PutOptions { attributes, ..Default::default() };
|
||||||
self.store
|
self.store
|
||||||
.put(&path, image_bytes.to_vec().into())
|
.put_opts(&path, image_bytes.to_vec().into(), opts)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||||
PosterPath::new(path.to_string())
|
PosterPath::new(path.to_string())
|
||||||
|
|||||||
@@ -0,0 +1,92 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "external_metadata_id",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "title",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "release_year",
|
||||||
|
"ordinal": 3,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "director",
|
||||||
|
"ordinal": 4,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "poster_path",
|
||||||
|
"ordinal": 5,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "review_id",
|
||||||
|
"ordinal": 6,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "movie_id",
|
||||||
|
"ordinal": 7,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "user_id",
|
||||||
|
"ordinal": 8,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rating",
|
||||||
|
"ordinal": 9,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "comment",
|
||||||
|
"ordinal": 10,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "watched_at",
|
||||||
|
"ordinal": 11,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "created_at",
|
||||||
|
"ordinal": 12,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 3
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "01a08873b7fa815ad98a56a0902b60414cfcdc2c7a8570351320c4bc425347c6"
|
||||||
|
}
|
||||||
@@ -0,0 +1,92 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "external_metadata_id",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "title",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "release_year",
|
||||||
|
"ordinal": 3,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "director",
|
||||||
|
"ordinal": 4,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "poster_path",
|
||||||
|
"ordinal": 5,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "review_id",
|
||||||
|
"ordinal": 6,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "movie_id",
|
||||||
|
"ordinal": 7,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "user_id",
|
||||||
|
"ordinal": 8,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rating",
|
||||||
|
"ordinal": 9,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "comment",
|
||||||
|
"ordinal": 10,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "watched_at",
|
||||||
|
"ordinal": 11,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "created_at",
|
||||||
|
"ordinal": 12,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 2
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "026e2afeb573707cb360fcdab8f6137aabfaf603b5ed57b98ac2888b4a0389ff"
|
||||||
|
}
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT COUNT(*) FROM reviews",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "COUNT(*)",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Integer"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 0
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "0963b9661182e139cd760bbabb0d6ea3a301a2a3adbdfdda4a88f333a1144c77"
|
||||||
|
}
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT id, email, password_hash FROM users WHERE email = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "email",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "password_hash",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "167481bb1692cc81531d9a5cd85425e43d09a6df97c335ac347f7cfd61acd171"
|
||||||
|
}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "INSERT OR IGNORE INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)",
|
||||||
|
"describe": {
|
||||||
|
"columns": [],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 4
|
||||||
|
},
|
||||||
|
"nullable": []
|
||||||
|
},
|
||||||
|
"hash": "18de90feb13b9f467f06d0ce25332d9ea7eabc99d9f1a44694e5d10762606f82"
|
||||||
|
}
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT id, email, password_hash FROM users WHERE id = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "email",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "password_hash",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "1bc5a51762717e45292626052f0a65ac0b8a001798a2476ea86143c5565df399"
|
||||||
|
}
|
||||||
@@ -0,0 +1,50 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE title = ? AND release_year = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "external_metadata_id",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "title",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "release_year",
|
||||||
|
"ordinal": 3,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "director",
|
||||||
|
"ordinal": 4,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "poster_path",
|
||||||
|
"ordinal": 5,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 2
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
true
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "3047579c6ed13ce87aad9b9ce6300c02f0df3516979518976e13f9d9abc6a403"
|
||||||
|
}
|
||||||
@@ -0,0 +1,50 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE id = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "external_metadata_id",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "title",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "release_year",
|
||||||
|
"ordinal": 3,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "director",
|
||||||
|
"ordinal": 4,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "poster_path",
|
||||||
|
"ordinal": 5,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
true
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "33d0dae7d16b0635c1c7eb5afd10824bb55af7cc7a854f590d326622863759d1"
|
||||||
|
}
|
||||||
@@ -0,0 +1,92 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n WHERE r.movie_id = ?\n ORDER BY r.watched_at DESC\n LIMIT ? OFFSET ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "external_metadata_id",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "title",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "release_year",
|
||||||
|
"ordinal": 3,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "director",
|
||||||
|
"ordinal": 4,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "poster_path",
|
||||||
|
"ordinal": 5,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "review_id",
|
||||||
|
"ordinal": 6,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "movie_id",
|
||||||
|
"ordinal": 7,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "user_id",
|
||||||
|
"ordinal": 8,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rating",
|
||||||
|
"ordinal": 9,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "comment",
|
||||||
|
"ordinal": 10,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "watched_at",
|
||||||
|
"ordinal": 11,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "created_at",
|
||||||
|
"ordinal": 12,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 3
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "47f7cf95ce3450635b643ab710cadba96f40319140834d510bc5207b2552e055"
|
||||||
|
}
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT COUNT(*) FROM reviews WHERE movie_id = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "COUNT(*)",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Integer"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "4b3074b532342c6356ee0e8e4d8c4a830f016234bb690e1f6240f02824d6d84f"
|
||||||
|
}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "INSERT INTO reviews (id, movie_id, user_id, rating, comment, watched_at, created_at)\n VALUES (?, ?, ?, ?, ?, ?, ?)",
|
||||||
|
"describe": {
|
||||||
|
"columns": [],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 7
|
||||||
|
},
|
||||||
|
"nullable": []
|
||||||
|
},
|
||||||
|
"hash": "630e092fcd33bc312befef352a98225e6e18e6079644b949258a39bf4b0fe3e5"
|
||||||
|
}
|
||||||
@@ -0,0 +1,50 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT id, external_metadata_id, title, release_year, director, poster_path\n FROM movies WHERE external_metadata_id = ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "external_metadata_id",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "title",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "release_year",
|
||||||
|
"ordinal": 3,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "director",
|
||||||
|
"ordinal": 4,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "poster_path",
|
||||||
|
"ordinal": 5,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
true
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "7bc4aebcb94547976d3d7e063e4e908fc22b977b3cbf063ee93ffe4648c42011"
|
||||||
|
}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "INSERT INTO movies (id, external_metadata_id, title, release_year, director, poster_path)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(id) DO UPDATE SET\n external_metadata_id = excluded.external_metadata_id,\n title = excluded.title,\n release_year = excluded.release_year,\n director = excluded.director,\n poster_path = excluded.poster_path",
|
||||||
|
"describe": {
|
||||||
|
"columns": [],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 6
|
||||||
|
},
|
||||||
|
"nullable": []
|
||||||
|
},
|
||||||
|
"hash": "7d7e23355ee0e442f2aa27e898dcfa40bdc4b09391afe04325f076157d9d84aa"
|
||||||
|
}
|
||||||
@@ -0,0 +1,56 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT id, movie_id, user_id, rating, comment, watched_at, created_at\n FROM reviews WHERE movie_id = ? ORDER BY watched_at ASC",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "movie_id",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "user_id",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rating",
|
||||||
|
"ordinal": 3,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "comment",
|
||||||
|
"ordinal": 4,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "watched_at",
|
||||||
|
"ordinal": 5,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "created_at",
|
||||||
|
"ordinal": 6,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 1
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "af883f8b78f185077e2d3dcfaa0a6e62fbdfbf00c97c9b33b699dc631476181d"
|
||||||
|
}
|
||||||
@@ -0,0 +1,92 @@
|
|||||||
|
{
|
||||||
|
"db_name": "SQLite",
|
||||||
|
"query": "SELECT m.id, m.external_metadata_id, m.title, m.release_year, m.director, m.poster_path,\n r.id AS review_id, r.movie_id, r.user_id, r.rating, r.comment, r.watched_at, r.created_at\n FROM reviews r\n INNER JOIN movies m ON m.id = r.movie_id\n ORDER BY r.watched_at ASC\n LIMIT ? OFFSET ?",
|
||||||
|
"describe": {
|
||||||
|
"columns": [
|
||||||
|
{
|
||||||
|
"name": "id",
|
||||||
|
"ordinal": 0,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "external_metadata_id",
|
||||||
|
"ordinal": 1,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "title",
|
||||||
|
"ordinal": 2,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "release_year",
|
||||||
|
"ordinal": 3,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "director",
|
||||||
|
"ordinal": 4,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "poster_path",
|
||||||
|
"ordinal": 5,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "review_id",
|
||||||
|
"ordinal": 6,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "movie_id",
|
||||||
|
"ordinal": 7,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "user_id",
|
||||||
|
"ordinal": 8,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "rating",
|
||||||
|
"ordinal": 9,
|
||||||
|
"type_info": "Integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "comment",
|
||||||
|
"ordinal": 10,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "watched_at",
|
||||||
|
"ordinal": 11,
|
||||||
|
"type_info": "Text"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "created_at",
|
||||||
|
"ordinal": 12,
|
||||||
|
"type_info": "Text"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"parameters": {
|
||||||
|
"Right": 2
|
||||||
|
},
|
||||||
|
"nullable": [
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"hash": "affe1eb261283c09d4b1ce6e684681755f079a044ffec8ff2bd79cfd8efe16b8"
|
||||||
|
}
|
||||||
@@ -7,7 +7,7 @@ use domain::{
|
|||||||
collections::Paginated,
|
collections::Paginated,
|
||||||
},
|
},
|
||||||
ports::MovieRepository,
|
ports::MovieRepository,
|
||||||
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear},
|
value_objects::{ExternalMetadataId, MovieId, MovieTitle, ReleaseYear, ReviewId},
|
||||||
};
|
};
|
||||||
use sqlx::SqlitePool;
|
use sqlx::SqlitePool;
|
||||||
|
|
||||||
@@ -273,6 +273,39 @@ impl MovieRepository for SqliteMovieRepository {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn get_review_by_id(&self, review_id: &ReviewId) -> Result<Option<Review>, DomainError> {
|
||||||
|
let id = review_id.value().to_string();
|
||||||
|
sqlx::query_as!(
|
||||||
|
ReviewRow,
|
||||||
|
"SELECT id, movie_id, user_id, rating, comment, watched_at, created_at
|
||||||
|
FROM reviews WHERE id = ?",
|
||||||
|
id
|
||||||
|
)
|
||||||
|
.fetch_optional(&self.pool)
|
||||||
|
.await
|
||||||
|
.map_err(Self::map_err)?
|
||||||
|
.map(ReviewRow::to_domain)
|
||||||
|
.transpose()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError> {
|
||||||
|
let id = review_id.value().to_string();
|
||||||
|
sqlx::query!("DELETE FROM reviews WHERE id = ?", id)
|
||||||
|
.execute(&self.pool)
|
||||||
|
.await
|
||||||
|
.map_err(Self::map_err)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError> {
|
||||||
|
let id = movie_id.value().to_string();
|
||||||
|
sqlx::query!("DELETE FROM movies WHERE id = ?", id)
|
||||||
|
.execute(&self.pool)
|
||||||
|
.await
|
||||||
|
.map_err(Self::map_err)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError> {
|
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError> {
|
||||||
let id_str = movie_id.value().to_string();
|
let id_str = movie_id.value().to_string();
|
||||||
|
|
||||||
|
|||||||
@@ -73,4 +73,79 @@ impl UserRepository for SqliteUserRepository {
|
|||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError> {
|
||||||
|
let id_str = id.value().to_string();
|
||||||
|
let row = sqlx::query!(
|
||||||
|
"SELECT id, email, password_hash FROM users WHERE id = ?",
|
||||||
|
id_str
|
||||||
|
)
|
||||||
|
.fetch_optional(&self.pool)
|
||||||
|
.await
|
||||||
|
.map_err(Self::map_err)?;
|
||||||
|
|
||||||
|
match row {
|
||||||
|
None => Ok(None),
|
||||||
|
Some(r) => {
|
||||||
|
let uuid = uuid::Uuid::parse_str(&r.id)
|
||||||
|
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||||
|
let email = Email::new(r.email)
|
||||||
|
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||||
|
let hash = PasswordHash::new(r.password_hash)
|
||||||
|
.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
|
||||||
|
Ok(Some(User::from_persistence(UserId::from_uuid(uuid), email, hash)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use sqlx::SqlitePool;
|
||||||
|
|
||||||
|
async fn setup() -> (SqlitePool, SqliteUserRepository) {
|
||||||
|
let pool = SqlitePool::connect(":memory:").await.unwrap();
|
||||||
|
sqlx::query(
|
||||||
|
"CREATE TABLE users (id TEXT PRIMARY KEY, email TEXT NOT NULL UNIQUE, password_hash TEXT NOT NULL, created_at TEXT NOT NULL)"
|
||||||
|
)
|
||||||
|
.execute(&pool)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
let repo = SqliteUserRepository::new(pool.clone());
|
||||||
|
(pool, repo)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn find_by_id_returns_none_when_not_found() {
|
||||||
|
let (_, repo) = setup().await;
|
||||||
|
let result = repo
|
||||||
|
.find_by_id(&UserId::from_uuid(uuid::Uuid::new_v4()))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn find_by_id_returns_user_when_found() {
|
||||||
|
let (pool, repo) = setup().await;
|
||||||
|
let id = uuid::Uuid::new_v4();
|
||||||
|
sqlx::query(
|
||||||
|
"INSERT INTO users (id, email, password_hash, created_at) VALUES (?, ?, ?, ?)"
|
||||||
|
)
|
||||||
|
.bind(id.to_string())
|
||||||
|
.bind("test@example.com")
|
||||||
|
.bind("$argon2id$v=19$m=65536,t=2,p=1$fakesalt$fakehash")
|
||||||
|
.bind("2026-01-01T00:00:00Z")
|
||||||
|
.execute(&pool)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let result = repo
|
||||||
|
.find_by_id(&UserId::from_uuid(id))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(result.is_some());
|
||||||
|
assert_eq!(result.unwrap().email().value(), "test@example.com");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
use askama::Template;
|
use askama::Template;
|
||||||
use application::ports::HtmlRenderer;
|
use application::ports::{
|
||||||
|
HtmlPageContext, HtmlRenderer, LoginPageData, NewReviewPageData, RegisterPageData,
|
||||||
|
};
|
||||||
use domain::models::{DiaryEntry, collections::Paginated};
|
use domain::models::{DiaryEntry, collections::Paginated};
|
||||||
|
|
||||||
#[derive(Template)]
|
#[derive(Template)]
|
||||||
@@ -9,6 +11,28 @@ struct DiaryTemplate<'a> {
|
|||||||
current_offset: u32,
|
current_offset: u32,
|
||||||
limit: u32,
|
limit: u32,
|
||||||
has_more: bool,
|
has_more: bool,
|
||||||
|
ctx: &'a HtmlPageContext,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Template)]
|
||||||
|
#[template(path = "login.html")]
|
||||||
|
struct LoginTemplate<'a> {
|
||||||
|
error: Option<&'a str>,
|
||||||
|
ctx: &'a HtmlPageContext,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Template)]
|
||||||
|
#[template(path = "register.html")]
|
||||||
|
struct RegisterTemplate<'a> {
|
||||||
|
error: Option<&'a str>,
|
||||||
|
ctx: &'a HtmlPageContext,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Template)]
|
||||||
|
#[template(path = "new_review.html")]
|
||||||
|
struct NewReviewTemplate<'a> {
|
||||||
|
error: Option<&'a str>,
|
||||||
|
ctx: &'a HtmlPageContext,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct AskamaHtmlRenderer;
|
pub struct AskamaHtmlRenderer;
|
||||||
@@ -20,16 +44,43 @@ impl AskamaHtmlRenderer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl HtmlRenderer for AskamaHtmlRenderer {
|
impl HtmlRenderer for AskamaHtmlRenderer {
|
||||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String> {
|
fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String> {
|
||||||
let has_more = (data.offset + data.limit) < data.total_count as u32;
|
let has_more = (data.offset + data.limit) < data.total_count as u32;
|
||||||
|
DiaryTemplate {
|
||||||
let template = DiaryTemplate {
|
|
||||||
entries: &data.items,
|
entries: &data.items,
|
||||||
current_offset: data.offset,
|
current_offset: data.offset,
|
||||||
limit: data.limit,
|
limit: data.limit,
|
||||||
has_more,
|
has_more,
|
||||||
};
|
ctx: &ctx,
|
||||||
|
}
|
||||||
|
.render()
|
||||||
|
.map_err(|e| e.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
template.render().map_err(|e| e.to_string())
|
fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String> {
|
||||||
|
LoginTemplate {
|
||||||
|
error: data.error,
|
||||||
|
ctx: &data.ctx,
|
||||||
|
}
|
||||||
|
.render()
|
||||||
|
.map_err(|e| e.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String> {
|
||||||
|
RegisterTemplate {
|
||||||
|
error: data.error,
|
||||||
|
ctx: &data.ctx,
|
||||||
|
}
|
||||||
|
.render()
|
||||||
|
.map_err(|e| e.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String> {
|
||||||
|
NewReviewTemplate {
|
||||||
|
error: data.error,
|
||||||
|
ctx: &data.ctx,
|
||||||
|
}
|
||||||
|
.render()
|
||||||
|
.map_err(|e| e.to_string())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
29
crates/adapters/template-askama/templates/base.html
Normal file
29
crates/adapters/template-askama/templates/base.html
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||||
|
<title>Movies Diary</title>
|
||||||
|
<link rel="stylesheet" href="/static/style.css">
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<header>
|
||||||
|
<a href="/" class="site-title">Movies Diary</a>
|
||||||
|
<nav>
|
||||||
|
{% if let Some(email) = ctx.user_email %}
|
||||||
|
<a href="/reviews/new">Add Review</a>
|
||||||
|
<span class="user-email">{{ email }}</span>
|
||||||
|
<a href="/logout">Logout</a>
|
||||||
|
{% else %}
|
||||||
|
<a href="/login">Login</a>
|
||||||
|
{% if ctx.register_enabled %}
|
||||||
|
<a href="/register">Register</a>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
</nav>
|
||||||
|
</header>
|
||||||
|
<main>
|
||||||
|
{% block content %}{% endblock %}
|
||||||
|
</main>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
@@ -1,76 +1,45 @@
|
|||||||
<!-- crates/presentation/templates/diary.html -->
|
{% extends "base.html" %}
|
||||||
<!DOCTYPE html>
|
{% block content %}
|
||||||
<html lang="en">
|
<div class="diary">
|
||||||
<head>
|
{% for entry in entries %}
|
||||||
<meta charset="UTF-8">
|
<article class="entry">
|
||||||
<title>My Movie Diary</title>
|
{% if let Some(poster) = entry.movie().poster_path() %}
|
||||||
<style>
|
<div class="poster">
|
||||||
/* Minimalist old-school styling */
|
<img src="/posters/{{ poster.value() }}" alt="">
|
||||||
body { font-family: monospace; max-width: 800px; margin: 0 auto; padding: 20px; }
|
|
||||||
.entry { border-bottom: 1px solid #ccc; padding: 10px 0; }
|
|
||||||
.poster { max-width: 100px; float: left; margin-right: 15px; }
|
|
||||||
.clear { clear: both; }
|
|
||||||
.error { color: red; }
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<h1>Movie Diary</h1>
|
|
||||||
|
|
||||||
<!-- Zero-JS Form Submission -->
|
|
||||||
<form action="/reviews" method="POST">
|
|
||||||
<fieldset>
|
|
||||||
<legend>Log a Movie</legend>
|
|
||||||
|
|
||||||
<label for="tmdb_id">TMDB ID (Optional):</label>
|
|
||||||
<input type="text" name="external_metadata_id" id="tmdb_id"><br><br>
|
|
||||||
|
|
||||||
<label for="title">Title (Fallback):</label>
|
|
||||||
<input type="text" name="manual_title" id="title"><br><br>
|
|
||||||
|
|
||||||
<label for="year">Year (Fallback):</label>
|
|
||||||
<input type="number" name="manual_release_year" id="year" min="1888"><br><br>
|
|
||||||
|
|
||||||
<label for="rating">Rating (0-5):</label>
|
|
||||||
<input type="number" name="rating" id="rating" min="0" max="5" required><br><br>
|
|
||||||
|
|
||||||
<button type="submit">Log Movie</button>
|
|
||||||
</fieldset>
|
|
||||||
</form>
|
|
||||||
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
<!-- Rendering the Domain Models -->
|
|
||||||
<div class="diary-entries">
|
|
||||||
{% for entry in entries %}
|
|
||||||
<div class="entry">
|
|
||||||
{% if let Some(poster) = entry.movie().poster_path() %}
|
|
||||||
<!-- Assuming you have a route to serve the raw images -->
|
|
||||||
<img src="/static/posters/{{ poster.value() }}" class="poster" alt="Poster">
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
<h3>{{ entry.movie().title().value() }} ({{ entry.movie().release_year().value() }})</h3>
|
|
||||||
<p><strong>Rating:</strong> {{ entry.review().rating().value() }} / 5</p>
|
|
||||||
|
|
||||||
{% if let Some(comment) = entry.review().comment() %}
|
|
||||||
<p><em>"{{ comment.value() }}"</em></p>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
<p><small>Watched on: {{ entry.review().watched_at().format("%Y-%m-%d") }}</small></p>
|
|
||||||
<div class="clear"></div>
|
|
||||||
</div>
|
|
||||||
{% else %}
|
|
||||||
<p>No movies logged yet. Go watch something!</p>
|
|
||||||
{% endfor %}
|
|
||||||
</div>
|
</div>
|
||||||
|
{% endif %}
|
||||||
<!-- Simple Pagination -->
|
<div class="entry-body">
|
||||||
<div>
|
<div class="entry-title">
|
||||||
{% if current_offset > 0 %}
|
{{ entry.movie().title().value() }}
|
||||||
<a href="/diary?offset={{ current_offset - limit }}">Previous Page</a>
|
<span class="year">({{ entry.movie().release_year().value() }})</span>
|
||||||
{% endif %}
|
</div>
|
||||||
{% if has_more %}
|
{% if let Some(dir) = entry.movie().director() %}
|
||||||
<a href="/diary?offset={{ current_offset + limit }}">Next Page</a>
|
<div class="director">{{ dir }}</div>
|
||||||
|
{% endif %}
|
||||||
|
<div class="rating">{{ entry.review().rating().value() }}/5</div>
|
||||||
|
{% if let Some(comment) = entry.review().comment() %}
|
||||||
|
<div class="comment">{{ comment.value() }}</div>
|
||||||
|
{% endif %}
|
||||||
|
<div class="watched-at">{{ entry.review().watched_at().format("%Y-%m-%d") }}</div>
|
||||||
|
{% if let Some(uid) = ctx.user_id %}
|
||||||
|
{% if *uid == entry.review().user_id().value() %}
|
||||||
|
<form method="post" action="/reviews/{{ entry.review().id().value() }}/delete">
|
||||||
|
<button type="submit">Delete</button>
|
||||||
|
</form>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
</body>
|
</article>
|
||||||
</html>
|
{% else %}
|
||||||
|
<p class="empty">No movies logged yet.</p>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
<nav class="pagination">
|
||||||
|
{% if current_offset > 0 %}
|
||||||
|
<a href="/?offset={{ current_offset - limit }}">← Prev</a>
|
||||||
|
{% endif %}
|
||||||
|
{% if has_more %}
|
||||||
|
<a href="/?offset={{ current_offset + limit }}">Next →</a>
|
||||||
|
{% endif %}
|
||||||
|
</nav>
|
||||||
|
{% endblock %}
|
||||||
|
|||||||
18
crates/adapters/template-askama/templates/login.html
Normal file
18
crates/adapters/template-askama/templates/login.html
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
{% block content %}
|
||||||
|
<h1>Login</h1>
|
||||||
|
{% if let Some(err) = error %}
|
||||||
|
<p class="error">{{ err }}</p>
|
||||||
|
{% endif %}
|
||||||
|
<form method="POST" action="/login">
|
||||||
|
<label>
|
||||||
|
Email<br>
|
||||||
|
<input type="email" name="email" required autocomplete="email">
|
||||||
|
</label>
|
||||||
|
<label>
|
||||||
|
Password<br>
|
||||||
|
<input type="password" name="password" required autocomplete="current-password">
|
||||||
|
</label>
|
||||||
|
<button type="submit">Login</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
40
crates/adapters/template-askama/templates/new_review.html
Normal file
40
crates/adapters/template-askama/templates/new_review.html
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
{% block content %}
|
||||||
|
<h1>Log a Review</h1>
|
||||||
|
{% if let Some(err) = error %}
|
||||||
|
<p class="error">{{ err }}</p>
|
||||||
|
{% endif %}
|
||||||
|
<form method="POST" action="/reviews">
|
||||||
|
<label>
|
||||||
|
OMDB ID <span class="optional">(optional)</span><br>
|
||||||
|
<input type="text" name="external_metadata_id" placeholder="tt0166924">
|
||||||
|
</label>
|
||||||
|
<hr>
|
||||||
|
<label>
|
||||||
|
Title<br>
|
||||||
|
<input type="text" name="manual_title">
|
||||||
|
</label>
|
||||||
|
<label>
|
||||||
|
Year<br>
|
||||||
|
<input type="number" name="manual_release_year" min="1888" max="2100">
|
||||||
|
</label>
|
||||||
|
<label>
|
||||||
|
Director<br>
|
||||||
|
<input type="text" name="manual_director">
|
||||||
|
</label>
|
||||||
|
<hr>
|
||||||
|
<label>
|
||||||
|
Rating (0–5)<br>
|
||||||
|
<input type="number" name="rating" min="0" max="5" required>
|
||||||
|
</label>
|
||||||
|
<label>
|
||||||
|
Watched<br>
|
||||||
|
<input type="datetime-local" name="watched_at" required>
|
||||||
|
</label>
|
||||||
|
<label>
|
||||||
|
Comment<br>
|
||||||
|
<textarea name="comment"></textarea>
|
||||||
|
</label>
|
||||||
|
<button type="submit">Log Review</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
18
crates/adapters/template-askama/templates/register.html
Normal file
18
crates/adapters/template-askama/templates/register.html
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
{% block content %}
|
||||||
|
<h1>Register</h1>
|
||||||
|
{% if let Some(err) = error %}
|
||||||
|
<p class="error">{{ err }}</p>
|
||||||
|
{% endif %}
|
||||||
|
<form method="POST" action="/register">
|
||||||
|
<label>
|
||||||
|
Email<br>
|
||||||
|
<input type="email" name="email" required autocomplete="email">
|
||||||
|
</label>
|
||||||
|
<label>
|
||||||
|
Password<br>
|
||||||
|
<input type="password" name="password" required autocomplete="new-password">
|
||||||
|
</label>
|
||||||
|
<button type="submit">Register</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
@@ -14,6 +14,7 @@ pub struct LogReviewCommand {
|
|||||||
pub watched_at: NaiveDateTime,
|
pub watched_at: NaiveDateTime,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
pub struct SyncPosterCommand {
|
pub struct SyncPosterCommand {
|
||||||
pub movie_id: Uuid,
|
pub movie_id: Uuid,
|
||||||
pub external_metadata_id: String,
|
pub external_metadata_id: String,
|
||||||
@@ -28,3 +29,8 @@ pub struct RegisterCommand {
|
|||||||
pub email: String,
|
pub email: String,
|
||||||
pub password: String,
|
pub password: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct DeleteReviewCommand {
|
||||||
|
pub review_id: Uuid,
|
||||||
|
pub requesting_user_id: Uuid,
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,7 +1,33 @@
|
|||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
use domain::models::{DiaryEntry, collections::Paginated};
|
use domain::models::{DiaryEntry, collections::Paginated};
|
||||||
|
|
||||||
|
pub struct HtmlPageContext {
|
||||||
|
pub user_email: Option<String>,
|
||||||
|
pub user_id: Option<Uuid>,
|
||||||
|
pub register_enabled: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct LoginPageData<'a> {
|
||||||
|
pub ctx: HtmlPageContext,
|
||||||
|
pub error: Option<&'a str>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct RegisterPageData<'a> {
|
||||||
|
pub ctx: HtmlPageContext,
|
||||||
|
pub error: Option<&'a str>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct NewReviewPageData<'a> {
|
||||||
|
pub ctx: HtmlPageContext,
|
||||||
|
pub error: Option<&'a str>,
|
||||||
|
}
|
||||||
|
|
||||||
pub trait HtmlRenderer: Send + Sync {
|
pub trait HtmlRenderer: Send + Sync {
|
||||||
fn render_diary_page(&self, data: &Paginated<DiaryEntry>) -> Result<String, String>;
|
fn render_diary_page(&self, data: &Paginated<DiaryEntry>, ctx: HtmlPageContext) -> Result<String, String>;
|
||||||
|
fn render_login_page(&self, data: LoginPageData<'_>) -> Result<String, String>;
|
||||||
|
fn render_register_page(&self, data: RegisterPageData<'_>) -> Result<String, String>;
|
||||||
|
fn render_new_review_page(&self, data: NewReviewPageData<'_>) -> Result<String, String>;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait RssFeedRenderer: Send + Sync {
|
pub trait RssFeedRenderer: Send + Sync {
|
||||||
|
|||||||
27
crates/application/src/use_cases/delete_review.rs
Normal file
27
crates/application/src/use_cases/delete_review.rs
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
use domain::{errors::DomainError, value_objects::{ReviewId, UserId}};
|
||||||
|
use crate::{commands::DeleteReviewCommand, context::AppContext};
|
||||||
|
|
||||||
|
pub async fn execute(ctx: &AppContext, cmd: DeleteReviewCommand) -> Result<(), DomainError> {
|
||||||
|
let review_id = ReviewId::from_uuid(cmd.review_id);
|
||||||
|
let requesting_user_id = UserId::from_uuid(cmd.requesting_user_id);
|
||||||
|
|
||||||
|
let review = ctx
|
||||||
|
.repository
|
||||||
|
.get_review_by_id(&review_id)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| DomainError::NotFound(format!("review {}", cmd.review_id)))?;
|
||||||
|
|
||||||
|
if review.user_id() != &requesting_user_id {
|
||||||
|
return Err(DomainError::Unauthorized("not your review".into()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let movie_id = review.movie_id().clone();
|
||||||
|
ctx.repository.delete_review(&review_id).await?;
|
||||||
|
|
||||||
|
let history = ctx.repository.get_review_history(&movie_id).await?;
|
||||||
|
if history.viewings().is_empty() {
|
||||||
|
ctx.repository.delete_movie(&movie_id).await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
@@ -1,3 +1,4 @@
|
|||||||
|
pub mod delete_review;
|
||||||
pub mod get_diary;
|
pub mod get_diary;
|
||||||
pub mod get_review_history;
|
pub mod get_review_history;
|
||||||
pub mod log_review;
|
pub mod log_review;
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ use crate::{
|
|||||||
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
|
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
|
||||||
value_objects::{
|
value_objects::{
|
||||||
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
|
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
|
||||||
ReleaseYear, UserId,
|
ReleaseYear, ReviewId, UserId,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -32,6 +32,12 @@ pub trait MovieRepository: Send + Sync {
|
|||||||
-> Result<Paginated<DiaryEntry>, DomainError>;
|
-> Result<Paginated<DiaryEntry>, DomainError>;
|
||||||
|
|
||||||
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError>;
|
async fn get_review_history(&self, movie_id: &MovieId) -> Result<ReviewHistory, DomainError>;
|
||||||
|
|
||||||
|
async fn get_review_by_id(&self, review_id: &ReviewId) -> Result<Option<Review>, DomainError>;
|
||||||
|
|
||||||
|
async fn delete_review(&self, review_id: &ReviewId) -> Result<(), DomainError>;
|
||||||
|
|
||||||
|
async fn delete_movie(&self, movie_id: &MovieId) -> Result<(), DomainError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum MetadataSearchCriteria {
|
pub enum MetadataSearchCriteria {
|
||||||
@@ -82,6 +88,7 @@ pub trait AuthService: Send + Sync {
|
|||||||
pub trait UserRepository: Send + Sync {
|
pub trait UserRepository: Send + Sync {
|
||||||
async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError>;
|
async fn find_by_email(&self, email: &Email) -> Result<Option<User>, DomainError>;
|
||||||
async fn save(&self, user: &User) -> Result<(), DomainError>;
|
async fn save(&self, user: &User) -> Result<(), DomainError>;
|
||||||
|
async fn find_by_id(&self, id: &UserId) -> Result<Option<User>, DomainError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
|
|||||||
@@ -28,7 +28,9 @@ poster-storage = { workspace = true }
|
|||||||
sqlite = { workspace = true }
|
sqlite = { workspace = true }
|
||||||
sqlx = { workspace = true }
|
sqlx = { workspace = true }
|
||||||
template-askama = { workspace = true }
|
template-askama = { workspace = true }
|
||||||
|
event-publisher = { workspace = true }
|
||||||
rss = { workspace = true }
|
rss = { workspace = true }
|
||||||
|
infer = "0.19.0"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tower = { version = "0.5", features = ["util"] }
|
tower = { version = "0.5", features = ["util"] }
|
||||||
|
|||||||
@@ -1,6 +1,19 @@
|
|||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
|
||||||
|
where
|
||||||
|
D: serde::Deserializer<'de>,
|
||||||
|
T: std::str::FromStr,
|
||||||
|
T::Err: std::fmt::Display,
|
||||||
|
{
|
||||||
|
let s = Option::<String>::deserialize(de)?;
|
||||||
|
match s.as_deref() {
|
||||||
|
None | Some("") => Ok(None),
|
||||||
|
Some(s) => s.parse::<T>().map(Some).map_err(serde::de::Error::custom),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
pub struct DiaryQueryParams {
|
pub struct DiaryQueryParams {
|
||||||
pub limit: Option<u32>,
|
pub limit: Option<u32>,
|
||||||
@@ -11,15 +24,37 @@ pub struct DiaryQueryParams {
|
|||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
pub struct LogReviewForm {
|
pub struct LogReviewForm {
|
||||||
|
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||||
pub external_metadata_id: Option<String>,
|
pub external_metadata_id: Option<String>,
|
||||||
|
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||||
pub manual_title: Option<String>,
|
pub manual_title: Option<String>,
|
||||||
|
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||||
pub manual_release_year: Option<u16>,
|
pub manual_release_year: Option<u16>,
|
||||||
|
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||||
pub manual_director: Option<String>,
|
pub manual_director: Option<String>,
|
||||||
pub rating: u8,
|
pub rating: u8,
|
||||||
|
#[serde(default, deserialize_with = "empty_string_as_none")]
|
||||||
pub comment: Option<String>,
|
pub comment: Option<String>,
|
||||||
pub watched_at: String,
|
pub watched_at: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
pub struct LoginForm {
|
||||||
|
pub email: String,
|
||||||
|
pub password: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
pub struct RegisterForm {
|
||||||
|
pub email: String,
|
||||||
|
pub password: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
pub struct ErrorQuery {
|
||||||
|
pub error: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
pub struct LogReviewRequest {
|
pub struct LogReviewRequest {
|
||||||
pub external_metadata_id: Option<String>,
|
pub external_metadata_id: Option<String>,
|
||||||
|
|||||||
174
crates/presentation/src/event_handlers.rs
Normal file
174
crates/presentation/src/event_handlers.rs
Normal file
@@ -0,0 +1,174 @@
|
|||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
use application::{commands::SyncPosterCommand, context::AppContext, use_cases::sync_poster};
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use domain::{errors::DomainError, events::DomainEvent};
|
||||||
|
use event_publisher::EventHandler;
|
||||||
|
|
||||||
|
pub struct PosterSyncHandler {
|
||||||
|
ctx: AppContext,
|
||||||
|
max_retries: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PosterSyncHandler {
|
||||||
|
pub fn new(ctx: AppContext, max_retries: u32) -> Self {
|
||||||
|
Self { ctx, max_retries }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl EventHandler for PosterSyncHandler {
|
||||||
|
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
let (movie_id, external_metadata_id) = match event {
|
||||||
|
DomainEvent::MovieDiscovered {
|
||||||
|
movie_id,
|
||||||
|
external_metadata_id,
|
||||||
|
} => (movie_id.value(), external_metadata_id.value().to_owned()),
|
||||||
|
_ => return Ok(()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut last_err: Option<DomainError> = None;
|
||||||
|
for attempt in 0..=self.max_retries {
|
||||||
|
let cmd = SyncPosterCommand {
|
||||||
|
movie_id,
|
||||||
|
external_metadata_id: external_metadata_id.clone(),
|
||||||
|
};
|
||||||
|
match sync_poster::execute(&self.ctx, cmd).await {
|
||||||
|
Ok(()) => return Ok(()),
|
||||||
|
Err(e) => {
|
||||||
|
if attempt < self.max_retries {
|
||||||
|
let delay = Duration::from_secs(2u64.pow(attempt));
|
||||||
|
tracing::warn!(
|
||||||
|
attempt = attempt + 1,
|
||||||
|
max_attempts = self.max_retries + 1,
|
||||||
|
delay_secs = delay.as_secs(),
|
||||||
|
"poster sync failed, retrying: {e}"
|
||||||
|
);
|
||||||
|
tokio::time::sleep(delay).await;
|
||||||
|
}
|
||||||
|
last_err = Some(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let err = last_err.expect("loop runs at least once and always sets last_err on Err");
|
||||||
|
tracing::error!(
|
||||||
|
attempts = self.max_retries + 1,
|
||||||
|
"poster sync failed after all attempts: {err}"
|
||||||
|
);
|
||||||
|
Err(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use application::config::AppConfig;
|
||||||
|
use domain::{
|
||||||
|
errors::DomainError,
|
||||||
|
events::DomainEvent,
|
||||||
|
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
|
||||||
|
ports::{
|
||||||
|
AuthService, EventPublisher, GeneratedToken, MetadataClient, MetadataSearchCriteria,
|
||||||
|
MovieRepository, PasswordHasher, PosterFetcherClient, PosterStorage, UserRepository,
|
||||||
|
},
|
||||||
|
value_objects::{
|
||||||
|
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
|
||||||
|
Rating, ReleaseYear, ReviewId, UserId,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Panic-stub ports: each method panics so any accidental dispatch into a service
|
||||||
|
// fails the test loudly rather than silently succeeding.
|
||||||
|
struct PanicRepo;
|
||||||
|
struct PanicMetadata;
|
||||||
|
struct PanicFetcher;
|
||||||
|
struct PanicStorage;
|
||||||
|
struct PanicAuth;
|
||||||
|
struct PanicHasher;
|
||||||
|
struct PanicUserRepo;
|
||||||
|
struct NoopPublisher;
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl MovieRepository for PanicRepo {
|
||||||
|
async fn get_movie_by_external_id(&self, _: &ExternalMetadataId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_movies_by_title_and_year(&self, _: &MovieTitle, _: &ReleaseYear) -> Result<Vec<Movie>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
|
||||||
|
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
|
||||||
|
async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_review_by_id(&self, _: &ReviewId) -> Result<Option<Review>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn delete_review(&self, _: &ReviewId) -> Result<(), DomainError> { panic!("unexpected") }
|
||||||
|
async fn delete_movie(&self, _: &MovieId) -> Result<(), DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl MetadataClient for PanicMetadata {
|
||||||
|
async fn fetch_movie_metadata(&self, _: &MetadataSearchCriteria) -> Result<Movie, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_poster_url(&self, _: &ExternalMetadataId) -> Result<Option<PosterUrl>, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl PosterFetcherClient for PanicFetcher {
|
||||||
|
async fn fetch_poster_bytes(&self, _: &PosterUrl) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl PosterStorage for PanicStorage {
|
||||||
|
async fn store_poster(&self, _: &MovieId, _: &[u8]) -> Result<PosterPath, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_poster(&self, _: &PosterPath) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl AuthService for PanicAuth {
|
||||||
|
async fn generate_token(&self, _: &UserId) -> Result<GeneratedToken, DomainError> { panic!("unexpected") }
|
||||||
|
async fn validate_token(&self, _: &str) -> Result<UserId, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl PasswordHasher for PanicHasher {
|
||||||
|
async fn hash(&self, _: &str) -> Result<PasswordHash, DomainError> { panic!("unexpected") }
|
||||||
|
async fn verify(&self, _: &str, _: &PasswordHash) -> Result<bool, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl UserRepository for PanicUserRepo {
|
||||||
|
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") }
|
||||||
|
async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<User>, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl EventPublisher for NoopPublisher {
|
||||||
|
async fn publish(&self, _: &DomainEvent) -> Result<(), DomainError> { Ok(()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn panic_ctx() -> AppContext {
|
||||||
|
AppContext {
|
||||||
|
repository: Arc::new(PanicRepo),
|
||||||
|
metadata_client: Arc::new(PanicMetadata),
|
||||||
|
poster_fetcher: Arc::new(PanicFetcher),
|
||||||
|
poster_storage: Arc::new(PanicStorage),
|
||||||
|
event_publisher: Arc::new(NoopPublisher),
|
||||||
|
auth_service: Arc::new(PanicAuth),
|
||||||
|
password_hasher: Arc::new(PanicHasher),
|
||||||
|
user_repository: Arc::new(PanicUserRepo),
|
||||||
|
config: AppConfig { allow_registration: false },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn review_logged_is_ignored() {
|
||||||
|
let handler = PosterSyncHandler::new(panic_ctx(), 3);
|
||||||
|
let event = DomainEvent::ReviewLogged {
|
||||||
|
review_id: ReviewId::generate(),
|
||||||
|
movie_id: MovieId::generate(),
|
||||||
|
user_id: UserId::generate(),
|
||||||
|
rating: Rating::new(4).unwrap(),
|
||||||
|
watched_at: chrono::NaiveDate::from_ymd_opt(2024, 1, 1).unwrap().and_hms_opt(0, 0, 0).unwrap(),
|
||||||
|
};
|
||||||
|
assert!(handler.handle(&event).await.is_ok());
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
use axum::{
|
use axum::{
|
||||||
extract::{FromRef, FromRequestParts},
|
extract::{FromRef, FromRequestParts},
|
||||||
http::{header::AUTHORIZATION, request::Parts},
|
http::{header, header::AUTHORIZATION, request::Parts},
|
||||||
|
response::{IntoResponse, Redirect},
|
||||||
};
|
};
|
||||||
use domain::{errors::DomainError, value_objects::UserId};
|
use domain::{errors::DomainError, value_objects::UserId};
|
||||||
|
|
||||||
@@ -36,6 +37,64 @@ where
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct OptionalCookieUser(pub Option<UserId>);
|
||||||
|
pub struct RequiredCookieUser(pub UserId);
|
||||||
|
|
||||||
|
fn extract_token_from_cookie(parts: &Parts) -> Option<String> {
|
||||||
|
parts
|
||||||
|
.headers
|
||||||
|
.get(header::COOKIE)
|
||||||
|
.and_then(|v| v.to_str().ok())
|
||||||
|
.and_then(|cookies| {
|
||||||
|
cookies
|
||||||
|
.split(';')
|
||||||
|
.find_map(|c| c.trim().strip_prefix("token=").map(str::to_string))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<S> FromRequestParts<S> for OptionalCookieUser
|
||||||
|
where
|
||||||
|
AppState: FromRef<S>,
|
||||||
|
S: Send + Sync,
|
||||||
|
{
|
||||||
|
type Rejection = std::convert::Infallible;
|
||||||
|
|
||||||
|
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
|
||||||
|
let app_state = AppState::from_ref(state);
|
||||||
|
let Some(token) = extract_token_from_cookie(parts) else {
|
||||||
|
return Ok(OptionalCookieUser(None));
|
||||||
|
};
|
||||||
|
let user_id = app_state
|
||||||
|
.app_ctx
|
||||||
|
.auth_service
|
||||||
|
.validate_token(&token)
|
||||||
|
.await
|
||||||
|
.ok();
|
||||||
|
Ok(OptionalCookieUser(user_id))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<S> FromRequestParts<S> for RequiredCookieUser
|
||||||
|
where
|
||||||
|
AppState: FromRef<S>,
|
||||||
|
S: Send + Sync,
|
||||||
|
{
|
||||||
|
type Rejection = axum::response::Response;
|
||||||
|
|
||||||
|
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
|
||||||
|
let app_state = AppState::from_ref(state);
|
||||||
|
let token = extract_token_from_cookie(parts)
|
||||||
|
.ok_or_else(|| Redirect::to("/login").into_response())?;
|
||||||
|
let user_id = app_state
|
||||||
|
.app_ctx
|
||||||
|
.auth_service
|
||||||
|
.validate_token(&token)
|
||||||
|
.await
|
||||||
|
.map_err(|_| Redirect::to("/login").into_response())?;
|
||||||
|
Ok(RequiredCookieUser(user_id))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
@@ -72,11 +131,17 @@ mod tests {
|
|||||||
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
||||||
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||||
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||||
|
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||||
}
|
}
|
||||||
|
|
||||||
struct PanicRenderer;
|
struct PanicRenderer;
|
||||||
impl crate::ports::HtmlRenderer for PanicRenderer {
|
impl crate::ports::HtmlRenderer for PanicRenderer {
|
||||||
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>) -> Result<String, String> { panic!() }
|
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
|
||||||
|
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
|
||||||
|
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
|
||||||
|
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
|
||||||
}
|
}
|
||||||
|
|
||||||
struct PanicRssRenderer;
|
struct PanicRssRenderer;
|
||||||
@@ -91,7 +156,7 @@ mod tests {
|
|||||||
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||||
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
||||||
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
|
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
|
||||||
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } }
|
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } }
|
||||||
|
|
||||||
let state = crate::state::AppState {
|
let state = crate::state::AppState {
|
||||||
app_ctx: AppContext {
|
app_ctx: AppContext {
|
||||||
@@ -122,4 +187,201 @@ mod tests {
|
|||||||
|
|
||||||
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
|
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Reusable helpers for cookie extractor tests
|
||||||
|
async fn optional_cookie_handler(user: OptionalCookieUser) -> String {
|
||||||
|
match user.0 {
|
||||||
|
Some(id) => id.value().to_string(),
|
||||||
|
None => "none".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn required_cookie_handler(user: RequiredCookieUser) -> String {
|
||||||
|
user.0.value().to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn test_router_optional(state: crate::state::AppState) -> Router {
|
||||||
|
Router::new()
|
||||||
|
.route("/optional", get(optional_cookie_handler))
|
||||||
|
.with_state(state)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn test_router_required(state: crate::state::AppState) -> Router {
|
||||||
|
Router::new()
|
||||||
|
.route("/required", get(required_cookie_handler))
|
||||||
|
.with_state(state)
|
||||||
|
}
|
||||||
|
|
||||||
|
struct RejectingAuth;
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl domain::ports::AuthService for RejectingAuth {
|
||||||
|
async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> {
|
||||||
|
Err(domain::errors::DomainError::Unauthorized("bad token".into()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn panic_state() -> crate::state::AppState {
|
||||||
|
use std::sync::Arc;
|
||||||
|
use application::context::AppContext;
|
||||||
|
struct PanicRepo2;
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl domain::ports::MovieRepository for PanicRepo2 {
|
||||||
|
async fn get_movie_by_external_id(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn get_movie_by_id(&self, _: &domain::value_objects::MovieId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn get_movies_by_title_and_year(&self, _: &domain::value_objects::MovieTitle, _: &domain::value_objects::ReleaseYear) -> Result<Vec<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn upsert_movie(&self, _: &domain::models::Movie) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||||
|
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||||
|
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||||
|
}
|
||||||
|
struct PanicMeta2; struct PanicFetcher2; struct PanicStorage2; struct PanicEvent2; struct PanicHasher2; struct PanicUserRepo2;
|
||||||
|
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta2 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher2 { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage2 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent2 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher2 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::AuthService for PanicAuth2 { async fn generate_token(&self, _: &domain::value_objects::UserId) -> Result<domain::ports::GeneratedToken, domain::errors::DomainError> { panic!() } async fn validate_token(&self, _: &str) -> Result<domain::value_objects::UserId, domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo2 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } }
|
||||||
|
struct PanicRenderer2;
|
||||||
|
impl crate::ports::HtmlRenderer for PanicRenderer2 {
|
||||||
|
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
|
||||||
|
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
|
||||||
|
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
|
||||||
|
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
|
||||||
|
}
|
||||||
|
struct PanicRssRenderer2;
|
||||||
|
impl crate::ports::RssFeedRenderer for PanicRssRenderer2 {
|
||||||
|
fn render_feed(&self, _: &[domain::models::DiaryEntry]) -> Result<String, String> { panic!() }
|
||||||
|
}
|
||||||
|
struct PanicAuth2;
|
||||||
|
crate::state::AppState {
|
||||||
|
app_ctx: AppContext {
|
||||||
|
repository: Arc::new(PanicRepo2),
|
||||||
|
metadata_client: Arc::new(PanicMeta2),
|
||||||
|
poster_fetcher: Arc::new(PanicFetcher2),
|
||||||
|
poster_storage: Arc::new(PanicStorage2),
|
||||||
|
event_publisher: Arc::new(PanicEvent2),
|
||||||
|
auth_service: Arc::new(PanicAuth2),
|
||||||
|
password_hasher: Arc::new(PanicHasher2),
|
||||||
|
user_repository: Arc::new(PanicUserRepo2),
|
||||||
|
config: application::config::AppConfig { allow_registration: false },
|
||||||
|
},
|
||||||
|
html_renderer: Arc::new(PanicRenderer2),
|
||||||
|
rss_renderer: Arc::new(PanicRssRenderer2),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rejecting_state() -> crate::state::AppState {
|
||||||
|
use std::sync::Arc;
|
||||||
|
use application::context::AppContext;
|
||||||
|
struct PanicRepo3;
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl domain::ports::MovieRepository for PanicRepo3 {
|
||||||
|
async fn get_movie_by_external_id(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn get_movie_by_id(&self, _: &domain::value_objects::MovieId) -> Result<Option<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn get_movies_by_title_and_year(&self, _: &domain::value_objects::MovieTitle, _: &domain::value_objects::ReleaseYear) -> Result<Vec<domain::models::Movie>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn upsert_movie(&self, _: &domain::models::Movie) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||||
|
async fn save_review(&self, _: &domain::models::Review) -> Result<domain::events::DomainEvent, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn query_diary(&self, _: &domain::models::DiaryFilter) -> Result<domain::models::collections::Paginated<domain::models::DiaryEntry>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn get_review_history(&self, _: &domain::value_objects::MovieId) -> Result<domain::models::ReviewHistory, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn get_review_by_id(&self, _: &domain::value_objects::ReviewId) -> Result<Option<domain::models::Review>, domain::errors::DomainError> { panic!() }
|
||||||
|
async fn delete_review(&self, _: &domain::value_objects::ReviewId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||||
|
async fn delete_movie(&self, _: &domain::value_objects::MovieId) -> Result<(), domain::errors::DomainError> { panic!() }
|
||||||
|
}
|
||||||
|
struct PanicMeta3; struct PanicFetcher3; struct PanicStorage3; struct PanicEvent3; struct PanicHasher3; struct PanicUserRepo3;
|
||||||
|
#[async_trait::async_trait] impl domain::ports::MetadataClient for PanicMeta3 { async fn fetch_movie_metadata(&self, _: &domain::ports::MetadataSearchCriteria) -> Result<domain::models::Movie, domain::errors::DomainError> { panic!() } async fn get_poster_url(&self, _: &domain::value_objects::ExternalMetadataId) -> Result<Option<domain::value_objects::PosterUrl>, domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::PosterFetcherClient for PanicFetcher3 { async fn fetch_poster_bytes(&self, _: &domain::value_objects::PosterUrl) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::PosterStorage for PanicStorage3 { async fn store_poster(&self, _: &domain::value_objects::MovieId, _: &[u8]) -> Result<domain::value_objects::PosterPath, domain::errors::DomainError> { panic!() } async fn get_poster(&self, _: &domain::value_objects::PosterPath) -> Result<Vec<u8>, domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::EventPublisher for PanicEvent3 { async fn publish(&self, _: &domain::events::DomainEvent) -> Result<(), domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::PasswordHasher for PanicHasher3 { async fn hash(&self, _: &str) -> Result<domain::value_objects::PasswordHash, domain::errors::DomainError> { panic!() } async fn verify(&self, _: &str, _: &domain::value_objects::PasswordHash) -> Result<bool, domain::errors::DomainError> { panic!() } }
|
||||||
|
#[async_trait::async_trait] impl domain::ports::UserRepository for PanicUserRepo3 { async fn find_by_email(&self, _: &domain::value_objects::Email) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } async fn save(&self, _: &domain::models::User) -> Result<(), domain::errors::DomainError> { panic!() } async fn find_by_id(&self, _: &domain::value_objects::UserId) -> Result<Option<domain::models::User>, domain::errors::DomainError> { panic!() } }
|
||||||
|
struct PanicRenderer3;
|
||||||
|
impl crate::ports::HtmlRenderer for PanicRenderer3 {
|
||||||
|
fn render_diary_page(&self, _: &domain::models::collections::Paginated<domain::models::DiaryEntry>, _: application::ports::HtmlPageContext) -> Result<String, String> { panic!() }
|
||||||
|
fn render_login_page(&self, _: application::ports::LoginPageData<'_>) -> Result<String, String> { panic!() }
|
||||||
|
fn render_register_page(&self, _: application::ports::RegisterPageData<'_>) -> Result<String, String> { panic!() }
|
||||||
|
fn render_new_review_page(&self, _: application::ports::NewReviewPageData<'_>) -> Result<String, String> { panic!() }
|
||||||
|
}
|
||||||
|
struct PanicRssRenderer3;
|
||||||
|
impl crate::ports::RssFeedRenderer for PanicRssRenderer3 {
|
||||||
|
fn render_feed(&self, _: &[domain::models::DiaryEntry]) -> Result<String, String> { panic!() }
|
||||||
|
}
|
||||||
|
crate::state::AppState {
|
||||||
|
app_ctx: AppContext {
|
||||||
|
repository: Arc::new(PanicRepo3),
|
||||||
|
metadata_client: Arc::new(PanicMeta3),
|
||||||
|
poster_fetcher: Arc::new(PanicFetcher3),
|
||||||
|
poster_storage: Arc::new(PanicStorage3),
|
||||||
|
event_publisher: Arc::new(PanicEvent3),
|
||||||
|
auth_service: Arc::new(RejectingAuth),
|
||||||
|
password_hasher: Arc::new(PanicHasher3),
|
||||||
|
user_repository: Arc::new(PanicUserRepo3),
|
||||||
|
config: application::config::AppConfig { allow_registration: false },
|
||||||
|
},
|
||||||
|
html_renderer: Arc::new(PanicRenderer3),
|
||||||
|
rss_renderer: Arc::new(PanicRssRenderer3),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn optional_cookie_user_returns_none_without_cookie() {
|
||||||
|
let app = test_router_optional(panic_state());
|
||||||
|
let response = app
|
||||||
|
.oneshot(Request::builder().uri("/optional").body(Body::empty()).unwrap())
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(response.status(), StatusCode::OK);
|
||||||
|
let body = axum::body::to_bytes(response.into_body(), usize::MAX).await.unwrap();
|
||||||
|
assert_eq!(&body[..], b"none");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn optional_cookie_user_returns_none_with_invalid_token() {
|
||||||
|
let app = test_router_optional(rejecting_state());
|
||||||
|
let response = app
|
||||||
|
.oneshot(
|
||||||
|
Request::builder()
|
||||||
|
.uri("/optional")
|
||||||
|
.header("cookie", "token=bad.token.here")
|
||||||
|
.body(Body::empty())
|
||||||
|
.unwrap(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(response.status(), StatusCode::OK);
|
||||||
|
let body = axum::body::to_bytes(response.into_body(), usize::MAX).await.unwrap();
|
||||||
|
assert_eq!(&body[..], b"none");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn required_cookie_user_redirects_without_cookie() {
|
||||||
|
let app = test_router_required(panic_state());
|
||||||
|
let response = app
|
||||||
|
.oneshot(Request::builder().uri("/required").body(Body::empty()).unwrap())
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(response.status(), StatusCode::SEE_OTHER);
|
||||||
|
assert_eq!(response.headers().get("location").unwrap(), "/login");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn required_cookie_user_redirects_with_invalid_token() {
|
||||||
|
let app = test_router_required(rejecting_state());
|
||||||
|
let response = app
|
||||||
|
.oneshot(
|
||||||
|
Request::builder()
|
||||||
|
.uri("/required")
|
||||||
|
.header("cookie", "token=bad.token.here")
|
||||||
|
.body(Body::empty())
|
||||||
|
.unwrap(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(response.status(), StatusCode::SEE_OTHER);
|
||||||
|
assert_eq!(response.headers().get("location").unwrap(), "/login");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,26 +1,66 @@
|
|||||||
pub mod html {
|
pub mod html {
|
||||||
use axum::{
|
use axum::{
|
||||||
extract::{Query, State},
|
extract::{Path, Query, State},
|
||||||
|
http::{HeaderValue, StatusCode, header::SET_COOKIE},
|
||||||
response::{Html, IntoResponse, Redirect},
|
response::{Html, IntoResponse, Redirect},
|
||||||
Form,
|
Form,
|
||||||
};
|
};
|
||||||
use chrono::NaiveDateTime;
|
use chrono::{NaiveDateTime, Utc};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
use application::{
|
use application::{
|
||||||
commands::LogReviewCommand,
|
commands::{DeleteReviewCommand, LoginCommand, LogReviewCommand, RegisterCommand},
|
||||||
|
ports::{HtmlPageContext, LoginPageData, NewReviewPageData, RegisterPageData},
|
||||||
queries::GetDiaryQuery,
|
queries::GetDiaryQuery,
|
||||||
use_cases::{get_diary, log_review},
|
use_cases::{delete_review, get_diary, log_review, login as login_uc, register as register_uc},
|
||||||
};
|
};
|
||||||
use domain::{errors::DomainError, models::SortDirection};
|
use domain::{errors::DomainError, models::SortDirection, value_objects::UserId};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
dtos::{DiaryQueryParams, LogReviewForm},
|
dtos::{DiaryQueryParams, ErrorQuery, LoginForm, LogReviewForm, RegisterForm},
|
||||||
errors::ApiError,
|
errors::ApiError,
|
||||||
extractors::AuthenticatedUser,
|
extractors::{OptionalCookieUser, RequiredCookieUser},
|
||||||
state::AppState,
|
state::AppState,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub async fn get_diary_page(
|
async fn build_page_context(state: &AppState, user_id: Option<UserId>) -> HtmlPageContext {
|
||||||
|
let uuid = user_id.as_ref().map(|u| u.value());
|
||||||
|
let user_email = if let Some(ref id) = user_id {
|
||||||
|
state
|
||||||
|
.app_ctx
|
||||||
|
.user_repository
|
||||||
|
.find_by_id(id)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten()
|
||||||
|
.map(|u| u.email().value().to_string())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
HtmlPageContext {
|
||||||
|
user_email,
|
||||||
|
user_id: uuid,
|
||||||
|
register_enabled: state.app_ctx.config.allow_registration,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn encode_error(msg: &str) -> String {
|
||||||
|
msg.replace(' ', "+")
|
||||||
|
.replace('&', "%26")
|
||||||
|
.replace('=', "%3D")
|
||||||
|
.replace('"', "%22")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_cookie_header(token: &str, max_age: i64) -> (axum::http::HeaderName, HeaderValue) {
|
||||||
|
let val = format!(
|
||||||
|
"token={}; HttpOnly; Path=/; SameSite=Lax; Max-Age={}",
|
||||||
|
token, max_age
|
||||||
|
);
|
||||||
|
(SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_index(
|
||||||
|
OptionalCookieUser(user_id): OptionalCookieUser,
|
||||||
State(state): State<AppState>,
|
State(state): State<AppState>,
|
||||||
Query(params): Query<DiaryQueryParams>,
|
Query(params): Query<DiaryQueryParams>,
|
||||||
) -> Result<impl IntoResponse, ApiError> {
|
) -> Result<impl IntoResponse, ApiError> {
|
||||||
@@ -36,42 +76,221 @@ pub mod html {
|
|||||||
}),
|
}),
|
||||||
movie_id: params.movie_id,
|
movie_id: params.movie_id,
|
||||||
};
|
};
|
||||||
|
let ctx = build_page_context(&state, user_id).await;
|
||||||
let page = get_diary::execute(&state.app_ctx, query).await?;
|
let page = get_diary::execute(&state.app_ctx, query).await?;
|
||||||
let html = state
|
let html = state
|
||||||
.html_renderer
|
.html_renderer
|
||||||
.render_diary_page(&page)
|
.render_diary_page(&page, ctx)
|
||||||
.map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
|
.map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
|
||||||
|
|
||||||
Ok(Html(html))
|
Ok(Html(html))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn get_login_page(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
Query(params): Query<ErrorQuery>,
|
||||||
|
) -> impl IntoResponse {
|
||||||
|
let ctx = HtmlPageContext {
|
||||||
|
user_email: None,
|
||||||
|
user_id: None,
|
||||||
|
register_enabled: state.app_ctx.config.allow_registration,
|
||||||
|
};
|
||||||
|
let html = state
|
||||||
|
.html_renderer
|
||||||
|
.render_login_page(LoginPageData {
|
||||||
|
ctx,
|
||||||
|
error: params.error.as_deref(),
|
||||||
|
})
|
||||||
|
.expect("login template failed");
|
||||||
|
Html(html)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn post_login(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
Form(form): Form<LoginForm>,
|
||||||
|
) -> impl IntoResponse {
|
||||||
|
match login_uc::execute(
|
||||||
|
&state.app_ctx,
|
||||||
|
LoginCommand {
|
||||||
|
email: form.email,
|
||||||
|
password: form.password,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(result) => {
|
||||||
|
let max_age = (result.expires_at - Utc::now()).num_seconds().max(0);
|
||||||
|
let cookie = set_cookie_header(&result.token, max_age);
|
||||||
|
([cookie], Redirect::to("/")).into_response()
|
||||||
|
}
|
||||||
|
Err(_) => Redirect::to("/login?error=Invalid+credentials").into_response(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_logout() -> impl IntoResponse {
|
||||||
|
let cookie = (
|
||||||
|
SET_COOKIE,
|
||||||
|
HeaderValue::from_static("token=; HttpOnly; Path=/; SameSite=Lax; Max-Age=0"),
|
||||||
|
);
|
||||||
|
([cookie], Redirect::to("/")).into_response()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_register_page(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
Query(params): Query<ErrorQuery>,
|
||||||
|
) -> impl IntoResponse {
|
||||||
|
if !state.app_ctx.config.allow_registration {
|
||||||
|
return Redirect::to("/").into_response();
|
||||||
|
}
|
||||||
|
let ctx = HtmlPageContext {
|
||||||
|
user_email: None,
|
||||||
|
user_id: None,
|
||||||
|
register_enabled: true,
|
||||||
|
};
|
||||||
|
let html = state
|
||||||
|
.html_renderer
|
||||||
|
.render_register_page(RegisterPageData {
|
||||||
|
ctx,
|
||||||
|
error: params.error.as_deref(),
|
||||||
|
})
|
||||||
|
.expect("register template failed");
|
||||||
|
Html(html).into_response()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn post_register(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
Form(form): Form<RegisterForm>,
|
||||||
|
) -> impl IntoResponse {
|
||||||
|
if !state.app_ctx.config.allow_registration {
|
||||||
|
return Redirect::to("/").into_response();
|
||||||
|
}
|
||||||
|
let email = form.email.clone();
|
||||||
|
let password = form.password.clone();
|
||||||
|
match register_uc::execute(
|
||||||
|
&state.app_ctx,
|
||||||
|
RegisterCommand {
|
||||||
|
email: form.email,
|
||||||
|
password: form.password,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(_) => {
|
||||||
|
match login_uc::execute(&state.app_ctx, LoginCommand { email, password }).await {
|
||||||
|
Ok(result) => {
|
||||||
|
let max_age = (result.expires_at - Utc::now()).num_seconds().max(0);
|
||||||
|
let cookie = set_cookie_header(&result.token, max_age);
|
||||||
|
([cookie], Redirect::to("/")).into_response()
|
||||||
|
}
|
||||||
|
Err(_) => Redirect::to("/login").into_response(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
let msg = encode_error(&e.to_string());
|
||||||
|
Redirect::to(&format!("/register?error={}", msg)).into_response()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_new_review_page(
|
||||||
|
RequiredCookieUser(user_id): RequiredCookieUser,
|
||||||
|
State(state): State<AppState>,
|
||||||
|
Query(params): Query<ErrorQuery>,
|
||||||
|
) -> impl IntoResponse {
|
||||||
|
let ctx = build_page_context(&state, Some(user_id)).await;
|
||||||
|
let html = state
|
||||||
|
.html_renderer
|
||||||
|
.render_new_review_page(NewReviewPageData {
|
||||||
|
ctx,
|
||||||
|
error: params.error.as_deref(),
|
||||||
|
})
|
||||||
|
.expect("new_review template failed");
|
||||||
|
Html(html)
|
||||||
|
}
|
||||||
|
|
||||||
pub async fn post_review(
|
pub async fn post_review(
|
||||||
State(state): State<AppState>,
|
State(state): State<AppState>,
|
||||||
user: AuthenticatedUser,
|
RequiredCookieUser(user_id): RequiredCookieUser,
|
||||||
Form(form): Form<LogReviewForm>,
|
Form(form): Form<LogReviewForm>,
|
||||||
) -> Result<impl IntoResponse, ApiError> {
|
) -> impl IntoResponse {
|
||||||
let watched_at = NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M:%S")
|
let watched_at = NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M:%S")
|
||||||
.map_err(|_| {
|
.or_else(|_| NaiveDateTime::parse_from_str(&form.watched_at, "%Y-%m-%dT%H:%M"));
|
||||||
ApiError(DomainError::ValidationError(
|
|
||||||
"Invalid watched_at format, expected YYYY-MM-DDTHH:MM:SS".into(),
|
let watched_at = match watched_at {
|
||||||
))
|
Ok(dt) => dt,
|
||||||
})?;
|
Err(_) => {
|
||||||
|
return Redirect::to("/reviews/new?error=Invalid+date+format").into_response()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
let cmd = LogReviewCommand {
|
let cmd = LogReviewCommand {
|
||||||
external_metadata_id: form.external_metadata_id,
|
external_metadata_id: form.external_metadata_id.filter(|s| !s.trim().is_empty()),
|
||||||
manual_title: form.manual_title,
|
manual_title: form.manual_title,
|
||||||
manual_release_year: form.manual_release_year,
|
manual_release_year: form.manual_release_year,
|
||||||
manual_director: form.manual_director,
|
manual_director: form.manual_director,
|
||||||
user_id: user.0.value(),
|
user_id: user_id.value(),
|
||||||
rating: form.rating,
|
rating: form.rating,
|
||||||
comment: form.comment,
|
comment: form.comment,
|
||||||
watched_at,
|
watched_at,
|
||||||
};
|
};
|
||||||
|
|
||||||
log_review::execute(&state.app_ctx, cmd).await?;
|
match log_review::execute(&state.app_ctx, cmd).await {
|
||||||
|
Ok(_) => Redirect::to("/").into_response(),
|
||||||
|
Err(e) => {
|
||||||
|
let msg = encode_error(&e.to_string());
|
||||||
|
Redirect::to(&format!("/reviews/new?error={}", msg)).into_response()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Redirect::to("/diary"))
|
pub async fn post_delete_review(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
RequiredCookieUser(user_id): RequiredCookieUser,
|
||||||
|
Path(review_id): Path<Uuid>,
|
||||||
|
) -> impl IntoResponse {
|
||||||
|
let cmd = DeleteReviewCommand {
|
||||||
|
review_id,
|
||||||
|
requesting_user_id: user_id.value(),
|
||||||
|
};
|
||||||
|
match delete_review::execute(&state.app_ctx, cmd).await {
|
||||||
|
Ok(()) => Redirect::to("/").into_response(),
|
||||||
|
Err(DomainError::NotFound(_)) => StatusCode::NOT_FOUND.into_response(),
|
||||||
|
Err(DomainError::Unauthorized(_)) => StatusCode::FORBIDDEN.into_response(),
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("delete_review html error: {:?}", e);
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR.into_response()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub mod posters {
|
||||||
|
use axum::{
|
||||||
|
extract::{Path, State},
|
||||||
|
http::{StatusCode, header},
|
||||||
|
response::IntoResponse,
|
||||||
|
};
|
||||||
|
|
||||||
|
use domain::value_objects::PosterPath;
|
||||||
|
|
||||||
|
use crate::state::AppState;
|
||||||
|
|
||||||
|
pub async fn get_poster(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
Path(path): Path<String>,
|
||||||
|
) -> impl IntoResponse {
|
||||||
|
let poster_path = match PosterPath::new(path) {
|
||||||
|
Ok(p) => p,
|
||||||
|
Err(_) => return StatusCode::BAD_REQUEST.into_response(),
|
||||||
|
};
|
||||||
|
match state.app_ctx.poster_storage.get_poster(&poster_path).await {
|
||||||
|
Ok(bytes) => {
|
||||||
|
let mime = infer::get(&bytes)
|
||||||
|
.map(|t| t.mime_type())
|
||||||
|
.unwrap_or("application/octet-stream");
|
||||||
|
([(header::CONTENT_TYPE, mime)], bytes).into_response()
|
||||||
|
}
|
||||||
|
Err(_) => StatusCode::NOT_FOUND.into_response(),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -114,9 +333,9 @@ pub mod api {
|
|||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
use application::{
|
use application::{
|
||||||
commands::{LoginCommand, LogReviewCommand, RegisterCommand, SyncPosterCommand},
|
commands::{DeleteReviewCommand, LoginCommand, LogReviewCommand, RegisterCommand, SyncPosterCommand},
|
||||||
queries::{GetDiaryQuery, GetReviewHistoryQuery},
|
queries::{GetDiaryQuery, GetReviewHistoryQuery},
|
||||||
use_cases::{get_diary, get_review_history, log_review, login as login_uc, register as register_uc, sync_poster},
|
use_cases::{delete_review, get_diary, get_review_history, log_review, login as login_uc, register as register_uc, sync_poster},
|
||||||
};
|
};
|
||||||
use domain::{
|
use domain::{
|
||||||
errors::DomainError,
|
errors::DomainError,
|
||||||
@@ -197,7 +416,7 @@ pub mod api {
|
|||||||
})?;
|
})?;
|
||||||
|
|
||||||
let cmd = LogReviewCommand {
|
let cmd = LogReviewCommand {
|
||||||
external_metadata_id: req.external_metadata_id,
|
external_metadata_id: req.external_metadata_id.filter(|s| !s.trim().is_empty()),
|
||||||
manual_title: req.manual_title,
|
manual_title: req.manual_title,
|
||||||
manual_release_year: req.manual_release_year,
|
manual_release_year: req.manual_release_year,
|
||||||
manual_director: req.manual_director,
|
manual_director: req.manual_director,
|
||||||
@@ -275,6 +494,26 @@ pub mod api {
|
|||||||
Ok(StatusCode::CREATED)
|
Ok(StatusCode::CREATED)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn delete_review(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
AuthenticatedUser(user_id): AuthenticatedUser,
|
||||||
|
Path(review_id): Path<Uuid>,
|
||||||
|
) -> impl IntoResponse {
|
||||||
|
let cmd = DeleteReviewCommand {
|
||||||
|
review_id,
|
||||||
|
requesting_user_id: user_id.value(),
|
||||||
|
};
|
||||||
|
match delete_review::execute(&state.app_ctx, cmd).await {
|
||||||
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
||||||
|
Err(DomainError::NotFound(_)) => StatusCode::NOT_FOUND.into_response(),
|
||||||
|
Err(DomainError::Unauthorized(_)) => StatusCode::FORBIDDEN.into_response(),
|
||||||
|
Err(e) => {
|
||||||
|
tracing::error!("delete_review error: {:?}", e);
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR.into_response()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn movie_to_dto(movie: &Movie) -> MovieDto {
|
fn movie_to_dto(movie: &Movie) -> MovieDto {
|
||||||
MovieDto {
|
MovieDto {
|
||||||
id: movie.id().value(),
|
id: movie.id().value(),
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
pub mod event_handlers;
|
||||||
pub mod dtos;
|
pub mod dtos;
|
||||||
pub mod errors;
|
pub mod errors;
|
||||||
pub mod extractors;
|
pub mod extractors;
|
||||||
|
|||||||
@@ -1,9 +1,12 @@
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use async_trait::async_trait;
|
use event_publisher::{EventPublisherConfig, NoopEventPublisher, create_event_channel};
|
||||||
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
|
use presentation::event_handlers::PosterSyncHandler;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
use sqlx::SqlitePool;
|
use sqlx::SqlitePool;
|
||||||
|
use sqlx::sqlite::SqliteConnectOptions;
|
||||||
use tokio::net::TcpListener;
|
use tokio::net::TcpListener;
|
||||||
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
|
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
|
||||||
|
|
||||||
@@ -18,15 +21,6 @@ use template_askama::AskamaHtmlRenderer;
|
|||||||
|
|
||||||
use presentation::{routes, state::AppState};
|
use presentation::{routes, state::AppState};
|
||||||
|
|
||||||
struct StubEventPublisher;
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl EventPublisher for StubEventPublisher {
|
|
||||||
async fn publish(&self, _event: &DomainEvent) -> Result<(), DomainError> {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() -> anyhow::Result<()> {
|
async fn main() -> anyhow::Result<()> {
|
||||||
dotenvy::dotenv().ok();
|
dotenvy::dotenv().ok();
|
||||||
@@ -51,7 +45,11 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
|
|||||||
let app_config = AppConfig::from_env();
|
let app_config = AppConfig::from_env();
|
||||||
let omdb_api_key = std::env::var("OMDB_API_KEY").context("OMDB_API_KEY must be set")?;
|
let omdb_api_key = std::env::var("OMDB_API_KEY").context("OMDB_API_KEY must be set")?;
|
||||||
|
|
||||||
let pool = SqlitePool::connect("sqlite://reviews.db")
|
let database_url = std::env::var("DATABASE_URL").context("DATABASE_URL must be set")?;
|
||||||
|
let opts = SqliteConnectOptions::from_str(&database_url)
|
||||||
|
.context("Invalid DATABASE_URL")?
|
||||||
|
.create_if_missing(true);
|
||||||
|
let pool = SqlitePool::connect_with(opts)
|
||||||
.await
|
.await
|
||||||
.context("Failed to connect to SQLite database")?;
|
.context("Failed to connect to SQLite database")?;
|
||||||
|
|
||||||
@@ -62,17 +60,48 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
|
|||||||
.map_err(|e| anyhow::anyhow!("{}", e))
|
.map_err(|e| anyhow::anyhow!("{}", e))
|
||||||
.context("Database migration failed")?;
|
.context("Database migration failed")?;
|
||||||
|
|
||||||
let user_repo = SqliteUserRepository::new(pool);
|
use domain::ports::{
|
||||||
|
AuthService, MetadataClient, MovieRepository, PasswordHasher,
|
||||||
|
PosterFetcherClient, PosterStorage, UserRepository,
|
||||||
|
};
|
||||||
|
let repository: Arc<dyn MovieRepository> = Arc::new(movie_repo);
|
||||||
|
let user_repository: Arc<dyn UserRepository> = Arc::new(SqliteUserRepository::new(pool));
|
||||||
|
let metadata_client: Arc<dyn MetadataClient> = Arc::new(MetadataClientImpl::new_omdb(omdb_api_key));
|
||||||
|
let poster_fetcher: Arc<dyn PosterFetcherClient> = Arc::new(ReqwestPosterFetcher::new(PosterFetcherConfig::from_env())?);
|
||||||
|
let poster_storage: Arc<dyn PosterStorage> = Arc::new(PosterStorageAdapter::from_config(storage_config)?);
|
||||||
|
let auth_service: Arc<dyn AuthService> = Arc::new(JwtAuthService::new(auth_config));
|
||||||
|
let password_hasher: Arc<dyn PasswordHasher> = Arc::new(Argon2PasswordHasher);
|
||||||
|
|
||||||
|
// Build a context for the poster handler. sync_poster doesn't publish events,
|
||||||
|
// so a noop publisher here is safe and avoids a circular dependency.
|
||||||
|
let handler_ctx = AppContext {
|
||||||
|
repository: Arc::clone(&repository),
|
||||||
|
metadata_client: Arc::clone(&metadata_client),
|
||||||
|
poster_fetcher: Arc::clone(&poster_fetcher),
|
||||||
|
poster_storage: Arc::clone(&poster_storage),
|
||||||
|
event_publisher: Arc::new(NoopEventPublisher),
|
||||||
|
auth_service: Arc::clone(&auth_service),
|
||||||
|
password_hasher: Arc::clone(&password_hasher),
|
||||||
|
user_repository: Arc::clone(&user_repository),
|
||||||
|
config: app_config.clone(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let poster_handler = PosterSyncHandler::new(handler_ctx, 3);
|
||||||
|
let (event_publisher, event_worker) = create_event_channel(
|
||||||
|
EventPublisherConfig::from_env(),
|
||||||
|
vec![Box::new(poster_handler)],
|
||||||
|
);
|
||||||
|
tokio::spawn(event_worker.run());
|
||||||
|
|
||||||
let app_ctx = AppContext {
|
let app_ctx = AppContext {
|
||||||
repository: Arc::new(movie_repo),
|
repository,
|
||||||
metadata_client: Arc::new(MetadataClientImpl::new_omdb(omdb_api_key)),
|
metadata_client,
|
||||||
poster_fetcher: Arc::new(ReqwestPosterFetcher::new(PosterFetcherConfig::from_env())?),
|
poster_fetcher,
|
||||||
poster_storage: Arc::new(PosterStorageAdapter::from_config(storage_config)?),
|
poster_storage,
|
||||||
event_publisher: Arc::new(StubEventPublisher),
|
event_publisher: Arc::new(event_publisher),
|
||||||
auth_service: Arc::new(JwtAuthService::new(auth_config)),
|
auth_service,
|
||||||
password_hasher: Arc::new(Argon2PasswordHasher),
|
password_hasher,
|
||||||
user_repository: Arc::new(user_repo),
|
user_repository,
|
||||||
config: app_config,
|
config: app_config,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -14,8 +14,22 @@ pub fn build_router(state: AppState) -> Router {
|
|||||||
|
|
||||||
fn html_routes() -> Router<AppState> {
|
fn html_routes() -> Router<AppState> {
|
||||||
Router::new()
|
Router::new()
|
||||||
.route("/diary", routing::get(handlers::html::get_diary_page))
|
.route("/", routing::get(handlers::html::get_index))
|
||||||
|
.route(
|
||||||
|
"/login",
|
||||||
|
routing::get(handlers::html::get_login_page)
|
||||||
|
.post(handlers::html::post_login),
|
||||||
|
)
|
||||||
|
.route("/logout", routing::get(handlers::html::get_logout))
|
||||||
|
.route(
|
||||||
|
"/register",
|
||||||
|
routing::get(handlers::html::get_register_page)
|
||||||
|
.post(handlers::html::post_register),
|
||||||
|
)
|
||||||
|
.route("/reviews/new", routing::get(handlers::html::get_new_review_page))
|
||||||
.route("/reviews", routing::post(handlers::html::post_review))
|
.route("/reviews", routing::post(handlers::html::post_review))
|
||||||
|
.route("/reviews/{id}/delete", routing::post(handlers::html::post_delete_review))
|
||||||
|
.route("/posters/{path}", routing::get(handlers::posters::get_poster))
|
||||||
.route("/feed.rss", routing::get(handlers::rss::get_feed))
|
.route("/feed.rss", routing::get(handlers::rss::get_feed))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -29,6 +43,7 @@ fn api_routes() -> Router<AppState> {
|
|||||||
routing::get(handlers::api::get_review_history),
|
routing::get(handlers::api::get_review_history),
|
||||||
)
|
)
|
||||||
.route("/reviews", routing::post(handlers::api::post_review))
|
.route("/reviews", routing::post(handlers::api::post_review))
|
||||||
|
.route("/reviews/{id}", routing::delete(handlers::api::delete_review))
|
||||||
.route(
|
.route(
|
||||||
"/movies/{id}/sync-poster",
|
"/movies/{id}/sync-poster",
|
||||||
routing::post(handlers::api::sync_poster),
|
routing::post(handlers::api::sync_poster),
|
||||||
|
|||||||
@@ -84,6 +84,7 @@ struct NobodyUserRepo;
|
|||||||
impl UserRepository for NobodyUserRepo {
|
impl UserRepository for NobodyUserRepo {
|
||||||
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { Ok(None) }
|
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { Ok(None) }
|
||||||
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!() }
|
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!() }
|
||||||
|
async fn find_by_id(&self, _: &UserId) -> Result<Option<User>, DomainError> { panic!() }
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn test_app() -> Router {
|
async fn test_app() -> Router {
|
||||||
|
|||||||
620
docs/superpowers/plans/2026-05-04-event-driven-poster-sync.md
Normal file
620
docs/superpowers/plans/2026-05-04-event-driven-poster-sync.md
Normal file
@@ -0,0 +1,620 @@
|
|||||||
|
# Event-Driven Poster Sync Implementation Plan
|
||||||
|
|
||||||
|
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||||
|
|
||||||
|
**Goal:** Add an `EventHandler` trait to the event-publisher adapter and implement `PosterSyncHandler` so that a `MovieDiscovered` event automatically triggers the existing `sync_poster` use case with exponential-backoff retry.
|
||||||
|
|
||||||
|
**Architecture:** `EventWorker` gains a `Vec<Box<dyn EventHandler>>` and fans out each received event to all registered handlers sequentially. `PosterSyncHandler` lives in the `presentation` crate (composition root), holds `AppContext`, and calls `sync_poster::execute` on `MovieDiscovered` events — ignoring all others. Retry is up to 3 retries (4 total attempts) with delays 1s → 2s → 4s.
|
||||||
|
|
||||||
|
**Tech Stack:** Rust, tokio::sync::mpsc, async-trait, existing `sync_poster` use case
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File Map
|
||||||
|
|
||||||
|
| File | Status | Responsibility |
|
||||||
|
|---|---|---|
|
||||||
|
| `crates/adapters/event-publisher/src/lib.rs` | Modify | Add `EventHandler` trait; extend `EventWorker` and `create_event_channel` |
|
||||||
|
| `crates/application/src/commands.rs` | Modify | Add `#[derive(Clone)]` to `SyncPosterCommand` |
|
||||||
|
| `crates/presentation/src/lib.rs` | Modify | Expose `pub mod event_handlers` |
|
||||||
|
| `crates/presentation/src/event_handlers.rs` | Create | `PosterSyncHandler` implementation |
|
||||||
|
| `crates/presentation/src/main.rs` | Modify | Wire `PosterSyncHandler` into `create_event_channel` |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Task 1: Add `EventHandler` trait and update `EventWorker`
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `crates/adapters/event-publisher/src/lib.rs`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add to the bottom of `crates/adapters/event-publisher/src/lib.rs`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use std::sync::{Arc, Mutex};
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use domain::{
|
||||||
|
errors::DomainError,
|
||||||
|
events::DomainEvent,
|
||||||
|
value_objects::{ExternalMetadataId, MovieId},
|
||||||
|
};
|
||||||
|
|
||||||
|
struct RecordingHandler {
|
||||||
|
calls: Arc<Mutex<Vec<String>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl EventHandler for RecordingHandler {
|
||||||
|
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
let label = match event {
|
||||||
|
DomainEvent::MovieDiscovered { .. } => "movie_discovered",
|
||||||
|
DomainEvent::ReviewLogged { .. } => "review_logged",
|
||||||
|
};
|
||||||
|
self.calls.lock().unwrap().push(label.to_string());
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn single_handler_receives_event() {
|
||||||
|
let calls = Arc::new(Mutex::new(vec![]));
|
||||||
|
let handler = RecordingHandler { calls: Arc::clone(&calls) };
|
||||||
|
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||||
|
let (publisher, worker) = create_event_channel(config, vec![Box::new(handler)]);
|
||||||
|
|
||||||
|
tokio::spawn(worker.run());
|
||||||
|
|
||||||
|
let event = DomainEvent::MovieDiscovered {
|
||||||
|
movie_id: MovieId::generate(),
|
||||||
|
external_metadata_id: ExternalMetadataId::new("tt1234567".into()).unwrap(),
|
||||||
|
};
|
||||||
|
publisher.publish(&event).await.unwrap();
|
||||||
|
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
||||||
|
|
||||||
|
assert_eq!(*calls.lock().unwrap(), vec!["movie_discovered"]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn multiple_handlers_all_receive_event() {
|
||||||
|
let calls1 = Arc::new(Mutex::new(vec![]));
|
||||||
|
let calls2 = Arc::new(Mutex::new(vec![]));
|
||||||
|
let handler1 = RecordingHandler { calls: Arc::clone(&calls1) };
|
||||||
|
let handler2 = RecordingHandler { calls: Arc::clone(&calls2) };
|
||||||
|
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||||
|
let (publisher, worker) = create_event_channel(
|
||||||
|
config,
|
||||||
|
vec![Box::new(handler1), Box::new(handler2)],
|
||||||
|
);
|
||||||
|
|
||||||
|
tokio::spawn(worker.run());
|
||||||
|
|
||||||
|
let event = DomainEvent::MovieDiscovered {
|
||||||
|
movie_id: MovieId::generate(),
|
||||||
|
external_metadata_id: ExternalMetadataId::new("tt9999999".into()).unwrap(),
|
||||||
|
};
|
||||||
|
publisher.publish(&event).await.unwrap();
|
||||||
|
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
||||||
|
|
||||||
|
assert_eq!(calls1.lock().unwrap().len(), 1);
|
||||||
|
assert_eq!(calls2.lock().unwrap().len(), 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn handler_error_does_not_stop_worker() {
|
||||||
|
struct FailingHandler;
|
||||||
|
#[async_trait]
|
||||||
|
impl EventHandler for FailingHandler {
|
||||||
|
async fn handle(&self, _: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
Err(DomainError::InfrastructureError("boom".into()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let calls = Arc::new(Mutex::new(vec![]));
|
||||||
|
let good = RecordingHandler { calls: Arc::clone(&calls) };
|
||||||
|
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||||
|
let (publisher, worker) = create_event_channel(
|
||||||
|
config,
|
||||||
|
vec![Box::new(FailingHandler), Box::new(good)],
|
||||||
|
);
|
||||||
|
|
||||||
|
tokio::spawn(worker.run());
|
||||||
|
|
||||||
|
let event = DomainEvent::MovieDiscovered {
|
||||||
|
movie_id: MovieId::generate(),
|
||||||
|
external_metadata_id: ExternalMetadataId::new("tt0000001".into()).unwrap(),
|
||||||
|
};
|
||||||
|
publisher.publish(&event).await.unwrap();
|
||||||
|
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
||||||
|
|
||||||
|
// good handler still ran despite failing handler before it
|
||||||
|
assert_eq!(calls.lock().unwrap().len(), 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 2: Run tests to verify they fail**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo test -p event-publisher 2>&1 | tail -20
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: compile errors — `EventHandler` not defined, `create_event_channel` wrong arity.
|
||||||
|
|
||||||
|
- [ ] **Step 3: Replace `lib.rs` with updated implementation**
|
||||||
|
|
||||||
|
Replace the full content of `crates/adapters/event-publisher/src/lib.rs` with:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
|
||||||
|
pub struct EventPublisherConfig {
|
||||||
|
pub channel_buffer: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EventPublisherConfig {
|
||||||
|
pub fn from_env() -> Self {
|
||||||
|
let channel_buffer = std::env::var("EVENT_CHANNEL_BUFFER")
|
||||||
|
.ok()
|
||||||
|
.and_then(|v| v.parse().ok())
|
||||||
|
.unwrap_or(128);
|
||||||
|
Self { channel_buffer }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
pub trait EventHandler: Send + Sync {
|
||||||
|
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct ChannelEventPublisher {
|
||||||
|
sender: mpsc::Sender<DomainEvent>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl EventPublisher for ChannelEventPublisher {
|
||||||
|
async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
self.sender
|
||||||
|
.send(event.clone())
|
||||||
|
.await
|
||||||
|
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct EventWorker {
|
||||||
|
receiver: mpsc::Receiver<DomainEvent>,
|
||||||
|
handlers: Vec<Box<dyn EventHandler>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EventWorker {
|
||||||
|
pub async fn run(mut self) {
|
||||||
|
while let Some(event) = self.receiver.recv().await {
|
||||||
|
match &event {
|
||||||
|
DomainEvent::ReviewLogged {
|
||||||
|
review_id,
|
||||||
|
movie_id,
|
||||||
|
user_id,
|
||||||
|
rating,
|
||||||
|
watched_at,
|
||||||
|
} => {
|
||||||
|
tracing::info!(
|
||||||
|
review_id = %review_id.value(),
|
||||||
|
movie_id = %movie_id.value(),
|
||||||
|
user_id = %user_id.value(),
|
||||||
|
rating = rating.value(),
|
||||||
|
watched_at = %watched_at,
|
||||||
|
"event: review_logged"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
DomainEvent::MovieDiscovered {
|
||||||
|
movie_id,
|
||||||
|
external_metadata_id,
|
||||||
|
} => {
|
||||||
|
tracing::info!(
|
||||||
|
movie_id = %movie_id.value(),
|
||||||
|
external_id = external_metadata_id.value(),
|
||||||
|
"event: movie_discovered"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for handler in &self.handlers {
|
||||||
|
if let Err(e) = handler.handle(&event).await {
|
||||||
|
tracing::error!("event handler error: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tracing::info!("event worker shut down");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_event_channel(
|
||||||
|
config: EventPublisherConfig,
|
||||||
|
handlers: Vec<Box<dyn EventHandler>>,
|
||||||
|
) -> (ChannelEventPublisher, EventWorker) {
|
||||||
|
let (tx, rx) = mpsc::channel(config.channel_buffer);
|
||||||
|
(
|
||||||
|
ChannelEventPublisher { sender: tx },
|
||||||
|
EventWorker {
|
||||||
|
receiver: rx,
|
||||||
|
handlers,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
// paste the test module from Step 1 here
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 4: Run tests to verify they pass**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo test -p event-publisher 2>&1 | tail -20
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: `test result: ok. 3 passed`
|
||||||
|
|
||||||
|
- [ ] **Step 5: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add crates/adapters/event-publisher/src/lib.rs
|
||||||
|
git commit -m "feat(event-publisher): add EventHandler trait and fan-out in EventWorker"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Task 2: Derive `Clone` on `SyncPosterCommand`
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `crates/application/src/commands.rs`
|
||||||
|
|
||||||
|
The `PosterSyncHandler` retry loop reconstructs the command on each attempt, which requires `Clone` on `String` (already impl'd) and `Uuid` (Copy) — but it's cleaner to `#[derive(Clone)]` directly.
|
||||||
|
|
||||||
|
- [ ] **Step 1: Add `#[derive(Clone)]` to `SyncPosterCommand`**
|
||||||
|
|
||||||
|
In `crates/application/src/commands.rs`, find the `SyncPosterCommand` struct (line ~17) and add the derive:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct SyncPosterCommand {
|
||||||
|
pub movie_id: Uuid,
|
||||||
|
pub external_metadata_id: String,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 2: Verify it compiles**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo build -p application 2>&1 | tail -10
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: clean build.
|
||||||
|
|
||||||
|
- [ ] **Step 3: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add crates/application/src/commands.rs
|
||||||
|
git commit -m "feat(application): derive Clone on SyncPosterCommand"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Task 3: Implement `PosterSyncHandler`
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Create: `crates/presentation/src/event_handlers.rs`
|
||||||
|
- Modify: `crates/presentation/src/lib.rs`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Write the failing test first — create `event_handlers.rs` with tests only**
|
||||||
|
|
||||||
|
Create `crates/presentation/src/event_handlers.rs`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
use application::{commands::SyncPosterCommand, context::AppContext, use_cases::sync_poster};
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use domain::{errors::DomainError, events::DomainEvent};
|
||||||
|
use event_publisher::EventHandler;
|
||||||
|
|
||||||
|
pub struct PosterSyncHandler {
|
||||||
|
ctx: AppContext,
|
||||||
|
max_retries: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PosterSyncHandler {
|
||||||
|
pub fn new(ctx: AppContext, max_retries: u32) -> Self {
|
||||||
|
Self { ctx, max_retries }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl EventHandler for PosterSyncHandler {
|
||||||
|
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use application::config::AppConfig;
|
||||||
|
use domain::{
|
||||||
|
errors::DomainError,
|
||||||
|
events::DomainEvent,
|
||||||
|
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
|
||||||
|
ports::{
|
||||||
|
AuthService, EventPublisher, GeneratedToken, MetadataClient, MetadataSearchCriteria,
|
||||||
|
MovieRepository, PasswordHasher, PosterFetcherClient, PosterStorage, UserRepository,
|
||||||
|
},
|
||||||
|
value_objects::{
|
||||||
|
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
|
||||||
|
Rating, ReleaseYear, ReviewId, UserId,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Panic stubs — never called in the "ignored event" test path
|
||||||
|
struct PanicRepo;
|
||||||
|
struct PanicMetadata;
|
||||||
|
struct PanicFetcher;
|
||||||
|
struct PanicStorage;
|
||||||
|
struct PanicAuth;
|
||||||
|
struct PanicHasher;
|
||||||
|
struct PanicUserRepo;
|
||||||
|
struct NoopPublisher;
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl MovieRepository for PanicRepo {
|
||||||
|
async fn get_movie_by_external_id(&self, _: &ExternalMetadataId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_movies_by_title_and_year(&self, _: &MovieTitle, _: &ReleaseYear) -> Result<Vec<Movie>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
|
||||||
|
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
|
||||||
|
async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl MetadataClient for PanicMetadata {
|
||||||
|
async fn fetch_movie_metadata(&self, _: &MetadataSearchCriteria) -> Result<Movie, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_poster_url(&self, _: &ExternalMetadataId) -> Result<Option<PosterUrl>, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl PosterFetcherClient for PanicFetcher {
|
||||||
|
async fn fetch_poster_bytes(&self, _: &PosterUrl) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl PosterStorage for PanicStorage {
|
||||||
|
async fn store_poster(&self, _: &MovieId, _: &[u8]) -> Result<PosterPath, DomainError> { panic!("unexpected") }
|
||||||
|
async fn get_poster(&self, _: &PosterPath) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl AuthService for PanicAuth {
|
||||||
|
async fn generate_token(&self, _: &UserId) -> Result<GeneratedToken, DomainError> { panic!("unexpected") }
|
||||||
|
async fn validate_token(&self, _: &str) -> Result<UserId, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl PasswordHasher for PanicHasher {
|
||||||
|
async fn hash(&self, _: &str) -> Result<PasswordHash, DomainError> { panic!("unexpected") }
|
||||||
|
async fn verify(&self, _: &str, _: &PasswordHash) -> Result<bool, DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl UserRepository for PanicUserRepo {
|
||||||
|
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") }
|
||||||
|
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl EventPublisher for NoopPublisher {
|
||||||
|
async fn publish(&self, _: &DomainEvent) -> Result<(), DomainError> { Ok(()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn panic_ctx() -> AppContext {
|
||||||
|
AppContext {
|
||||||
|
repository: Arc::new(PanicRepo),
|
||||||
|
metadata_client: Arc::new(PanicMetadata),
|
||||||
|
poster_fetcher: Arc::new(PanicFetcher),
|
||||||
|
poster_storage: Arc::new(PanicStorage),
|
||||||
|
event_publisher: Arc::new(NoopPublisher),
|
||||||
|
auth_service: Arc::new(PanicAuth),
|
||||||
|
password_hasher: Arc::new(PanicHasher),
|
||||||
|
user_repository: Arc::new(PanicUserRepo),
|
||||||
|
config: AppConfig { allow_registration: false },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn review_logged_is_ignored() {
|
||||||
|
let handler = PosterSyncHandler::new(panic_ctx(), 3);
|
||||||
|
let event = DomainEvent::ReviewLogged {
|
||||||
|
review_id: ReviewId::generate(),
|
||||||
|
movie_id: MovieId::generate(),
|
||||||
|
user_id: UserId::generate(),
|
||||||
|
rating: Rating::new(4).unwrap(),
|
||||||
|
watched_at: chrono::NaiveDateTime::from_timestamp_opt(0, 0).unwrap(),
|
||||||
|
};
|
||||||
|
// returns Ok without touching any panic stubs
|
||||||
|
assert!(handler.handle(&event).await.is_ok());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 2: Expose the module in `lib.rs`**
|
||||||
|
|
||||||
|
Add to `crates/presentation/src/lib.rs`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub mod event_handlers;
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 3: Run the test to verify it fails**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo test -p presentation event_handlers 2>&1 | tail -20
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: the test fails at runtime (panic) because `handle` is `todo!()` — the file itself compiles.
|
||||||
|
|
||||||
|
- [ ] **Step 4: Implement `handle` in `PosterSyncHandler`**
|
||||||
|
|
||||||
|
Replace the `todo!()` body in `crates/presentation/src/event_handlers.rs`:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[async_trait]
|
||||||
|
impl EventHandler for PosterSyncHandler {
|
||||||
|
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||||
|
let (movie_id, external_metadata_id) = match event {
|
||||||
|
DomainEvent::MovieDiscovered {
|
||||||
|
movie_id,
|
||||||
|
external_metadata_id,
|
||||||
|
} => (movie_id.value(), external_metadata_id.value().to_owned()),
|
||||||
|
_ => return Ok(()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut last_err: Option<DomainError> = None;
|
||||||
|
for attempt in 0..=self.max_retries {
|
||||||
|
let cmd = SyncPosterCommand {
|
||||||
|
movie_id,
|
||||||
|
external_metadata_id: external_metadata_id.clone(),
|
||||||
|
};
|
||||||
|
match sync_poster::execute(&self.ctx, cmd).await {
|
||||||
|
Ok(()) => return Ok(()),
|
||||||
|
Err(e) => {
|
||||||
|
if attempt < self.max_retries {
|
||||||
|
let delay = Duration::from_secs(2u64.pow(attempt));
|
||||||
|
tracing::warn!(
|
||||||
|
attempt = attempt + 1,
|
||||||
|
max_attempts = self.max_retries + 1,
|
||||||
|
delay_secs = delay.as_secs(),
|
||||||
|
"poster sync failed, retrying: {e}"
|
||||||
|
);
|
||||||
|
tokio::time::sleep(delay).await;
|
||||||
|
}
|
||||||
|
last_err = Some(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let err = last_err.unwrap();
|
||||||
|
tracing::error!(
|
||||||
|
attempts = self.max_retries + 1,
|
||||||
|
"poster sync failed after all attempts: {err}"
|
||||||
|
);
|
||||||
|
Err(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 5: Run the test to verify it passes**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo test -p presentation event_handlers 2>&1 | tail -20
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: `test result: ok. 1 passed`
|
||||||
|
|
||||||
|
- [ ] **Step 6: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add crates/presentation/src/event_handlers.rs crates/presentation/src/lib.rs
|
||||||
|
git commit -m "feat(presentation): implement PosterSyncHandler with retry"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Task 4: Wire `PosterSyncHandler` in `main.rs`
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `crates/presentation/src/main.rs`
|
||||||
|
|
||||||
|
- [ ] **Step 1: Add the import**
|
||||||
|
|
||||||
|
In `crates/presentation/src/main.rs`, update the import block. The existing line is:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use event_publisher::{EventPublisherConfig, create_event_channel};
|
||||||
|
```
|
||||||
|
|
||||||
|
Add below it:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use presentation::event_handlers::PosterSyncHandler;
|
||||||
|
```
|
||||||
|
|
||||||
|
- [ ] **Step 2: Wire the handler**
|
||||||
|
|
||||||
|
In `wire_dependencies`, find the two existing lines:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
let (event_publisher, event_worker) = create_event_channel(EventPublisherConfig::from_env());
|
||||||
|
tokio::spawn(event_worker.run());
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace with:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
let poster_handler = PosterSyncHandler::new(app_ctx.clone(), 3); // 3 retries = 4 total attempts
|
||||||
|
let (event_publisher, event_worker) = create_event_channel(
|
||||||
|
EventPublisherConfig::from_env(),
|
||||||
|
vec![Box::new(poster_handler)],
|
||||||
|
);
|
||||||
|
tokio::spawn(event_worker.run());
|
||||||
|
```
|
||||||
|
|
||||||
|
Note: `app_ctx.clone()` is cheap — all fields are `Arc<dyn Trait>`.
|
||||||
|
|
||||||
|
- [ ] **Step 3: Build the full workspace**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo build 2>&1 | tail -20
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: clean build with no errors.
|
||||||
|
|
||||||
|
- [ ] **Step 4: Run all tests**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo test 2>&1 | tail -20
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: all tests pass.
|
||||||
|
|
||||||
|
- [ ] **Step 5: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add crates/presentation/src/main.rs
|
||||||
|
git commit -m "feat(presentation): wire PosterSyncHandler into event worker"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Verification
|
||||||
|
|
||||||
|
After all tasks complete, smoke-test end-to-end:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start the server
|
||||||
|
RUST_LOG=info cargo run -p presentation
|
||||||
|
|
||||||
|
# In another terminal: log a review for a movie not yet in the DB
|
||||||
|
# (requires valid JWT — use the existing login endpoint first)
|
||||||
|
|
||||||
|
# Watch the server logs for:
|
||||||
|
# event: movie_discovered movie_id=<uuid> external_id=tt...
|
||||||
|
# poster sync attempt logs (or success with no retries needed)
|
||||||
|
```
|
||||||
|
|
||||||
|
To confirm the poster was stored, check the configured object store bucket/directory for a file named with the movie's UUID.
|
||||||
@@ -0,0 +1,115 @@
|
|||||||
|
# Event-Driven Poster Sync
|
||||||
|
|
||||||
|
**Date:** 2026-05-04
|
||||||
|
**Status:** Approved
|
||||||
|
|
||||||
|
## Problem
|
||||||
|
|
||||||
|
The `EventPublisher` infrastructure exists but only logs events via tracing. When a new movie is discovered (`MovieDiscovered` event), its poster should be automatically downloaded and stored — currently this requires a manual `POST /api/movies/{id}/sync-poster` call.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
- Introduce an `EventHandler` trait for composable event side-effects
|
||||||
|
- Implement `PosterSyncHandler` that reacts to `MovieDiscovered` by running the existing `sync_poster` use case with retry
|
||||||
|
- RSS feed is already generated fresh on every request — no event work needed there
|
||||||
|
|
||||||
|
## Design
|
||||||
|
|
||||||
|
### `EventHandler` trait (in `event-publisher` crate)
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[async_trait]
|
||||||
|
pub trait EventHandler: Send + Sync {
|
||||||
|
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Lives in `event-publisher` crate alongside `ChannelEventPublisher`. Depends only on `domain` — no new crate dependencies required.
|
||||||
|
|
||||||
|
### `EventWorker` update
|
||||||
|
|
||||||
|
`EventWorker` gains a `handlers: Vec<Box<dyn EventHandler>>` field. On each received event:
|
||||||
|
1. Log the event via tracing (existing behavior, kept as default baseline)
|
||||||
|
2. Fan out to all handlers concurrently (or sequentially — see note below)
|
||||||
|
3. Handler errors are logged at ERROR level but do not stop the worker or other handlers
|
||||||
|
|
||||||
|
`create_event_channel` signature gains a `handlers` parameter:
|
||||||
|
```rust
|
||||||
|
pub fn create_event_channel(
|
||||||
|
config: EventPublisherConfig,
|
||||||
|
handlers: Vec<Box<dyn EventHandler>>,
|
||||||
|
) -> (ChannelEventPublisher, EventWorker)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Fan-out strategy:** sequential for now (simpler, avoids concurrent mutation of shared state). If handler latency becomes a concern, switch to concurrent fan-out with `futures::future::join_all` or a `tokio::task::JoinSet` (note: `tokio::join_all` does not exist).
|
||||||
|
|
||||||
|
### `PosterSyncHandler` (in `presentation` crate)
|
||||||
|
|
||||||
|
New file: `crates/presentation/src/event_handlers.rs`
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub struct PosterSyncHandler {
|
||||||
|
ctx: AppContext,
|
||||||
|
max_retries: u32,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Behavior per event:
|
||||||
|
- `MovieDiscovered { movie_id, external_metadata_id }` → build `SyncPosterCommand`, call `sync_poster::execute()` with exponential backoff
|
||||||
|
- All other events → `Ok(())` immediately
|
||||||
|
|
||||||
|
**Retry logic:** up to `max_retries` retries after the initial attempt (default: 3, so 4 total attempts), with exponential backoff delays 1s → 2s → 4s before each retry. After exhausting all attempts, logs at ERROR level and returns `Err`.
|
||||||
|
|
||||||
|
### Wiring (`main.rs`)
|
||||||
|
|
||||||
|
```rust
|
||||||
|
let poster_handler = PosterSyncHandler::new(app_ctx.clone(), 3); // 3 retries = 4 total attempts
|
||||||
|
let (event_publisher, event_worker) = create_event_channel(
|
||||||
|
EventPublisherConfig::from_env(),
|
||||||
|
vec![Box::new(poster_handler)],
|
||||||
|
);
|
||||||
|
tokio::spawn(event_worker.run());
|
||||||
|
```
|
||||||
|
|
||||||
|
`AppContext` is `Clone` (all fields are `Arc<dyn Trait>`), so cloning for the handler is cheap.
|
||||||
|
|
||||||
|
## Data Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
POST /api/diary
|
||||||
|
→ log_review::execute()
|
||||||
|
→ movie not in DB → fetch metadata → MovieDiscovered published
|
||||||
|
→ review saved → ReviewLogged published
|
||||||
|
↓
|
||||||
|
mpsc channel
|
||||||
|
↓
|
||||||
|
EventWorker::run()
|
||||||
|
→ tracing log
|
||||||
|
→ PosterSyncHandler::handle(MovieDiscovered)
|
||||||
|
→ sync_poster::execute() [attempt 1]
|
||||||
|
→ on failure: sleep 1s → attempt 2
|
||||||
|
→ on failure: sleep 2s → attempt 3
|
||||||
|
→ on failure: sleep 4s → attempt 4
|
||||||
|
→ on failure: log ERROR, done
|
||||||
|
```
|
||||||
|
|
||||||
|
## Files Changed
|
||||||
|
|
||||||
|
| File | Change |
|
||||||
|
|---|---|
|
||||||
|
| `crates/adapters/event-publisher/src/lib.rs` | Add `EventHandler` trait; update `EventWorker` and `create_event_channel` |
|
||||||
|
| `crates/presentation/src/event_handlers.rs` | New — `PosterSyncHandler` |
|
||||||
|
| `crates/presentation/src/main.rs` | Wire `PosterSyncHandler` into `create_event_channel` |
|
||||||
|
| `crates/presentation/src/lib.rs` | Export new `event_handlers` module |
|
||||||
|
| `crates/application/src/commands.rs` | Add `#[derive(Clone)]` to `SyncPosterCommand` |
|
||||||
|
|
||||||
|
No new crate dependencies. No changes to the domain layer; the only application-layer change is adding `#[derive(Clone)]` to `SyncPosterCommand`.
|
||||||
|
|
||||||
|
## Verification
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo build # full workspace clean build
|
||||||
|
cargo test # existing tests still pass
|
||||||
|
|
||||||
|
# Manual: log a review for a new movie
|
||||||
|
# → check logs for "event: movie_discovered"
|
||||||
|
# → check logs for poster sync attempt
|
||||||
|
# → check object store / storage for saved poster file
|
||||||
|
```
|
||||||
30
docs/superpowers/specs/2026-05-04-frontend-html-design.md
Normal file
30
docs/superpowers/specs/2026-05-04-frontend-html-design.md
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Frontend HTML/CSS Design
|
||||||
|
|
||||||
|
**Date:** 2026-05-04
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
Server-rendered HTML frontend using Rust/Axum + Askama templates + HTTP-only cookie JWT auth. No JavaScript.
|
||||||
|
|
||||||
|
## Pages
|
||||||
|
|
||||||
|
| Route | Access | Description |
|
||||||
|
|---|---|---|
|
||||||
|
| GET / | public | Diary index |
|
||||||
|
| GET /login | public | Login form |
|
||||||
|
| POST /login | public | Set cookie → redirect / |
|
||||||
|
| GET /logout | — | Clear cookie → redirect / |
|
||||||
|
| GET /register | public | Only if ALLOW_REGISTRATION |
|
||||||
|
| POST /register | public | Set cookie → redirect / |
|
||||||
|
| GET /reviews/new | auth | New review form |
|
||||||
|
| POST /reviews | auth | Log review → redirect / |
|
||||||
|
|
||||||
|
## Design Decisions
|
||||||
|
|
||||||
|
- **Auth:** Cookie-based JWT (HttpOnly, SameSite=Lax). Existing Bearer auth untouched.
|
||||||
|
- **Template inheritance:** base.html owns header. Child templates use {% extends %}/{% block %}.
|
||||||
|
- **Entry layout:** Poster thumbnail (60px) + text block. Fallback to text-only when no poster.
|
||||||
|
- **Header (logged out):** [Login] [Register?]
|
||||||
|
- **Header (logged in):** [Add Review] email@example.com [Logout]
|
||||||
|
- **Form errors:** PRG → redirect back with ?error=<msg>
|
||||||
|
- **Diary visibility:** Public (anyone can read, auth required to add)
|
||||||
167
static/style.css
Normal file
167
static/style.css
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
*, *::before, *::after { box-sizing: border-box; }
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: monospace;
|
||||||
|
max-width: 720px;
|
||||||
|
margin: 0 auto;
|
||||||
|
padding: 20px;
|
||||||
|
color: #222;
|
||||||
|
background: #fff;
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
a { color: inherit; }
|
||||||
|
|
||||||
|
/* Header */
|
||||||
|
header {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: baseline;
|
||||||
|
border-bottom: 1px solid #ccc;
|
||||||
|
padding-bottom: 8px;
|
||||||
|
margin-bottom: 28px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-title {
|
||||||
|
font-weight: bold;
|
||||||
|
text-decoration: none;
|
||||||
|
font-size: 1.1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav {
|
||||||
|
display: flex;
|
||||||
|
gap: 14px;
|
||||||
|
font-size: 0.88em;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav a { text-decoration: none; }
|
||||||
|
nav a:hover { text-decoration: underline; }
|
||||||
|
|
||||||
|
.user-email { color: #666; }
|
||||||
|
|
||||||
|
/* Diary entries */
|
||||||
|
.entry {
|
||||||
|
display: flex;
|
||||||
|
gap: 14px;
|
||||||
|
padding: 14px 0;
|
||||||
|
border-bottom: 1px solid #eee;
|
||||||
|
}
|
||||||
|
|
||||||
|
.poster {
|
||||||
|
width: 60px;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.poster img {
|
||||||
|
width: 100%;
|
||||||
|
display: block;
|
||||||
|
border: 1px solid #ddd;
|
||||||
|
}
|
||||||
|
|
||||||
|
.entry-body { flex: 1; }
|
||||||
|
|
||||||
|
.entry-title {
|
||||||
|
font-weight: bold;
|
||||||
|
margin-bottom: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.year { color: #666; font-weight: normal; }
|
||||||
|
|
||||||
|
.director {
|
||||||
|
color: #666;
|
||||||
|
font-size: 0.88em;
|
||||||
|
margin-bottom: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rating {
|
||||||
|
font-size: 0.88em;
|
||||||
|
margin-bottom: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.comment {
|
||||||
|
font-style: italic;
|
||||||
|
color: #444;
|
||||||
|
font-size: 0.9em;
|
||||||
|
margin-bottom: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.watched-at {
|
||||||
|
font-size: 0.8em;
|
||||||
|
color: #888;
|
||||||
|
}
|
||||||
|
|
||||||
|
.empty { color: #999; margin-top: 20px; }
|
||||||
|
|
||||||
|
/* Pagination */
|
||||||
|
.pagination {
|
||||||
|
margin-top: 24px;
|
||||||
|
display: flex;
|
||||||
|
gap: 20px;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pagination a { text-decoration: none; }
|
||||||
|
.pagination a:hover { text-decoration: underline; }
|
||||||
|
|
||||||
|
/* Forms */
|
||||||
|
h1 { font-size: 1.2em; margin-bottom: 20px; }
|
||||||
|
|
||||||
|
form {
|
||||||
|
max-width: 400px;
|
||||||
|
}
|
||||||
|
|
||||||
|
form label {
|
||||||
|
display: block;
|
||||||
|
margin-bottom: 14px;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
form input[type="text"],
|
||||||
|
form input[type="email"],
|
||||||
|
form input[type="password"],
|
||||||
|
form input[type="number"],
|
||||||
|
form input[type="datetime-local"],
|
||||||
|
form textarea {
|
||||||
|
display: block;
|
||||||
|
width: 100%;
|
||||||
|
font-family: monospace;
|
||||||
|
font-size: 1em;
|
||||||
|
margin-top: 3px;
|
||||||
|
padding: 5px 6px;
|
||||||
|
border: 1px solid #bbb;
|
||||||
|
border-radius: 0;
|
||||||
|
background: #fff;
|
||||||
|
color: #222;
|
||||||
|
}
|
||||||
|
|
||||||
|
form textarea {
|
||||||
|
height: 80px;
|
||||||
|
resize: vertical;
|
||||||
|
}
|
||||||
|
|
||||||
|
form hr {
|
||||||
|
border: none;
|
||||||
|
border-top: 1px solid #eee;
|
||||||
|
margin: 18px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
form button[type="submit"] {
|
||||||
|
font-family: monospace;
|
||||||
|
font-size: 1em;
|
||||||
|
padding: 6px 18px;
|
||||||
|
cursor: pointer;
|
||||||
|
background: #222;
|
||||||
|
color: #fff;
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
form button[type="submit"]:hover { background: #444; }
|
||||||
|
|
||||||
|
.optional { color: #888; font-size: 0.85em; }
|
||||||
|
|
||||||
|
/* Errors */
|
||||||
|
.error {
|
||||||
|
color: #c00;
|
||||||
|
margin-bottom: 16px;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user