Compare commits
8 Commits
317898d51b
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| 7a66661932 | |||
| b30a6a102b | |||
| 38a3aa6bbf | |||
| 3135a15cb3 | |||
| d083f8ae3d | |||
| 874c406d4a | |||
| 78e1f4ef72 | |||
| cf74b06b4a |
@@ -4,3 +4,7 @@ target/
|
|||||||
*.db
|
*.db
|
||||||
*.db-shm
|
*.db-shm
|
||||||
*.db-wal
|
*.db-wal
|
||||||
|
.cargo/
|
||||||
|
.sqlx/
|
||||||
|
docs/
|
||||||
|
dev.db
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
DATABASE_URL=sqlite:./dev.db
|
DATABASE_URL=sqlite:./dev.db
|
||||||
BASE_URL=http://localhost:3000
|
BASE_URL=http://localhost:3000
|
||||||
PORT=3000
|
PORT=3000
|
||||||
|
SECURE_COOKIES=false
|
||||||
JWT_SECRET=
|
JWT_SECRET=
|
||||||
JWT_TTL_SECONDS=
|
JWT_TTL_SECONDS=
|
||||||
ALLOW_REGISTRATION=true
|
ALLOW_REGISTRATION=true
|
||||||
|
|||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -8,6 +8,8 @@
|
|||||||
.env.prod
|
.env.prod
|
||||||
|
|
||||||
*.db
|
*.db
|
||||||
|
*db-shm
|
||||||
|
*db-wal
|
||||||
|
|
||||||
.worktrees/
|
.worktrees/
|
||||||
.superpowers/
|
.superpowers/
|
||||||
|
|||||||
@@ -59,7 +59,7 @@ struct UsersTemplate<'a> {
|
|||||||
|
|
||||||
struct MonthlyRatingRow<'a> {
|
struct MonthlyRatingRow<'a> {
|
||||||
rating: &'a MonthlyRating,
|
rating: &'a MonthlyRating,
|
||||||
bar_height_pct: i64,
|
bar_height_px: i64,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Template)]
|
#[derive(Template)]
|
||||||
@@ -125,8 +125,8 @@ fn build_heatmap(history: &[MonthActivity]) -> Vec<HeatmapCell> {
|
|||||||
}).collect()
|
}).collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn bar_height_pct(avg_rating: f64) -> i64 {
|
fn bar_height_px(avg_rating: f64) -> i64 {
|
||||||
(avg_rating / 5.0 * 100.0) as i64
|
(avg_rating / 5.0 * 60.0) as i64
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct AskamaHtmlRenderer;
|
pub struct AskamaHtmlRenderer;
|
||||||
@@ -207,7 +207,7 @@ impl HtmlRenderer for AskamaHtmlRenderer {
|
|||||||
.split('@').next().unwrap_or(&data.profile_user_email).to_string();
|
.split('@').next().unwrap_or(&data.profile_user_email).to_string();
|
||||||
let monthly_rating_rows: Vec<MonthlyRatingRow<'_>> = data.trends.as_ref()
|
let monthly_rating_rows: Vec<MonthlyRatingRow<'_>> = data.trends.as_ref()
|
||||||
.map(|t| t.monthly_ratings.iter().map(|r| MonthlyRatingRow {
|
.map(|t| t.monthly_ratings.iter().map(|r| MonthlyRatingRow {
|
||||||
bar_height_pct: bar_height_pct(r.avg_rating),
|
bar_height_px: bar_height_px(r.avg_rating),
|
||||||
rating: r,
|
rating: r,
|
||||||
}).collect())
|
}).collect())
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
|
|||||||
@@ -3,7 +3,15 @@
|
|||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8">
|
<meta charset="UTF-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||||
<title>Movies Diary</title>
|
<title>{{ ctx.page_title }}</title>
|
||||||
|
<meta name="description" content="A personal movie diary — track what you watch, rate and review films.">
|
||||||
|
<meta property="og:type" content="website">
|
||||||
|
<meta property="og:site_name" content="Movies Diary">
|
||||||
|
<meta property="og:title" content="{{ ctx.page_title }}">
|
||||||
|
<meta property="og:url" content="{{ ctx.canonical_url }}">
|
||||||
|
<meta name="twitter:card" content="summary">
|
||||||
|
<meta name="twitter:title" content="{{ ctx.page_title }}">
|
||||||
|
<link rel="canonical" href="{{ ctx.canonical_url }}">
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet">
|
<link href="https://fonts.googleapis.com/css2?family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet">
|
||||||
|
|||||||
@@ -81,7 +81,8 @@
|
|||||||
<div class="bar-chart">
|
<div class="bar-chart">
|
||||||
{% for row in monthly_rating_rows %}
|
{% for row in monthly_rating_rows %}
|
||||||
<div class="bar-col">
|
<div class="bar-col">
|
||||||
<div class="bar-fill" style="height: {{ row.bar_height_pct }}%"></div>
|
<div class="bar-value">{{ "{:.1}"|format(row.rating.avg_rating) }}</div>
|
||||||
|
<div class="bar-fill" style="height: {{ row.bar_height_px }}px"></div>
|
||||||
<div class="bar-month">{{ row.rating.month_label }}</div>
|
<div class="bar-month">{{ row.rating.month_label }}</div>
|
||||||
</div>
|
</div>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct AppConfig {
|
pub struct AppConfig {
|
||||||
pub allow_registration: bool,
|
pub allow_registration: bool,
|
||||||
|
pub base_url: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AppConfig {
|
impl AppConfig {
|
||||||
@@ -8,6 +9,8 @@ impl AppConfig {
|
|||||||
let allow_registration = std::env::var("ALLOW_REGISTRATION")
|
let allow_registration = std::env::var("ALLOW_REGISTRATION")
|
||||||
.map(|v| v == "true" || v == "1")
|
.map(|v| v == "true" || v == "1")
|
||||||
.unwrap_or(false);
|
.unwrap_or(false);
|
||||||
Self { allow_registration }
|
let base_url = std::env::var("BASE_URL")
|
||||||
|
.unwrap_or_else(|_| "http://localhost:3000".to_string());
|
||||||
|
Self { allow_registration, base_url }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,6 +7,8 @@ pub struct HtmlPageContext {
|
|||||||
pub user_id: Option<Uuid>,
|
pub user_id: Option<Uuid>,
|
||||||
pub register_enabled: bool,
|
pub register_enabled: bool,
|
||||||
pub rss_url: String,
|
pub rss_url: String,
|
||||||
|
pub page_title: String,
|
||||||
|
pub canonical_url: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HtmlPageContext {
|
impl HtmlPageContext {
|
||||||
|
|||||||
@@ -2,15 +2,25 @@ use domain::{errors::DomainError, models::User, value_objects::Email};
|
|||||||
|
|
||||||
use crate::{commands::RegisterCommand, context::AppContext};
|
use crate::{commands::RegisterCommand, context::AppContext};
|
||||||
|
|
||||||
|
const MIN_PASSWORD_LENGTH: usize = 8;
|
||||||
|
|
||||||
pub async fn execute(ctx: &AppContext, cmd: RegisterCommand) -> Result<(), DomainError> {
|
pub async fn execute(ctx: &AppContext, cmd: RegisterCommand) -> Result<(), DomainError> {
|
||||||
if !ctx.config.allow_registration {
|
if !ctx.config.allow_registration {
|
||||||
return Err(DomainError::Unauthorized("Registration is disabled".into()));
|
return Err(DomainError::Unauthorized("Registration is disabled".into()));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if cmd.password.len() < MIN_PASSWORD_LENGTH {
|
||||||
|
return Err(DomainError::ValidationError(
|
||||||
|
"Password must be at least 8 characters".into(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
let email = Email::new(cmd.email)?;
|
let email = Email::new(cmd.email)?;
|
||||||
|
|
||||||
if ctx.user_repository.find_by_email(&email).await?.is_some() {
|
if ctx.user_repository.find_by_email(&email).await?.is_some() {
|
||||||
return Err(DomainError::ValidationError("Email already registered".into()));
|
return Err(DomainError::ValidationError(
|
||||||
|
"Email already registered".into(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
let hash = ctx.password_hasher.hash(&cmd.password).await?;
|
let hash = ctx.password_hasher.hash(&cmd.password).await?;
|
||||||
|
|||||||
@@ -160,7 +160,7 @@ mod tests {
|
|||||||
auth_service: Arc::new(PanicAuth),
|
auth_service: Arc::new(PanicAuth),
|
||||||
password_hasher: Arc::new(PanicHasher),
|
password_hasher: Arc::new(PanicHasher),
|
||||||
user_repository: Arc::new(PanicUserRepo),
|
user_repository: Arc::new(PanicUserRepo),
|
||||||
config: AppConfig { allow_registration: false },
|
config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -175,7 +175,7 @@ mod tests {
|
|||||||
auth_service: Arc::new(PanicAuth),
|
auth_service: Arc::new(PanicAuth),
|
||||||
password_hasher: Arc::new(PanicHasher),
|
password_hasher: Arc::new(PanicHasher),
|
||||||
user_repository: Arc::new(PanicUserRepo),
|
user_repository: Arc::new(PanicUserRepo),
|
||||||
config: application::config::AppConfig { allow_registration: false },
|
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||||
},
|
},
|
||||||
html_renderer: Arc::new(PanicRenderer),
|
html_renderer: Arc::new(PanicRenderer),
|
||||||
rss_renderer: Arc::new(PanicRssRenderer),
|
rss_renderer: Arc::new(PanicRssRenderer),
|
||||||
@@ -282,7 +282,7 @@ mod tests {
|
|||||||
auth_service: Arc::new(PanicAuth2),
|
auth_service: Arc::new(PanicAuth2),
|
||||||
password_hasher: Arc::new(PanicHasher2),
|
password_hasher: Arc::new(PanicHasher2),
|
||||||
user_repository: Arc::new(PanicUserRepo2),
|
user_repository: Arc::new(PanicUserRepo2),
|
||||||
config: application::config::AppConfig { allow_registration: false },
|
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||||
},
|
},
|
||||||
html_renderer: Arc::new(PanicRenderer2),
|
html_renderer: Arc::new(PanicRenderer2),
|
||||||
rss_renderer: Arc::new(PanicRssRenderer2),
|
rss_renderer: Arc::new(PanicRssRenderer2),
|
||||||
@@ -341,7 +341,7 @@ mod tests {
|
|||||||
auth_service: Arc::new(RejectingAuth),
|
auth_service: Arc::new(RejectingAuth),
|
||||||
password_hasher: Arc::new(PanicHasher3),
|
password_hasher: Arc::new(PanicHasher3),
|
||||||
user_repository: Arc::new(PanicUserRepo3),
|
user_repository: Arc::new(PanicUserRepo3),
|
||||||
config: application::config::AppConfig { allow_registration: false },
|
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||||
},
|
},
|
||||||
html_renderer: Arc::new(PanicRenderer3),
|
html_renderer: Arc::new(PanicRenderer3),
|
||||||
rss_renderer: Arc::new(PanicRssRenderer3),
|
rss_renderer: Arc::new(PanicRssRenderer3),
|
||||||
|
|||||||
@@ -43,6 +43,8 @@ pub mod html {
|
|||||||
user_id: uuid,
|
user_id: uuid,
|
||||||
register_enabled: state.app_ctx.config.allow_registration,
|
register_enabled: state.app_ctx.config.allow_registration,
|
||||||
rss_url: "/feed.rss".to_string(),
|
rss_url: "/feed.rss".to_string(),
|
||||||
|
page_title: "Movies Diary".to_string(),
|
||||||
|
canonical_url: state.app_ctx.config.base_url.clone(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -53,10 +55,14 @@ pub mod html {
|
|||||||
.replace('"', "%22")
|
.replace('"', "%22")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn secure_flag() -> &'static str {
|
||||||
|
if std::env::var("SECURE_COOKIES").as_deref() == Ok("true") { "; Secure" } else { "" }
|
||||||
|
}
|
||||||
|
|
||||||
fn set_cookie_header(token: &str, max_age: i64) -> (axum::http::HeaderName, HeaderValue) {
|
fn set_cookie_header(token: &str, max_age: i64) -> (axum::http::HeaderName, HeaderValue) {
|
||||||
let val = format!(
|
let val = format!(
|
||||||
"token={}; HttpOnly; Path=/; SameSite=Lax; Max-Age={}",
|
"token={}; HttpOnly; Path=/; SameSite=Strict; Max-Age={}{}",
|
||||||
token, max_age
|
token, max_age, secure_flag()
|
||||||
);
|
);
|
||||||
(SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"))
|
(SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"))
|
||||||
}
|
}
|
||||||
@@ -70,6 +76,8 @@ pub mod html {
|
|||||||
user_id: None,
|
user_id: None,
|
||||||
register_enabled: state.app_ctx.config.allow_registration,
|
register_enabled: state.app_ctx.config.allow_registration,
|
||||||
rss_url: "/feed.rss".to_string(),
|
rss_url: "/feed.rss".to_string(),
|
||||||
|
page_title: "Login — Movies Diary".to_string(),
|
||||||
|
canonical_url: format!("{}/login", state.app_ctx.config.base_url),
|
||||||
};
|
};
|
||||||
let html = state
|
let html = state
|
||||||
.html_renderer
|
.html_renderer
|
||||||
@@ -104,10 +112,8 @@ pub mod html {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn get_logout() -> impl IntoResponse {
|
pub async fn get_logout() -> impl IntoResponse {
|
||||||
let cookie = (
|
let val = format!("token=; HttpOnly; Path=/; SameSite=Strict; Max-Age=0{}", secure_flag());
|
||||||
SET_COOKIE,
|
let cookie = (SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"));
|
||||||
HeaderValue::from_static("token=; HttpOnly; Path=/; SameSite=Lax; Max-Age=0"),
|
|
||||||
);
|
|
||||||
([cookie], Redirect::to("/")).into_response()
|
([cookie], Redirect::to("/")).into_response()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -123,6 +129,8 @@ pub mod html {
|
|||||||
user_id: None,
|
user_id: None,
|
||||||
register_enabled: true,
|
register_enabled: true,
|
||||||
rss_url: "/feed.rss".to_string(),
|
rss_url: "/feed.rss".to_string(),
|
||||||
|
page_title: "Register — Movies Diary".to_string(),
|
||||||
|
canonical_url: format!("{}/register", state.app_ctx.config.base_url),
|
||||||
};
|
};
|
||||||
let html = state
|
let html = state
|
||||||
.html_renderer
|
.html_renderer
|
||||||
@@ -162,9 +170,8 @@ pub mod html {
|
|||||||
Err(_) => Redirect::to("/login").into_response(),
|
Err(_) => Redirect::to("/login").into_response(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(e) => {
|
Err(_) => {
|
||||||
let msg = encode_error(&e.to_string());
|
Redirect::to("/register?error=Registration+failed.+Please+try+again.").into_response()
|
||||||
Redirect::to(&format!("/register?error={}", msg)).into_response()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -174,7 +181,9 @@ pub mod html {
|
|||||||
State(state): State<AppState>,
|
State(state): State<AppState>,
|
||||||
Query(params): Query<ErrorQuery>,
|
Query(params): Query<ErrorQuery>,
|
||||||
) -> impl IntoResponse {
|
) -> impl IntoResponse {
|
||||||
let ctx = build_page_context(&state, Some(user_id)).await;
|
let mut ctx = build_page_context(&state, Some(user_id)).await;
|
||||||
|
ctx.page_title = "Log a Review — Movies Diary".to_string();
|
||||||
|
ctx.canonical_url = format!("{}/reviews/new", state.app_ctx.config.base_url);
|
||||||
let html = state
|
let html = state
|
||||||
.html_renderer
|
.html_renderer
|
||||||
.render_new_review_page(NewReviewPageData {
|
.render_new_review_page(NewReviewPageData {
|
||||||
@@ -261,7 +270,9 @@ pub mod html {
|
|||||||
OptionalCookieUser(user_id): OptionalCookieUser,
|
OptionalCookieUser(user_id): OptionalCookieUser,
|
||||||
State(state): State<AppState>,
|
State(state): State<AppState>,
|
||||||
) -> impl IntoResponse {
|
) -> impl IntoResponse {
|
||||||
let ctx = build_page_context(&state, user_id).await;
|
let mut ctx = build_page_context(&state, user_id).await;
|
||||||
|
ctx.page_title = "Members — Movies Diary".to_string();
|
||||||
|
ctx.canonical_url = format!("{}/users", state.app_ctx.config.base_url);
|
||||||
match application::use_cases::get_users::execute(&state.app_ctx, application::queries::GetUsersQuery).await {
|
match application::use_cases::get_users::execute(&state.app_ctx, application::queries::GetUsersQuery).await {
|
||||||
Ok(users) => {
|
Ok(users) => {
|
||||||
let data = application::ports::UsersPageData { ctx, users };
|
let data = application::ports::UsersPageData { ctx, users };
|
||||||
@@ -292,6 +303,11 @@ pub mod html {
|
|||||||
Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
|
Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let display_name = profile_user.email().value()
|
||||||
|
.split('@').next().unwrap_or("User");
|
||||||
|
ctx.page_title = format!("{}'s Diary — Movies Diary", display_name);
|
||||||
|
ctx.canonical_url = format!("{}/users/{}", state.app_ctx.config.base_url, profile_user_uuid);
|
||||||
|
|
||||||
let query = application::queries::GetUserProfileQuery {
|
let query = application::queries::GetUserProfileQuery {
|
||||||
user_id: profile_user_uuid,
|
user_id: profile_user_uuid,
|
||||||
view: view.clone(),
|
view: view.clone(),
|
||||||
|
|||||||
@@ -32,8 +32,11 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
let app = routes::build_router(state);
|
let app = routes::build_router(state);
|
||||||
|
|
||||||
let listener = TcpListener::bind("0.0.0.0:3000").await?;
|
let host = std::env::var("HOST").unwrap_or_else(|_| "0.0.0.0".to_string());
|
||||||
tracing::info!("Listening on 0.0.0.0:3000");
|
let port = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string());
|
||||||
|
let addr = format!("{}:{}", host, port);
|
||||||
|
let listener = TcpListener::bind(&addr).await?;
|
||||||
|
tracing::info!("Listening on {}", addr);
|
||||||
axum::serve(listener, app).await?;
|
axum::serve(listener, app).await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -48,7 +51,9 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
|
|||||||
let database_url = std::env::var("DATABASE_URL").context("DATABASE_URL must be set")?;
|
let database_url = std::env::var("DATABASE_URL").context("DATABASE_URL must be set")?;
|
||||||
let opts = SqliteConnectOptions::from_str(&database_url)
|
let opts = SqliteConnectOptions::from_str(&database_url)
|
||||||
.context("Invalid DATABASE_URL")?
|
.context("Invalid DATABASE_URL")?
|
||||||
.create_if_missing(true);
|
.create_if_missing(true)
|
||||||
|
.journal_mode(sqlx::sqlite::SqliteJournalMode::Wal)
|
||||||
|
.busy_timeout(std::time::Duration::from_secs(5));
|
||||||
let pool = SqlitePool::connect_with(opts)
|
let pool = SqlitePool::connect_with(opts)
|
||||||
.await
|
.await
|
||||||
.context("Failed to connect to SQLite database")?;
|
.context("Failed to connect to SQLite database")?;
|
||||||
|
|||||||
@@ -1,8 +1,52 @@
|
|||||||
use axum::{Router, routing};
|
use std::sync::{
|
||||||
|
Arc,
|
||||||
|
atomic::{AtomicU64, Ordering},
|
||||||
|
};
|
||||||
|
use std::time::{SystemTime, UNIX_EPOCH};
|
||||||
|
|
||||||
|
use axum::{Router, http::StatusCode, middleware, response::IntoResponse, routing};
|
||||||
use tower_http::{services::ServeDir, trace::TraceLayer};
|
use tower_http::{services::ServeDir, trace::TraceLayer};
|
||||||
|
|
||||||
use crate::{handlers, state::AppState};
|
use crate::{handlers, state::AppState};
|
||||||
|
|
||||||
|
const API_RATE_LIMIT: u64 = 20; // 20 requests per minute globally for API routes
|
||||||
|
|
||||||
|
/// Simple global rate limiter: tracks request count per 60-second window.
|
||||||
|
/// Not per-IP — suitable for a low-traffic personal app.
|
||||||
|
#[derive(Clone)]
|
||||||
|
struct RateLimiter {
|
||||||
|
window: Arc<AtomicU64>,
|
||||||
|
count: Arc<AtomicU64>,
|
||||||
|
limit: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RateLimiter {
|
||||||
|
fn new(limit: u64) -> Self {
|
||||||
|
Self {
|
||||||
|
window: Arc::new(AtomicU64::new(0)),
|
||||||
|
count: Arc::new(AtomicU64::new(0)),
|
||||||
|
limit,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn check(&self) -> bool {
|
||||||
|
let now = SystemTime::now()
|
||||||
|
.duration_since(UNIX_EPOCH)
|
||||||
|
.unwrap_or_default()
|
||||||
|
.as_secs()
|
||||||
|
/ 60;
|
||||||
|
let prev = self.window.load(Ordering::Acquire);
|
||||||
|
if now != prev {
|
||||||
|
// compare_exchange ensures only one thread wins the window reset
|
||||||
|
if self.window.compare_exchange(prev, now, Ordering::AcqRel, Ordering::Relaxed).is_ok() {
|
||||||
|
self.count.store(1, Ordering::Release);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.count.fetch_add(1, Ordering::Relaxed) + 1 <= self.limit
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn build_router(state: AppState) -> Router {
|
pub fn build_router(state: AppState) -> Router {
|
||||||
Router::new()
|
Router::new()
|
||||||
.merge(html_routes())
|
.merge(html_routes())
|
||||||
@@ -13,30 +57,73 @@ pub fn build_router(state: AppState) -> Router {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn html_routes() -> Router<AppState> {
|
fn html_routes() -> Router<AppState> {
|
||||||
Router::new()
|
// Auth routes: 20 requests per minute globally.
|
||||||
.route("/", routing::get(handlers::html::get_activity_feed))
|
let limiter = RateLimiter::new(API_RATE_LIMIT);
|
||||||
.route("/users", routing::get(handlers::html::get_users_list))
|
let auth = Router::new()
|
||||||
.route("/users/{id}", routing::get(handlers::html::get_user_profile))
|
|
||||||
.route(
|
.route(
|
||||||
"/login",
|
"/login",
|
||||||
routing::get(handlers::html::get_login_page)
|
routing::get(handlers::html::get_login_page).post(handlers::html::post_login),
|
||||||
.post(handlers::html::post_login),
|
|
||||||
)
|
)
|
||||||
.route("/logout", routing::get(handlers::html::get_logout))
|
.route("/logout", routing::get(handlers::html::get_logout))
|
||||||
.route(
|
.route(
|
||||||
"/register",
|
"/register",
|
||||||
routing::get(handlers::html::get_register_page)
|
routing::get(handlers::html::get_register_page).post(handlers::html::post_register),
|
||||||
.post(handlers::html::post_register),
|
)
|
||||||
|
.route_layer(middleware::from_fn(
|
||||||
|
move |req: axum::extract::Request, next: middleware::Next| {
|
||||||
|
let limiter = limiter.clone();
|
||||||
|
async move {
|
||||||
|
if limiter.check() {
|
||||||
|
next.run(req).await
|
||||||
|
} else {
|
||||||
|
StatusCode::TOO_MANY_REQUESTS.into_response()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
));
|
||||||
|
|
||||||
|
Router::new()
|
||||||
|
.route("/", routing::get(handlers::html::get_activity_feed))
|
||||||
|
.route("/users", routing::get(handlers::html::get_users_list))
|
||||||
|
.route(
|
||||||
|
"/users/{id}",
|
||||||
|
routing::get(handlers::html::get_user_profile),
|
||||||
|
)
|
||||||
|
.merge(auth)
|
||||||
|
.route(
|
||||||
|
"/reviews/new",
|
||||||
|
routing::get(handlers::html::get_new_review_page),
|
||||||
)
|
)
|
||||||
.route("/reviews/new", routing::get(handlers::html::get_new_review_page))
|
|
||||||
.route("/reviews", routing::post(handlers::html::post_review))
|
.route("/reviews", routing::post(handlers::html::post_review))
|
||||||
.route("/reviews/{id}/delete", routing::post(handlers::html::post_delete_review))
|
.route(
|
||||||
.route("/posters/{path}", routing::get(handlers::posters::get_poster))
|
"/reviews/{id}/delete",
|
||||||
|
routing::post(handlers::html::post_delete_review),
|
||||||
|
)
|
||||||
|
.route(
|
||||||
|
"/posters/{path}",
|
||||||
|
routing::get(handlers::posters::get_poster),
|
||||||
|
)
|
||||||
.route("/feed.rss", routing::get(handlers::rss::get_feed))
|
.route("/feed.rss", routing::get(handlers::rss::get_feed))
|
||||||
.route("/users/{id}/feed.rss", routing::get(handlers::rss::get_user_feed))
|
.route(
|
||||||
|
"/users/{id}/feed.rss",
|
||||||
|
routing::get(handlers::rss::get_user_feed),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn api_routes() -> Router<AppState> {
|
fn api_routes() -> Router<AppState> {
|
||||||
|
let limiter = RateLimiter::new(API_RATE_LIMIT);
|
||||||
|
let auth_rate_limit =
|
||||||
|
middleware::from_fn(move |req: axum::extract::Request, next: middleware::Next| {
|
||||||
|
let limiter = limiter.clone();
|
||||||
|
async move {
|
||||||
|
if limiter.check() {
|
||||||
|
next.run(req).await
|
||||||
|
} else {
|
||||||
|
StatusCode::TOO_MANY_REQUESTS.into_response()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
Router::new().nest(
|
Router::new().nest(
|
||||||
"/api",
|
"/api",
|
||||||
Router::new()
|
Router::new()
|
||||||
@@ -46,12 +133,16 @@ fn api_routes() -> Router<AppState> {
|
|||||||
routing::get(handlers::api::get_review_history),
|
routing::get(handlers::api::get_review_history),
|
||||||
)
|
)
|
||||||
.route("/reviews", routing::post(handlers::api::post_review))
|
.route("/reviews", routing::post(handlers::api::post_review))
|
||||||
.route("/reviews/{id}", routing::delete(handlers::api::delete_review))
|
.route(
|
||||||
|
"/reviews/{id}",
|
||||||
|
routing::delete(handlers::api::delete_review),
|
||||||
|
)
|
||||||
.route(
|
.route(
|
||||||
"/movies/{id}/sync-poster",
|
"/movies/{id}/sync-poster",
|
||||||
routing::post(handlers::api::sync_poster),
|
routing::post(handlers::api::sync_poster),
|
||||||
)
|
)
|
||||||
.route("/auth/login", routing::post(handlers::api::login))
|
.route("/auth/login", routing::post(handlers::api::login))
|
||||||
.route("/auth/register", routing::post(handlers::api::register)),
|
.route("/auth/register", routing::post(handlers::api::register))
|
||||||
|
.route_layer(auth_rate_limit),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -105,7 +105,7 @@ async fn test_app() -> Router {
|
|||||||
auth_service: Arc::new(PanicAuth),
|
auth_service: Arc::new(PanicAuth),
|
||||||
password_hasher: Arc::new(PanicHasher),
|
password_hasher: Arc::new(PanicHasher),
|
||||||
user_repository: Arc::new(NobodyUserRepo),
|
user_repository: Arc::new(NobodyUserRepo),
|
||||||
config: AppConfig { allow_registration: false },
|
config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||||
},
|
},
|
||||||
html_renderer: Arc::new(AskamaHtmlRenderer::new()),
|
html_renderer: Arc::new(AskamaHtmlRenderer::new()),
|
||||||
rss_renderer: Arc::new(RssAdapter::new("http://localhost:3000".into())),
|
rss_renderer: Arc::new(RssAdapter::new("http://localhost:3000".into())),
|
||||||
|
|||||||
@@ -1,620 +0,0 @@
|
|||||||
# Event-Driven Poster Sync Implementation Plan
|
|
||||||
|
|
||||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
|
||||||
|
|
||||||
**Goal:** Add an `EventHandler` trait to the event-publisher adapter and implement `PosterSyncHandler` so that a `MovieDiscovered` event automatically triggers the existing `sync_poster` use case with exponential-backoff retry.
|
|
||||||
|
|
||||||
**Architecture:** `EventWorker` gains a `Vec<Box<dyn EventHandler>>` and fans out each received event to all registered handlers sequentially. `PosterSyncHandler` lives in the `presentation` crate (composition root), holds `AppContext`, and calls `sync_poster::execute` on `MovieDiscovered` events — ignoring all others. Retry is up to 3 retries (4 total attempts) with delays 1s → 2s → 4s.
|
|
||||||
|
|
||||||
**Tech Stack:** Rust, tokio::sync::mpsc, async-trait, existing `sync_poster` use case
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## File Map
|
|
||||||
|
|
||||||
| File | Status | Responsibility |
|
|
||||||
|---|---|---|
|
|
||||||
| `crates/adapters/event-publisher/src/lib.rs` | Modify | Add `EventHandler` trait; extend `EventWorker` and `create_event_channel` |
|
|
||||||
| `crates/application/src/commands.rs` | Modify | Add `#[derive(Clone)]` to `SyncPosterCommand` |
|
|
||||||
| `crates/presentation/src/lib.rs` | Modify | Expose `pub mod event_handlers` |
|
|
||||||
| `crates/presentation/src/event_handlers.rs` | Create | `PosterSyncHandler` implementation |
|
|
||||||
| `crates/presentation/src/main.rs` | Modify | Wire `PosterSyncHandler` into `create_event_channel` |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Task 1: Add `EventHandler` trait and update `EventWorker`
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Modify: `crates/adapters/event-publisher/src/lib.rs`
|
|
||||||
|
|
||||||
- [ ] **Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Add to the bottom of `crates/adapters/event-publisher/src/lib.rs`:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use std::sync::{Arc, Mutex};
|
|
||||||
use async_trait::async_trait;
|
|
||||||
use domain::{
|
|
||||||
errors::DomainError,
|
|
||||||
events::DomainEvent,
|
|
||||||
value_objects::{ExternalMetadataId, MovieId},
|
|
||||||
};
|
|
||||||
|
|
||||||
struct RecordingHandler {
|
|
||||||
calls: Arc<Mutex<Vec<String>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl EventHandler for RecordingHandler {
|
|
||||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
|
||||||
let label = match event {
|
|
||||||
DomainEvent::MovieDiscovered { .. } => "movie_discovered",
|
|
||||||
DomainEvent::ReviewLogged { .. } => "review_logged",
|
|
||||||
};
|
|
||||||
self.calls.lock().unwrap().push(label.to_string());
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn single_handler_receives_event() {
|
|
||||||
let calls = Arc::new(Mutex::new(vec![]));
|
|
||||||
let handler = RecordingHandler { calls: Arc::clone(&calls) };
|
|
||||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
|
||||||
let (publisher, worker) = create_event_channel(config, vec![Box::new(handler)]);
|
|
||||||
|
|
||||||
tokio::spawn(worker.run());
|
|
||||||
|
|
||||||
let event = DomainEvent::MovieDiscovered {
|
|
||||||
movie_id: MovieId::generate(),
|
|
||||||
external_metadata_id: ExternalMetadataId::new("tt1234567".into()).unwrap(),
|
|
||||||
};
|
|
||||||
publisher.publish(&event).await.unwrap();
|
|
||||||
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
|
||||||
|
|
||||||
assert_eq!(*calls.lock().unwrap(), vec!["movie_discovered"]);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn multiple_handlers_all_receive_event() {
|
|
||||||
let calls1 = Arc::new(Mutex::new(vec![]));
|
|
||||||
let calls2 = Arc::new(Mutex::new(vec![]));
|
|
||||||
let handler1 = RecordingHandler { calls: Arc::clone(&calls1) };
|
|
||||||
let handler2 = RecordingHandler { calls: Arc::clone(&calls2) };
|
|
||||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
|
||||||
let (publisher, worker) = create_event_channel(
|
|
||||||
config,
|
|
||||||
vec![Box::new(handler1), Box::new(handler2)],
|
|
||||||
);
|
|
||||||
|
|
||||||
tokio::spawn(worker.run());
|
|
||||||
|
|
||||||
let event = DomainEvent::MovieDiscovered {
|
|
||||||
movie_id: MovieId::generate(),
|
|
||||||
external_metadata_id: ExternalMetadataId::new("tt9999999".into()).unwrap(),
|
|
||||||
};
|
|
||||||
publisher.publish(&event).await.unwrap();
|
|
||||||
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
|
||||||
|
|
||||||
assert_eq!(calls1.lock().unwrap().len(), 1);
|
|
||||||
assert_eq!(calls2.lock().unwrap().len(), 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn handler_error_does_not_stop_worker() {
|
|
||||||
struct FailingHandler;
|
|
||||||
#[async_trait]
|
|
||||||
impl EventHandler for FailingHandler {
|
|
||||||
async fn handle(&self, _: &DomainEvent) -> Result<(), DomainError> {
|
|
||||||
Err(DomainError::InfrastructureError("boom".into()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let calls = Arc::new(Mutex::new(vec![]));
|
|
||||||
let good = RecordingHandler { calls: Arc::clone(&calls) };
|
|
||||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
|
||||||
let (publisher, worker) = create_event_channel(
|
|
||||||
config,
|
|
||||||
vec![Box::new(FailingHandler), Box::new(good)],
|
|
||||||
);
|
|
||||||
|
|
||||||
tokio::spawn(worker.run());
|
|
||||||
|
|
||||||
let event = DomainEvent::MovieDiscovered {
|
|
||||||
movie_id: MovieId::generate(),
|
|
||||||
external_metadata_id: ExternalMetadataId::new("tt0000001".into()).unwrap(),
|
|
||||||
};
|
|
||||||
publisher.publish(&event).await.unwrap();
|
|
||||||
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
|
||||||
|
|
||||||
// good handler still ran despite failing handler before it
|
|
||||||
assert_eq!(calls.lock().unwrap().len(), 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- [ ] **Step 2: Run tests to verify they fail**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo test -p event-publisher 2>&1 | tail -20
|
|
||||||
```
|
|
||||||
|
|
||||||
Expected: compile errors — `EventHandler` not defined, `create_event_channel` wrong arity.
|
|
||||||
|
|
||||||
- [ ] **Step 3: Replace `lib.rs` with updated implementation**
|
|
||||||
|
|
||||||
Replace the full content of `crates/adapters/event-publisher/src/lib.rs` with:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
use async_trait::async_trait;
|
|
||||||
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
|
|
||||||
use tokio::sync::mpsc;
|
|
||||||
|
|
||||||
pub struct EventPublisherConfig {
|
|
||||||
pub channel_buffer: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl EventPublisherConfig {
|
|
||||||
pub fn from_env() -> Self {
|
|
||||||
let channel_buffer = std::env::var("EVENT_CHANNEL_BUFFER")
|
|
||||||
.ok()
|
|
||||||
.and_then(|v| v.parse().ok())
|
|
||||||
.unwrap_or(128);
|
|
||||||
Self { channel_buffer }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
pub trait EventHandler: Send + Sync {
|
|
||||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct ChannelEventPublisher {
|
|
||||||
sender: mpsc::Sender<DomainEvent>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl EventPublisher for ChannelEventPublisher {
|
|
||||||
async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
|
||||||
self.sender
|
|
||||||
.send(event.clone())
|
|
||||||
.await
|
|
||||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct EventWorker {
|
|
||||||
receiver: mpsc::Receiver<DomainEvent>,
|
|
||||||
handlers: Vec<Box<dyn EventHandler>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl EventWorker {
|
|
||||||
pub async fn run(mut self) {
|
|
||||||
while let Some(event) = self.receiver.recv().await {
|
|
||||||
match &event {
|
|
||||||
DomainEvent::ReviewLogged {
|
|
||||||
review_id,
|
|
||||||
movie_id,
|
|
||||||
user_id,
|
|
||||||
rating,
|
|
||||||
watched_at,
|
|
||||||
} => {
|
|
||||||
tracing::info!(
|
|
||||||
review_id = %review_id.value(),
|
|
||||||
movie_id = %movie_id.value(),
|
|
||||||
user_id = %user_id.value(),
|
|
||||||
rating = rating.value(),
|
|
||||||
watched_at = %watched_at,
|
|
||||||
"event: review_logged"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
DomainEvent::MovieDiscovered {
|
|
||||||
movie_id,
|
|
||||||
external_metadata_id,
|
|
||||||
} => {
|
|
||||||
tracing::info!(
|
|
||||||
movie_id = %movie_id.value(),
|
|
||||||
external_id = external_metadata_id.value(),
|
|
||||||
"event: movie_discovered"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for handler in &self.handlers {
|
|
||||||
if let Err(e) = handler.handle(&event).await {
|
|
||||||
tracing::error!("event handler error: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
tracing::info!("event worker shut down");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn create_event_channel(
|
|
||||||
config: EventPublisherConfig,
|
|
||||||
handlers: Vec<Box<dyn EventHandler>>,
|
|
||||||
) -> (ChannelEventPublisher, EventWorker) {
|
|
||||||
let (tx, rx) = mpsc::channel(config.channel_buffer);
|
|
||||||
(
|
|
||||||
ChannelEventPublisher { sender: tx },
|
|
||||||
EventWorker {
|
|
||||||
receiver: rx,
|
|
||||||
handlers,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
// paste the test module from Step 1 here
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- [ ] **Step 4: Run tests to verify they pass**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo test -p event-publisher 2>&1 | tail -20
|
|
||||||
```
|
|
||||||
|
|
||||||
Expected: `test result: ok. 3 passed`
|
|
||||||
|
|
||||||
- [ ] **Step 5: Commit**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git add crates/adapters/event-publisher/src/lib.rs
|
|
||||||
git commit -m "feat(event-publisher): add EventHandler trait and fan-out in EventWorker"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Task 2: Derive `Clone` on `SyncPosterCommand`
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Modify: `crates/application/src/commands.rs`
|
|
||||||
|
|
||||||
The `PosterSyncHandler` retry loop reconstructs the command on each attempt, which requires `Clone` on `String` (already impl'd) and `Uuid` (Copy) — but it's cleaner to `#[derive(Clone)]` directly.
|
|
||||||
|
|
||||||
- [ ] **Step 1: Add `#[derive(Clone)]` to `SyncPosterCommand`**
|
|
||||||
|
|
||||||
In `crates/application/src/commands.rs`, find the `SyncPosterCommand` struct (line ~17) and add the derive:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct SyncPosterCommand {
|
|
||||||
pub movie_id: Uuid,
|
|
||||||
pub external_metadata_id: String,
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- [ ] **Step 2: Verify it compiles**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo build -p application 2>&1 | tail -10
|
|
||||||
```
|
|
||||||
|
|
||||||
Expected: clean build.
|
|
||||||
|
|
||||||
- [ ] **Step 3: Commit**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git add crates/application/src/commands.rs
|
|
||||||
git commit -m "feat(application): derive Clone on SyncPosterCommand"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Task 3: Implement `PosterSyncHandler`
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Create: `crates/presentation/src/event_handlers.rs`
|
|
||||||
- Modify: `crates/presentation/src/lib.rs`
|
|
||||||
|
|
||||||
- [ ] **Step 1: Write the failing test first — create `event_handlers.rs` with tests only**
|
|
||||||
|
|
||||||
Create `crates/presentation/src/event_handlers.rs`:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
use application::{commands::SyncPosterCommand, context::AppContext, use_cases::sync_poster};
|
|
||||||
use async_trait::async_trait;
|
|
||||||
use domain::{errors::DomainError, events::DomainEvent};
|
|
||||||
use event_publisher::EventHandler;
|
|
||||||
|
|
||||||
pub struct PosterSyncHandler {
|
|
||||||
ctx: AppContext,
|
|
||||||
max_retries: u32,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PosterSyncHandler {
|
|
||||||
pub fn new(ctx: AppContext, max_retries: u32) -> Self {
|
|
||||||
Self { ctx, max_retries }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl EventHandler for PosterSyncHandler {
|
|
||||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
|
||||||
todo!()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use async_trait::async_trait;
|
|
||||||
use application::config::AppConfig;
|
|
||||||
use domain::{
|
|
||||||
errors::DomainError,
|
|
||||||
events::DomainEvent,
|
|
||||||
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
|
|
||||||
ports::{
|
|
||||||
AuthService, EventPublisher, GeneratedToken, MetadataClient, MetadataSearchCriteria,
|
|
||||||
MovieRepository, PasswordHasher, PosterFetcherClient, PosterStorage, UserRepository,
|
|
||||||
},
|
|
||||||
value_objects::{
|
|
||||||
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
|
|
||||||
Rating, ReleaseYear, ReviewId, UserId,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
// Panic stubs — never called in the "ignored event" test path
|
|
||||||
struct PanicRepo;
|
|
||||||
struct PanicMetadata;
|
|
||||||
struct PanicFetcher;
|
|
||||||
struct PanicStorage;
|
|
||||||
struct PanicAuth;
|
|
||||||
struct PanicHasher;
|
|
||||||
struct PanicUserRepo;
|
|
||||||
struct NoopPublisher;
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl MovieRepository for PanicRepo {
|
|
||||||
async fn get_movie_by_external_id(&self, _: &ExternalMetadataId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
|
||||||
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
|
||||||
async fn get_movies_by_title_and_year(&self, _: &MovieTitle, _: &ReleaseYear) -> Result<Vec<Movie>, DomainError> { panic!("unexpected") }
|
|
||||||
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
|
|
||||||
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
|
|
||||||
async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
|
|
||||||
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl MetadataClient for PanicMetadata {
|
|
||||||
async fn fetch_movie_metadata(&self, _: &MetadataSearchCriteria) -> Result<Movie, DomainError> { panic!("unexpected") }
|
|
||||||
async fn get_poster_url(&self, _: &ExternalMetadataId) -> Result<Option<PosterUrl>, DomainError> { panic!("unexpected") }
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl PosterFetcherClient for PanicFetcher {
|
|
||||||
async fn fetch_poster_bytes(&self, _: &PosterUrl) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl PosterStorage for PanicStorage {
|
|
||||||
async fn store_poster(&self, _: &MovieId, _: &[u8]) -> Result<PosterPath, DomainError> { panic!("unexpected") }
|
|
||||||
async fn get_poster(&self, _: &PosterPath) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl AuthService for PanicAuth {
|
|
||||||
async fn generate_token(&self, _: &UserId) -> Result<GeneratedToken, DomainError> { panic!("unexpected") }
|
|
||||||
async fn validate_token(&self, _: &str) -> Result<UserId, DomainError> { panic!("unexpected") }
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl PasswordHasher for PanicHasher {
|
|
||||||
async fn hash(&self, _: &str) -> Result<PasswordHash, DomainError> { panic!("unexpected") }
|
|
||||||
async fn verify(&self, _: &str, _: &PasswordHash) -> Result<bool, DomainError> { panic!("unexpected") }
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl UserRepository for PanicUserRepo {
|
|
||||||
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") }
|
|
||||||
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") }
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl EventPublisher for NoopPublisher {
|
|
||||||
async fn publish(&self, _: &DomainEvent) -> Result<(), DomainError> { Ok(()) }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn panic_ctx() -> AppContext {
|
|
||||||
AppContext {
|
|
||||||
repository: Arc::new(PanicRepo),
|
|
||||||
metadata_client: Arc::new(PanicMetadata),
|
|
||||||
poster_fetcher: Arc::new(PanicFetcher),
|
|
||||||
poster_storage: Arc::new(PanicStorage),
|
|
||||||
event_publisher: Arc::new(NoopPublisher),
|
|
||||||
auth_service: Arc::new(PanicAuth),
|
|
||||||
password_hasher: Arc::new(PanicHasher),
|
|
||||||
user_repository: Arc::new(PanicUserRepo),
|
|
||||||
config: AppConfig { allow_registration: false },
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn review_logged_is_ignored() {
|
|
||||||
let handler = PosterSyncHandler::new(panic_ctx(), 3);
|
|
||||||
let event = DomainEvent::ReviewLogged {
|
|
||||||
review_id: ReviewId::generate(),
|
|
||||||
movie_id: MovieId::generate(),
|
|
||||||
user_id: UserId::generate(),
|
|
||||||
rating: Rating::new(4).unwrap(),
|
|
||||||
watched_at: chrono::NaiveDateTime::from_timestamp_opt(0, 0).unwrap(),
|
|
||||||
};
|
|
||||||
// returns Ok without touching any panic stubs
|
|
||||||
assert!(handler.handle(&event).await.is_ok());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- [ ] **Step 2: Expose the module in `lib.rs`**
|
|
||||||
|
|
||||||
Add to `crates/presentation/src/lib.rs`:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
pub mod event_handlers;
|
|
||||||
```
|
|
||||||
|
|
||||||
- [ ] **Step 3: Run the test to verify it fails**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo test -p presentation event_handlers 2>&1 | tail -20
|
|
||||||
```
|
|
||||||
|
|
||||||
Expected: compile error or test failure because `handle` is `todo!()`.
|
|
||||||
|
|
||||||
- [ ] **Step 4: Implement `handle` in `PosterSyncHandler`**
|
|
||||||
|
|
||||||
Replace the `todo!()` body in `crates/presentation/src/event_handlers.rs`:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
#[async_trait]
|
|
||||||
impl EventHandler for PosterSyncHandler {
|
|
||||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
|
||||||
let (movie_id, external_metadata_id) = match event {
|
|
||||||
DomainEvent::MovieDiscovered {
|
|
||||||
movie_id,
|
|
||||||
external_metadata_id,
|
|
||||||
} => (movie_id.value(), external_metadata_id.value().to_owned()),
|
|
||||||
_ => return Ok(()),
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut last_err: Option<DomainError> = None;
|
|
||||||
for attempt in 0..=self.max_retries {
|
|
||||||
let cmd = SyncPosterCommand {
|
|
||||||
movie_id,
|
|
||||||
external_metadata_id: external_metadata_id.clone(),
|
|
||||||
};
|
|
||||||
match sync_poster::execute(&self.ctx, cmd).await {
|
|
||||||
Ok(()) => return Ok(()),
|
|
||||||
Err(e) => {
|
|
||||||
if attempt < self.max_retries {
|
|
||||||
let delay = Duration::from_secs(2u64.pow(attempt));
|
|
||||||
tracing::warn!(
|
|
||||||
attempt = attempt + 1,
|
|
||||||
max_attempts = self.max_retries + 1,
|
|
||||||
delay_secs = delay.as_secs(),
|
|
||||||
"poster sync failed, retrying: {e}"
|
|
||||||
);
|
|
||||||
tokio::time::sleep(delay).await;
|
|
||||||
}
|
|
||||||
last_err = Some(e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let err = last_err.unwrap();
|
|
||||||
tracing::error!(
|
|
||||||
attempts = self.max_retries + 1,
|
|
||||||
"poster sync failed after all attempts: {err}"
|
|
||||||
);
|
|
||||||
Err(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- [ ] **Step 5: Run the test to verify it passes**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo test -p presentation event_handlers 2>&1 | tail -20
|
|
||||||
```
|
|
||||||
|
|
||||||
Expected: `test result: ok. 1 passed`
|
|
||||||
|
|
||||||
- [ ] **Step 6: Commit**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git add crates/presentation/src/event_handlers.rs crates/presentation/src/lib.rs
|
|
||||||
git commit -m "feat(presentation): implement PosterSyncHandler with retry"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Task 4: Wire `PosterSyncHandler` in `main.rs`
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Modify: `crates/presentation/src/main.rs`
|
|
||||||
|
|
||||||
- [ ] **Step 1: Add the import**
|
|
||||||
|
|
||||||
In `crates/presentation/src/main.rs`, update the import block. The existing line is:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
use event_publisher::{EventPublisherConfig, create_event_channel};
|
|
||||||
```
|
|
||||||
|
|
||||||
Add below it:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
use presentation::event_handlers::PosterSyncHandler;
|
|
||||||
```
|
|
||||||
|
|
||||||
- [ ] **Step 2: Wire the handler**
|
|
||||||
|
|
||||||
In `wire_dependencies`, find the two existing lines:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
let (event_publisher, event_worker) = create_event_channel(EventPublisherConfig::from_env());
|
|
||||||
tokio::spawn(event_worker.run());
|
|
||||||
```
|
|
||||||
|
|
||||||
Replace with:
|
|
||||||
|
|
||||||
```rust
|
|
||||||
let poster_handler = PosterSyncHandler::new(app_ctx.clone(), 3); // 3 retries = 4 total attempts
|
|
||||||
let (event_publisher, event_worker) = create_event_channel(
|
|
||||||
EventPublisherConfig::from_env(),
|
|
||||||
vec![Box::new(poster_handler)],
|
|
||||||
);
|
|
||||||
tokio::spawn(event_worker.run());
|
|
||||||
```
|
|
||||||
|
|
||||||
Note: `app_ctx.clone()` is cheap — all fields are `Arc<dyn Trait>`.
|
|
||||||
|
|
||||||
- [ ] **Step 3: Build the full workspace**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo build 2>&1 | tail -20
|
|
||||||
```
|
|
||||||
|
|
||||||
Expected: clean build with no errors.
|
|
||||||
|
|
||||||
- [ ] **Step 4: Run all tests**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo test 2>&1 | tail -20
|
|
||||||
```
|
|
||||||
|
|
||||||
Expected: all tests pass.
|
|
||||||
|
|
||||||
- [ ] **Step 5: Commit**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git add crates/presentation/src/main.rs
|
|
||||||
git commit -m "feat(presentation): wire PosterSyncHandler into event worker"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Verification
|
|
||||||
|
|
||||||
After all tasks complete, smoke-test end-to-end:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Start the server
|
|
||||||
RUST_LOG=info cargo run -p presentation
|
|
||||||
|
|
||||||
# In another terminal: log a review for a movie not yet in the DB
|
|
||||||
# (requires valid JWT — use the existing login endpoint first)
|
|
||||||
|
|
||||||
# Watch the server logs for:
|
|
||||||
# event: movie_discovered movie_id=<uuid> external_id=tt...
|
|
||||||
# poster sync attempt logs (or success with no retries needed)
|
|
||||||
```
|
|
||||||
|
|
||||||
To confirm the poster was stored, check the configured object store bucket/directory for a file named with the movie's UUID.
|
|
||||||
@@ -1,115 +0,0 @@
|
|||||||
# Event-Driven Poster Sync
|
|
||||||
|
|
||||||
**Date:** 2026-05-04
|
|
||||||
**Status:** Approved
|
|
||||||
|
|
||||||
## Problem
|
|
||||||
|
|
||||||
The `EventPublisher` infrastructure exists but only logs events via tracing. When a new movie is discovered (`MovieDiscovered` event), its poster should be automatically downloaded and stored — currently this requires a manual `POST /api/movies/{id}/sync-poster` call.
|
|
||||||
|
|
||||||
## Scope
|
|
||||||
|
|
||||||
- Introduce an `EventHandler` trait for composable event side-effects
|
|
||||||
- Implement `PosterSyncHandler` that reacts to `MovieDiscovered` by running the existing `sync_poster` use case with retry
|
|
||||||
- RSS feed is already generated fresh on every request — no event work needed there
|
|
||||||
|
|
||||||
## Design
|
|
||||||
|
|
||||||
### `EventHandler` trait (in `event-publisher` crate)
|
|
||||||
|
|
||||||
```rust
|
|
||||||
#[async_trait]
|
|
||||||
pub trait EventHandler: Send + Sync {
|
|
||||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Lives in `event-publisher` crate alongside `ChannelEventPublisher`. Depends only on `domain` — no new crate dependencies required.
|
|
||||||
|
|
||||||
### `EventWorker` update
|
|
||||||
|
|
||||||
`EventWorker` gains a `handlers: Vec<Box<dyn EventHandler>>` field. On each received event:
|
|
||||||
1. Log the event via tracing (existing behavior, kept as default baseline)
|
|
||||||
2. Fan out to all handlers concurrently (or sequentially — see note below)
|
|
||||||
3. Handler errors are logged at ERROR level but do not stop the worker or other handlers
|
|
||||||
|
|
||||||
`create_event_channel` signature gains a `handlers` parameter:
|
|
||||||
```rust
|
|
||||||
pub fn create_event_channel(
|
|
||||||
config: EventPublisherConfig,
|
|
||||||
handlers: Vec<Box<dyn EventHandler>>,
|
|
||||||
) -> (ChannelEventPublisher, EventWorker)
|
|
||||||
```
|
|
||||||
|
|
||||||
**Fan-out strategy:** sequential for now (simpler, avoids concurrent mutation of shared state). If handler latency becomes a concern, switch to `futures::future::join_all` (note: tokio itself only provides the fixed-arity `tokio::join!` macro, not a `join_all` for a dynamic collection of handlers).
|
|
||||||
|
|
||||||
### `PosterSyncHandler` (in `presentation` crate)
|
|
||||||
|
|
||||||
New file: `crates/presentation/src/event_handlers.rs`
|
|
||||||
|
|
||||||
```rust
|
|
||||||
pub struct PosterSyncHandler {
|
|
||||||
ctx: AppContext,
|
|
||||||
max_retries: u32,
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Behavior per event:
|
|
||||||
- `MovieDiscovered { movie_id, external_metadata_id }` → build `SyncPosterCommand`, call `sync_poster::execute()` with exponential backoff
|
|
||||||
- All other events → `Ok(())` immediately
|
|
||||||
|
|
||||||
**Retry logic:** up to `max_retries` retries after the initial attempt (default: 3, so 4 total attempts), with exponential backoff delays 1s → 2s → 4s before each retry. After exhausting all attempts, logs at ERROR level and returns `Err`.
|
|
||||||
|
|
||||||
### Wiring (`main.rs`)
|
|
||||||
|
|
||||||
```rust
|
|
||||||
let poster_handler = PosterSyncHandler::new(app_ctx.clone(), 3); // 3 retries = 4 total attempts
|
|
||||||
let (event_publisher, event_worker) = create_event_channel(
|
|
||||||
EventPublisherConfig::from_env(),
|
|
||||||
vec![Box::new(poster_handler)],
|
|
||||||
);
|
|
||||||
tokio::spawn(event_worker.run());
|
|
||||||
```
|
|
||||||
|
|
||||||
`AppContext` is `Clone` (all fields are `Arc<dyn Trait>`), so cloning for the handler is cheap.
|
|
||||||
|
|
||||||
## Data Flow
|
|
||||||
|
|
||||||
```
|
|
||||||
POST /api/diary
|
|
||||||
→ log_review::execute()
|
|
||||||
→ movie not in DB → fetch metadata → MovieDiscovered published
|
|
||||||
→ review saved → ReviewLogged published
|
|
||||||
↓
|
|
||||||
mpsc channel
|
|
||||||
↓
|
|
||||||
EventWorker::run()
|
|
||||||
→ tracing log
|
|
||||||
→ PosterSyncHandler::handle(MovieDiscovered)
|
|
||||||
→ sync_poster::execute() [attempt 1]
|
|
||||||
→ on failure: sleep 1s → attempt 2
|
|
||||||
→ on failure: sleep 2s → attempt 3
|
|
||||||
      → on failure: sleep 4s → attempt 4
        → on failure: log ERROR, done
|
|
||||||
```
|
|
||||||
|
|
||||||
## Files Changed
|
|
||||||
|
|
||||||
| File | Change |
|
|
||||||
|---|---|
|
|
||||||
| `crates/adapters/event-publisher/src/lib.rs` | Add `EventHandler` trait; update `EventWorker` and `create_event_channel` |
|
|
||||||
| `crates/presentation/src/event_handlers.rs` | New — `PosterSyncHandler` |
|
|
||||||
| `crates/presentation/src/main.rs` | Wire `PosterSyncHandler` into `create_event_channel` |
|
|
||||||
|
|
||||||
No new crate dependencies. No changes to domain or application layers.
|
|
||||||
|
|
||||||
## Verification
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo build # full workspace clean build
|
|
||||||
cargo test # existing tests still pass
|
|
||||||
|
|
||||||
# Manual: log a review for a new movie
|
|
||||||
# → check logs for "event: movie_discovered"
|
|
||||||
# → check logs for poster sync attempt
|
|
||||||
# → check object store / storage for saved poster file
|
|
||||||
```
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
# Frontend HTML/CSS Design
|
|
||||||
|
|
||||||
**Date:** 2026-05-04
|
|
||||||
|
|
||||||
## Summary
|
|
||||||
|
|
||||||
Server-rendered HTML frontend using Rust/Axum + Askama templates + HTTP-only cookie JWT auth. No JavaScript.
|
|
||||||
|
|
||||||
## Pages
|
|
||||||
|
|
||||||
| Route | Access | Description |
|
|
||||||
|---|---|---|
|
|
||||||
| GET / | public | Diary index |
|
|
||||||
| GET /login | public | Login form |
|
|
||||||
| POST /login | public | Set cookie → redirect / |
|
|
||||||
| GET /logout | — | Clear cookie → redirect / |
|
|
||||||
| GET /register | public | Only if ALLOW_REGISTRATION |
|
|
||||||
| POST /register | public | Set cookie → redirect / |
|
|
||||||
| GET /reviews/new | auth | New review form |
|
|
||||||
| POST /reviews | auth | Log review → redirect / |
|
|
||||||
|
|
||||||
## Design Decisions
|
|
||||||
|
|
||||||
- **Auth:** Cookie-based JWT (HttpOnly, SameSite=Lax). Existing Bearer auth untouched.
|
|
||||||
- **Template inheritance:** base.html owns header. Child templates use {% extends %}/{% block %}.
|
|
||||||
- **Entry layout:** Poster thumbnail (60px) + text block. Fallback to text-only when no poster.
|
|
||||||
- **Header (logged out):** [Login] [Register?]
|
|
||||||
- **Header (logged in):** [Add Review] email@example.com [Logout]
|
|
||||||
- **Form errors:** PRG → redirect back with ?error=<msg>
|
|
||||||
- **Diary visibility:** Public (anyone can read, auth required to add)
|
|
||||||
@@ -34,6 +34,21 @@ body {
|
|||||||
background: url("/static/background.avif") center / cover no-repeat fixed;
|
background: url("/static/background.avif") center / cover no-repeat fixed;
|
||||||
min-height: 100%;
|
min-height: 100%;
|
||||||
line-height: 1.5;
|
line-height: 1.5;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
body::before {
|
||||||
|
content: "";
|
||||||
|
position: fixed;
|
||||||
|
inset: 0;
|
||||||
|
background: rgba(0, 0, 0, 0.2);
|
||||||
|
z-index: 0;
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
body > * {
|
||||||
|
position: relative;
|
||||||
|
z-index: 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
a {
|
a {
|
||||||
@@ -490,10 +505,10 @@ form button[type="submit"]:hover {
|
|||||||
display: flex;
|
display: flex;
|
||||||
align-items: flex-end;
|
align-items: flex-end;
|
||||||
gap: 4px;
|
gap: 4px;
|
||||||
height: 80px;
|
|
||||||
}
|
}
|
||||||
.bar-col { flex: 1; display: flex; flex-direction: column; align-items: center; justify-content: flex-end; gap: 3px; }
|
.bar-col { flex: 1; display: flex; flex-direction: column; align-items: center; gap: 2px; }
|
||||||
.bar-fill { width: 100%; background: var(--primary); border-radius: 3px 3px 0 0; min-height: 4px; opacity: 0.8; }
|
.bar-value { font-size: 0.6rem; color: var(--primary); opacity: 0.9; line-height: 1; }
|
||||||
|
.bar-fill { width: 100%; background: var(--primary); border-radius: 3px 3px 0 0; min-height: 3px; opacity: 0.8; }
|
||||||
.bar-month { font-size: 0.65rem; opacity: 0.5; }
|
.bar-month { font-size: 0.65rem; opacity: 0.5; }
|
||||||
|
|
||||||
.director-chart { display: flex; flex-direction: column; gap: 6px; }
|
.director-chart { display: flex; flex-direction: column; gap: 6px; }
|
||||||
|
|||||||
Reference in New Issue
Block a user