Compare commits
20 commits: e28f628c80...master

| SHA1 |
|---|
| 7a66661932 |
| b30a6a102b |
| 38a3aa6bbf |
| 3135a15cb3 |
| d083f8ae3d |
| 874c406d4a |
| 78e1f4ef72 |
| cf74b06b4a |
| 317898d51b |
| 790bb6fbb5 |
| 658df38788 |
| cff0f854fa |
| 66ade70273 |
| cbd2ac5b3e |
| 0433cd4d9b |
| b5a8ea2395 |
| 49b79799c1 |
| f4aba551a2 |
| 91df35dbd3 |
| 623f90e43f |
@@ -4,3 +4,7 @@ target/
*.db
*.db-shm
*.db-wal
.cargo/
.sqlx/
docs/
dev.db
@@ -1,5 +1,7 @@
DATABASE_URL=sqlite:./dev.db
BASE_URL=http://localhost:3000
PORT=3000
SECURE_COOKIES=false
JWT_SECRET=
JWT_TTL_SECONDS=
ALLOW_REGISTRATION=true
.gitignore (vendored)
@@ -8,6 +8,8 @@
.env.prod

*.db
*db-shm
*db-wal

.worktrees/
.superpowers/
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
"query": "SELECT COUNT(*) AS \"total!: i64\",\n AVG(CAST(rating AS REAL)) AS avg_rating\n FROM reviews WHERE user_id = ?",
"query": "SELECT COUNT(DISTINCT movie_id) AS \"total!: i64\",\n AVG(CAST(rating AS REAL)) AS avg_rating\n FROM reviews WHERE user_id = ?",
"describe": {
"columns": [
{
@@ -22,5 +22,5 @@
true
]
},
"hash": "d59e1a103fc56b9b4579add523f0f77b68500cf4c96002a4a17b1e40093504ba"
"hash": "a01336632a54099e31686a9cbe6fc53fef1299fc7c7b52be44f99c2302490a22"
}
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
"query": "SELECT u.id,\n u.email,\n COUNT(r.id) AS \"total_movies!: i64\",\n AVG(CAST(r.rating AS REAL)) AS avg_rating\n FROM users u\n LEFT JOIN reviews r ON r.user_id = u.id\n GROUP BY u.id, u.email\n ORDER BY u.email ASC",
"query": "SELECT u.id,\n u.email,\n COUNT(DISTINCT r.movie_id) AS \"total_movies!: i64\",\n AVG(CAST(r.rating AS REAL)) AS avg_rating\n FROM users u\n LEFT JOIN reviews r ON r.user_id = u.id\n GROUP BY u.id, u.email\n ORDER BY u.email ASC",
"describe": {
"columns": [
{
@@ -34,5 +34,5 @@
true
]
},
"hash": "41273bd5f2ad4e86bb2f60d7b3144279f2ae77a95a8ea61bbf3dbfef2d861dd8"
"hash": "f259059d76f29cade94e249735d37ef4993fe5bff095dc43e681b848a398f318"
}
@@ -3,22 +3,21 @@ use domain::models::DiaryEntry;
use rss_feed::{ChannelBuilder, GuidBuilder, ItemBuilder};

pub struct RssAdapter {
    feed_title: String,
    feed_link: String,
}

impl RssAdapter {
    pub fn new(feed_title: String, feed_link: String) -> Self {
        Self { feed_title, feed_link }
    pub fn new(feed_link: String) -> Self {
        Self { feed_link }
    }
}

impl RssFeedRenderer for RssAdapter {
    fn render_feed(&self, entries: &[DiaryEntry]) -> Result<String, String> {
    fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String> {
        let items = entries
            .iter()
            .map(|e| {
                let title = format!(
                let item_title = format!(
                    "{} ({})",
                    e.movie().title().value(),
                    e.movie().release_year().value()
@@ -38,7 +37,7 @@ impl RssFeedRenderer for RssAdapter {
                .permalink(false)
                .build();
                ItemBuilder::default()
                    .title(Some(title))
                    .title(Some(item_title))
                    .description(Some(description))
                    .pub_date(Some(pub_date))
                    .guid(Some(guid))
@@ -47,12 +46,31 @@ impl RssFeedRenderer for RssAdapter {
            .collect::<Vec<_>>();

        let channel = ChannelBuilder::default()
            .title(self.feed_title.clone())
            .title(title.to_string())
            .link(self.feed_link.clone())
            .description(self.feed_title.clone())
            .description(title.to_string())
            .items(items)
            .build();

        Ok(channel.to_string())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn render_feed_uses_provided_title() {
        let adapter = RssAdapter::new("http://example.com".into());
        let xml = adapter.render_feed(&[], "Custom Title").unwrap();
        assert!(xml.contains("<title>Custom Title</title>"));
    }

    #[test]
    fn render_feed_empty_entries_produces_valid_xml() {
        let adapter = RssAdapter::new("http://example.com".into());
        let xml = adapter.render_feed(&[], "My Feed").unwrap();
        assert!(xml.starts_with("<?xml") || xml.starts_with("<rss"));
    }
}
@@ -240,7 +240,7 @@ impl SqliteMovieRepository {
    async fn fetch_user_totals(&self, user_id: &str) -> Result<UserTotalsRow, DomainError> {
        sqlx::query_as!(
            UserTotalsRow,
            r#"SELECT COUNT(*) AS "total!: i64",
            r#"SELECT COUNT(DISTINCT movie_id) AS "total!: i64",
                AVG(CAST(rating AS REAL)) AS avg_rating
                FROM reviews WHERE user_id = ?"#,
            user_id
@@ -104,7 +104,7 @@ impl UserRepository for SqliteUserRepository {
            UserSummaryRow,
            r#"SELECT u.id,
                u.email,
                COUNT(r.id) AS "total_movies!: i64",
                COUNT(DISTINCT r.movie_id) AS "total_movies!: i64",
                AVG(CAST(r.rating AS REAL)) AS avg_rating
                FROM users u
                LEFT JOIN reviews r ON r.user_id = u.id
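
Not part of the changeset: a minimal sketch of why the two queries above move from COUNT(*) / COUNT(r.id) to COUNT(DISTINCT movie_id). It assumes the sqlx crate with its SQLite driver and a tokio test runtime; the table and column names below are reduced to the minimum needed for the illustration.

```rust
#[tokio::test]
async fn distinct_movie_count_ignores_rewatches() -> Result<(), sqlx::Error> {
    let pool = sqlx::SqlitePool::connect("sqlite::memory:").await?;
    sqlx::query("CREATE TABLE reviews (id TEXT, user_id TEXT, movie_id TEXT, rating INTEGER)")
        .execute(&pool)
        .await?;
    // One user reviews the same movie twice (a rewatch).
    sqlx::query("INSERT INTO reviews VALUES ('r1', 'u1', 'm1', 4), ('r2', 'u1', 'm1', 5)")
        .execute(&pool)
        .await?;

    let total: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM reviews WHERE user_id = 'u1'")
        .fetch_one(&pool)
        .await?;
    let distinct: i64 =
        sqlx::query_scalar("SELECT COUNT(DISTINCT movie_id) FROM reviews WHERE user_id = 'u1'")
            .fetch_one(&pool)
            .await?;

    assert_eq!(total, 2);    // old aggregate: a rewatch inflates the "movies watched" total
    assert_eq!(distinct, 1); // new aggregate: one movie, however many times it was reviewed
    Ok(())
}
```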
@@ -59,7 +59,7 @@ struct UsersTemplate<'a> {

struct MonthlyRatingRow<'a> {
    rating: &'a MonthlyRating,
    bar_height_pct: i64,
    bar_height_px: i64,
}

#[derive(Template)]
@@ -125,8 +125,8 @@ fn build_heatmap(history: &[MonthActivity]) -> Vec<HeatmapCell> {
    }).collect()
}

fn bar_height_pct(avg_rating: f64) -> i64 {
    (avg_rating / 5.0 * 100.0) as i64
fn bar_height_px(avg_rating: f64) -> i64 {
    (avg_rating / 5.0 * 60.0) as i64
}

pub struct AskamaHtmlRenderer;
@@ -207,7 +207,7 @@ impl HtmlRenderer for AskamaHtmlRenderer {
        .split('@').next().unwrap_or(&data.profile_user_email).to_string();
    let monthly_rating_rows: Vec<MonthlyRatingRow<'_>> = data.trends.as_ref()
        .map(|t| t.monthly_ratings.iter().map(|r| MonthlyRatingRow {
            bar_height_pct: bar_height_pct(r.avg_rating),
            bar_height_px: bar_height_px(r.avg_rating),
            rating: r,
        }).collect())
        .unwrap_or_default();
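
A quick sketch (not in the diff) of what the renamed helper now returns: the 0-5 star average is scaled onto a 60px bar height instead of a 0-100 percentage, truncating toward zero via the `as i64` cast. `bar_height_px` is the function from the hunk above.

```rust
#[test]
fn bar_height_px_maps_five_stars_to_60px() {
    assert_eq!(bar_height_px(5.0), 60);
    assert_eq!(bar_height_px(4.2), 50); // 4.2 / 5.0 * 60.0 = 50.4, truncated by the cast
    assert_eq!(bar_height_px(0.0), 0);
}
```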
@@ -3,7 +3,15 @@
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>Movies Diary</title>
    <title>{{ ctx.page_title }}</title>
    <meta name="description" content="A personal movie diary — track what you watch, rate and review films.">
    <meta property="og:type" content="website">
    <meta property="og:site_name" content="Movies Diary">
    <meta property="og:title" content="{{ ctx.page_title }}">
    <meta property="og:url" content="{{ ctx.canonical_url }}">
    <meta name="twitter:card" content="summary">
    <meta name="twitter:title" content="{{ ctx.page_title }}">
    <link rel="canonical" href="{{ ctx.canonical_url }}">
    <link rel="preconnect" href="https://fonts.googleapis.com">
    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
    <link href="https://fonts.googleapis.com/css2?family=Nunito:wght@400;600;700;800&display=swap" rel="stylesheet">
@@ -15,7 +23,7 @@
<nav>
    <a href="/">Feed</a>
    <a href="/users">Users</a>
    <a href="/feed.rss">RSS</a>
    <a href="{{ ctx.rss_url }}">RSS</a>
    {% if let Some(email) = ctx.user_email %}
    <a href="/reviews/new">Add Review</a>
    <a href="/logout">Logout</a>
@@ -37,7 +37,7 @@
<div class="heatmap-label">Movies watched this year</div>
<div class="heatmap">
    {% for cell in heatmap %}
    <div class="heatmap-cell" style="background: rgba(74, 158, 255, {{ cell.alpha }})">
    <div class="heatmap-cell" style="--alpha: {{ cell.alpha }}">
        <div class="heatmap-count">{{ cell.count }}</div>
        <div class="heatmap-month">{{ cell.month_label }}</div>
    </div>
@@ -81,7 +81,8 @@
<div class="bar-chart">
    {% for row in monthly_rating_rows %}
    <div class="bar-col">
        <div class="bar-fill" style="height: {{ row.bar_height_pct }}%"></div>
        <div class="bar-value">{{ "{:.1}"|format(row.rating.avg_rating) }}</div>
        <div class="bar-fill" style="height: {{ row.bar_height_px }}px"></div>
        <div class="bar-month">{{ row.rating.month_label }}</div>
    </div>
    {% endfor %}
@@ -1,6 +1,7 @@
#[derive(Clone)]
pub struct AppConfig {
    pub allow_registration: bool,
    pub base_url: String,
}

impl AppConfig {
@@ -8,6 +9,8 @@ impl AppConfig {
        let allow_registration = std::env::var("ALLOW_REGISTRATION")
            .map(|v| v == "true" || v == "1")
            .unwrap_or(false);
        Self { allow_registration }
        let base_url = std::env::var("BASE_URL")
            .unwrap_or_else(|_| "http://localhost:3000".to_string());
        Self { allow_registration, base_url }
    }
}
@@ -6,6 +6,9 @@ pub struct HtmlPageContext {
    pub user_email: Option<String>,
    pub user_id: Option<Uuid>,
    pub register_enabled: bool,
    pub rss_url: String,
    pub page_title: String,
    pub canonical_url: String,
}

impl HtmlPageContext {
@@ -67,5 +70,5 @@ pub trait HtmlRenderer: Send + Sync {
}

pub trait RssFeedRenderer: Send + Sync {
    fn render_feed(&self, entries: &[DiaryEntry]) -> Result<String, String>;
    fn render_feed(&self, entries: &[DiaryEntry], title: &str) -> Result<String, String>;
}
@@ -6,6 +6,7 @@ pub struct GetDiaryQuery {
    pub offset: Option<u32>,
    pub sort_by: Option<SortDirection>,
    pub movie_id: Option<Uuid>,
    pub user_id: Option<Uuid>,
}

pub struct GetReviewHistoryQuery {
@@ -4,7 +4,7 @@ use domain::{
        DiaryEntry, DiaryFilter, SortDirection,
        collections::{PageParams, Paginated},
    },
    value_objects::MovieId,
    value_objects::{MovieId, UserId},
};

use crate::{context::AppContext, queries::GetDiaryQuery};
@@ -14,17 +14,15 @@ pub async fn execute(
    query: GetDiaryQuery,
) -> Result<Paginated<DiaryEntry>, DomainError> {
    let page = PageParams::new(query.limit, query.offset)?;

    let movie_id = query.movie_id.map(MovieId::from_uuid);
    let user_id = query.user_id.map(UserId::from_uuid);

    let filter = DiaryFilter {
        sort_by: query.sort_by.unwrap_or(SortDirection::Descending),
        page,
        movie_id,
        user_id: None,
        user_id,
    };

    let paginated_results = ctx.repository.query_diary(&filter).await?;

    Ok(paginated_results)
    ctx.repository.query_diary(&filter).await
}
@@ -2,15 +2,25 @@ use domain::{errors::DomainError, models::User, value_objects::Email};

use crate::{commands::RegisterCommand, context::AppContext};

const MIN_PASSWORD_LENGTH: usize = 8;

pub async fn execute(ctx: &AppContext, cmd: RegisterCommand) -> Result<(), DomainError> {
    if !ctx.config.allow_registration {
        return Err(DomainError::Unauthorized("Registration is disabled".into()));
    }

    if cmd.password.len() < MIN_PASSWORD_LENGTH {
        return Err(DomainError::ValidationError(
            "Password must be at least 8 characters".into(),
        ));
    }

    let email = Email::new(cmd.email)?;

    if ctx.user_repository.find_by_email(&email).await?.is_some() {
        return Err(DomainError::ValidationError("Email already registered".into()));
        return Err(DomainError::ValidationError(
            "Email already registered".into(),
        ));
    }

    let hash = ctx.password_hasher.hash(&cmd.password).await?;
@@ -133,14 +133,6 @@ impl Review {
        comment: Option<Comment>,
        watched_at: NaiveDateTime,
    ) -> Result<Self, DomainError> {
        let now = Utc::now().naive_utc();

        if watched_at > now {
            return Err(DomainError::ValidationError(
                "watched_at cannot be in the future".into(),
            ));
        }

        Ok(Self {
            id: ReviewId::generate(),
            movie_id,
@@ -148,7 +140,7 @@ impl Review {
            rating,
            comment,
            watched_at,
            created_at: now,
            created_at: Utc::now().naive_utc(),
        })
    }
@@ -219,6 +219,7 @@ impl From<DiaryQueryParams> for GetDiaryQuery {
                }
            }),
            movie_id: p.movie_id,
            user_id: None,
        }
    }
}
@@ -160,7 +160,7 @@ mod tests {
|
||||
auth_service: Arc::new(PanicAuth),
|
||||
password_hasher: Arc::new(PanicHasher),
|
||||
user_repository: Arc::new(PanicUserRepo),
|
||||
config: AppConfig { allow_registration: false },
|
||||
config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -153,7 +153,7 @@ mod tests {
|
||||
|
||||
struct PanicRssRenderer;
|
||||
impl crate::ports::RssFeedRenderer for PanicRssRenderer {
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry]) -> Result<String, String> { panic!() }
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
|
||||
}
|
||||
|
||||
struct PanicMeta; struct PanicFetcher; struct PanicStorage; struct PanicEvent; struct PanicHasher; struct PanicAuth; struct PanicUserRepo;
|
||||
@@ -175,7 +175,7 @@ mod tests {
|
||||
auth_service: Arc::new(PanicAuth),
|
||||
password_hasher: Arc::new(PanicHasher),
|
||||
user_repository: Arc::new(PanicUserRepo),
|
||||
config: application::config::AppConfig { allow_registration: false },
|
||||
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(PanicRenderer),
|
||||
rss_renderer: Arc::new(PanicRssRenderer),
|
||||
@@ -269,7 +269,7 @@ mod tests {
|
||||
}
|
||||
struct PanicRssRenderer2;
|
||||
impl crate::ports::RssFeedRenderer for PanicRssRenderer2 {
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry]) -> Result<String, String> { panic!() }
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
|
||||
}
|
||||
struct PanicAuth2;
|
||||
crate::state::AppState {
|
||||
@@ -282,7 +282,7 @@ mod tests {
|
||||
auth_service: Arc::new(PanicAuth2),
|
||||
password_hasher: Arc::new(PanicHasher2),
|
||||
user_repository: Arc::new(PanicUserRepo2),
|
||||
config: application::config::AppConfig { allow_registration: false },
|
||||
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(PanicRenderer2),
|
||||
rss_renderer: Arc::new(PanicRssRenderer2),
|
||||
@@ -329,7 +329,7 @@ mod tests {
|
||||
}
|
||||
struct PanicRssRenderer3;
|
||||
impl crate::ports::RssFeedRenderer for PanicRssRenderer3 {
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry]) -> Result<String, String> { panic!() }
|
||||
fn render_feed(&self, _: &[domain::models::DiaryEntry], _: &str) -> Result<String, String> { panic!() }
|
||||
}
|
||||
crate::state::AppState {
|
||||
app_ctx: AppContext {
|
||||
@@ -341,7 +341,7 @@ mod tests {
|
||||
auth_service: Arc::new(RejectingAuth),
|
||||
password_hasher: Arc::new(PanicHasher3),
|
||||
user_repository: Arc::new(PanicUserRepo3),
|
||||
config: application::config::AppConfig { allow_registration: false },
|
||||
config: application::config::AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
|
||||
},
|
||||
html_renderer: Arc::new(PanicRenderer3),
|
||||
rss_renderer: Arc::new(PanicRssRenderer3),
|
||||
|
||||
@@ -1,4 +1,5 @@
const DEFAULT_PAGE_LIMIT: u32 = 5;
const RSS_FEED_LIMIT: u32 = 50;

pub mod html {
    use axum::{
@@ -41,6 +42,9 @@ pub mod html {
            user_email,
            user_id: uuid,
            register_enabled: state.app_ctx.config.allow_registration,
            rss_url: "/feed.rss".to_string(),
            page_title: "Movies Diary".to_string(),
            canonical_url: state.app_ctx.config.base_url.clone(),
        }
    }
@@ -51,10 +55,14 @@ pub mod html {
            .replace('"', "%22")
    }

    fn secure_flag() -> &'static str {
        if std::env::var("SECURE_COOKIES").as_deref() == Ok("true") { "; Secure" } else { "" }
    }

    fn set_cookie_header(token: &str, max_age: i64) -> (axum::http::HeaderName, HeaderValue) {
        let val = format!(
            "token={}; HttpOnly; Path=/; SameSite=Lax; Max-Age={}",
            token, max_age
            "token={}; HttpOnly; Path=/; SameSite=Strict; Max-Age={}{}",
            token, max_age, secure_flag()
        );
        (SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"))
    }
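
A sketch of the cookie string the updated helper emits; it is not part of the diff and assumes `set_cookie_header` and `SET_COOKIE` from the surrounding module are in scope and that `SECURE_COOKIES` is unset, so `secure_flag()` contributes nothing. With `SECURE_COOKIES=true` the same call appends `; Secure`.

```rust
#[test]
fn set_cookie_header_uses_strict_same_site() {
    let (name, value) = set_cookie_header("abc123", 3600);
    assert_eq!(name, SET_COOKIE);
    assert_eq!(
        value.to_str().unwrap(),
        "token=abc123; HttpOnly; Path=/; SameSite=Strict; Max-Age=3600"
    );
}
```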
@@ -67,6 +75,9 @@ pub mod html {
|
||||
user_email: None,
|
||||
user_id: None,
|
||||
register_enabled: state.app_ctx.config.allow_registration,
|
||||
rss_url: "/feed.rss".to_string(),
|
||||
page_title: "Login — Movies Diary".to_string(),
|
||||
canonical_url: format!("{}/login", state.app_ctx.config.base_url),
|
||||
};
|
||||
let html = state
|
||||
.html_renderer
|
||||
@@ -101,10 +112,8 @@ pub mod html {
|
||||
}
|
||||
|
||||
pub async fn get_logout() -> impl IntoResponse {
|
||||
let cookie = (
|
||||
SET_COOKIE,
|
||||
HeaderValue::from_static("token=; HttpOnly; Path=/; SameSite=Lax; Max-Age=0"),
|
||||
);
|
||||
let val = format!("token=; HttpOnly; Path=/; SameSite=Strict; Max-Age=0{}", secure_flag());
|
||||
let cookie = (SET_COOKIE, HeaderValue::from_str(&val).expect("valid cookie"));
|
||||
([cookie], Redirect::to("/")).into_response()
|
||||
}
|
||||
|
||||
@@ -119,6 +128,9 @@ pub mod html {
|
||||
user_email: None,
|
||||
user_id: None,
|
||||
register_enabled: true,
|
||||
rss_url: "/feed.rss".to_string(),
|
||||
page_title: "Register — Movies Diary".to_string(),
|
||||
canonical_url: format!("{}/register", state.app_ctx.config.base_url),
|
||||
};
|
||||
let html = state
|
||||
.html_renderer
|
||||
@@ -158,9 +170,8 @@ pub mod html {
|
||||
Err(_) => Redirect::to("/login").into_response(),
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = encode_error(&e.to_string());
|
||||
Redirect::to(&format!("/register?error={}", msg)).into_response()
|
||||
Err(_) => {
|
||||
Redirect::to("/register?error=Registration+failed.+Please+try+again.").into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -170,7 +181,9 @@ pub mod html {
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<ErrorQuery>,
|
||||
) -> impl IntoResponse {
|
||||
let ctx = build_page_context(&state, Some(user_id)).await;
|
||||
let mut ctx = build_page_context(&state, Some(user_id)).await;
|
||||
ctx.page_title = "Log a Review — Movies Diary".to_string();
|
||||
ctx.canonical_url = format!("{}/reviews/new", state.app_ctx.config.base_url);
|
||||
let html = state
|
||||
.html_renderer
|
||||
.render_new_review_page(NewReviewPageData {
|
||||
@@ -257,7 +270,9 @@ pub mod html {
|
||||
OptionalCookieUser(user_id): OptionalCookieUser,
|
||||
State(state): State<AppState>,
|
||||
) -> impl IntoResponse {
|
||||
let ctx = build_page_context(&state, user_id).await;
|
||||
let mut ctx = build_page_context(&state, user_id).await;
|
||||
ctx.page_title = "Members — Movies Diary".to_string();
|
||||
ctx.canonical_url = format!("{}/users", state.app_ctx.config.base_url);
|
||||
match application::use_cases::get_users::execute(&state.app_ctx, application::queries::GetUsersQuery).await {
|
||||
Ok(users) => {
|
||||
let data = application::ports::UsersPageData { ctx, users };
|
||||
@@ -276,7 +291,7 @@ pub mod html {
|
||||
Path(profile_user_uuid): Path<Uuid>,
|
||||
Query(params): Query<crate::dtos::ProfileQueryParams>,
|
||||
) -> impl IntoResponse {
|
||||
let ctx = build_page_context(&state, user_id).await;
|
||||
let mut ctx = build_page_context(&state, user_id).await;
|
||||
let view = params.view.unwrap_or_else(|| "recent".to_string());
|
||||
|
||||
let profile_user = match state.app_ctx.user_repository
|
||||
@@ -288,6 +303,11 @@ pub mod html {
|
||||
Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
|
||||
};
|
||||
|
||||
let display_name = profile_user.email().value()
|
||||
.split('@').next().unwrap_or("User");
|
||||
ctx.page_title = format!("{}'s Diary — Movies Diary", display_name);
|
||||
ctx.canonical_url = format!("{}/users/{}", state.app_ctx.config.base_url, profile_user_uuid);
|
||||
|
||||
let query = application::queries::GetUserProfileQuery {
|
||||
user_id: profile_user_uuid,
|
||||
view: view.clone(),
|
||||
@@ -303,6 +323,7 @@ pub mod html {
|
||||
(e.offset, has_more, e.limit)
|
||||
})
|
||||
.unwrap_or((0, false, super::DEFAULT_PAGE_LIMIT));
|
||||
ctx.rss_url = format!("/users/{}/feed.rss", profile_user_uuid);
|
||||
let data = application::ports::ProfilePageData {
|
||||
ctx,
|
||||
profile_user_id: profile_user_uuid,
|
||||
@@ -359,30 +380,64 @@ pub mod posters {

pub mod rss {
    use axum::{
        extract::State,
        extract::{Path, State},
        http::header,
        response::IntoResponse,
    };
    use uuid::Uuid;

    use application::{queries::GetDiaryQuery, use_cases::get_diary};
    use domain::{errors::DomainError, models::SortDirection};
    use domain::{errors::DomainError, models::SortDirection, value_objects::UserId};

    use crate::{errors::ApiError, state::AppState};

    pub async fn get_feed(State(state): State<AppState>) -> Result<impl IntoResponse, ApiError> {
        let query = GetDiaryQuery {
            limit: Some(50),
            limit: Some(super::RSS_FEED_LIMIT),
            offset: Some(0),
            sort_by: Some(SortDirection::Descending),
            movie_id: None,
            user_id: None,
        };
        let page = get_diary::execute(&state.app_ctx, query).await?;
        let xml = state
            .rss_renderer
            .render_feed(&page.items)
            .render_feed(&page.items, "Movie Diary")
            .map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;
        Ok(([(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], xml))
    }

    pub async fn get_user_feed(
        State(state): State<AppState>,
        Path(user_id): Path<Uuid>,
    ) -> Result<impl IntoResponse, ApiError> {
        let user = state
            .app_ctx
            .user_repository
            .find_by_id(&UserId::from_uuid(user_id))
            .await
            .map_err(ApiError)?
            .ok_or_else(|| ApiError(DomainError::NotFound(format!("User {user_id}"))))?;

        let query = GetDiaryQuery {
            limit: Some(super::RSS_FEED_LIMIT),
            offset: Some(0),
            sort_by: Some(SortDirection::Descending),
            movie_id: None,
            user_id: Some(user_id),
        };
        let page = get_diary::execute(&state.app_ctx, query).await?;

        let display_name = user.email().value().split('@').next().unwrap_or("User");
        let title = format!("{}'s Movie Diary", display_name);

        let xml = state
            .rss_renderer
            .render_feed(&page.items, &title)
            .map_err(|e| ApiError(DomainError::InfrastructureError(e)))?;

        Ok(([(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], xml))
    }
}

pub mod api {
@@ -32,8 +32,11 @@ async fn main() -> anyhow::Result<()> {

    let app = routes::build_router(state);

    let listener = TcpListener::bind("0.0.0.0:3000").await?;
    tracing::info!("Listening on 0.0.0.0:3000");
    let host = std::env::var("HOST").unwrap_or_else(|_| "0.0.0.0".to_string());
    let port = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string());
    let addr = format!("{}:{}", host, port);
    let listener = TcpListener::bind(&addr).await?;
    tracing::info!("Listening on {}", addr);
    axum::serve(listener, app).await?;

    Ok(())
@@ -48,7 +51,9 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
    let database_url = std::env::var("DATABASE_URL").context("DATABASE_URL must be set")?;
    let opts = SqliteConnectOptions::from_str(&database_url)
        .context("Invalid DATABASE_URL")?
        .create_if_missing(true);
        .create_if_missing(true)
        .journal_mode(sqlx::sqlite::SqliteJournalMode::Wal)
        .busy_timeout(std::time::Duration::from_secs(5));
    let pool = SqlitePool::connect_with(opts)
        .await
        .context("Failed to connect to SQLite database")?;
@@ -109,8 +114,7 @@ async fn wire_dependencies() -> anyhow::Result<AppState> {
        app_ctx,
        html_renderer: Arc::new(AskamaHtmlRenderer::new()),
        rss_renderer: Arc::new(RssAdapter::new(
            "Movie Diary".into(),
            "http://localhost:3000".into(),
            std::env::var("BASE_URL").unwrap_or_else(|_| "http://localhost:3000".into()),
        )),
    })
}
@@ -1,8 +1,52 @@
use axum::{Router, routing};
use std::sync::{
    Arc,
    atomic::{AtomicU64, Ordering},
};
use std::time::{SystemTime, UNIX_EPOCH};

use axum::{Router, http::StatusCode, middleware, response::IntoResponse, routing};
use tower_http::{services::ServeDir, trace::TraceLayer};

use crate::{handlers, state::AppState};

const API_RATE_LIMIT: u64 = 20; // 20 requests per minute globally for API routes

/// Simple global rate limiter: tracks request count per 60-second window.
/// Not per-IP — suitable for a low-traffic personal app.
#[derive(Clone)]
struct RateLimiter {
    window: Arc<AtomicU64>,
    count: Arc<AtomicU64>,
    limit: u64,
}

impl RateLimiter {
    fn new(limit: u64) -> Self {
        Self {
            window: Arc::new(AtomicU64::new(0)),
            count: Arc::new(AtomicU64::new(0)),
            limit,
        }
    }

    fn check(&self) -> bool {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs()
            / 60;
        let prev = self.window.load(Ordering::Acquire);
        if now != prev {
            // compare_exchange ensures only one thread wins the window reset
            if self.window.compare_exchange(prev, now, Ordering::AcqRel, Ordering::Relaxed).is_ok() {
                self.count.store(1, Ordering::Release);
                return true;
            }
        }
        self.count.fetch_add(1, Ordering::Relaxed) + 1 <= self.limit
    }
}

pub fn build_router(state: AppState) -> Router {
    Router::new()
        .merge(html_routes())
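
Not part of the diff: a minimal sketch of the fixed-window behaviour of `check()` defined above. Within a single epoch minute the first `limit` calls succeed and later calls are rejected; when the minute rolls over, the first caller to win the compare_exchange resets the counter. The test assumes all four calls land in the same minute.

```rust
#[test]
fn rate_limiter_rejects_calls_beyond_the_limit_in_one_window() {
    let limiter = RateLimiter::new(3);
    assert!(limiter.check());  // wins the window reset, count = 1
    assert!(limiter.check());  // count = 2
    assert!(limiter.check());  // count = 3, still within the limit
    assert!(!limiter.check()); // count = 4 exceeds the limit of 3
}
```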
@@ -13,29 +57,73 @@ pub fn build_router(state: AppState) -> Router {
|
||||
}
|
||||
|
||||
fn html_routes() -> Router<AppState> {
|
||||
Router::new()
|
||||
.route("/", routing::get(handlers::html::get_activity_feed))
|
||||
.route("/users", routing::get(handlers::html::get_users_list))
|
||||
.route("/users/{id}", routing::get(handlers::html::get_user_profile))
|
||||
// Auth routes: 20 requests per minute globally.
|
||||
let limiter = RateLimiter::new(API_RATE_LIMIT);
|
||||
let auth = Router::new()
|
||||
.route(
|
||||
"/login",
|
||||
routing::get(handlers::html::get_login_page)
|
||||
.post(handlers::html::post_login),
|
||||
routing::get(handlers::html::get_login_page).post(handlers::html::post_login),
|
||||
)
|
||||
.route("/logout", routing::get(handlers::html::get_logout))
|
||||
.route(
|
||||
"/register",
|
||||
routing::get(handlers::html::get_register_page)
|
||||
.post(handlers::html::post_register),
|
||||
routing::get(handlers::html::get_register_page).post(handlers::html::post_register),
|
||||
)
|
||||
.route_layer(middleware::from_fn(
|
||||
move |req: axum::extract::Request, next: middleware::Next| {
|
||||
let limiter = limiter.clone();
|
||||
async move {
|
||||
if limiter.check() {
|
||||
next.run(req).await
|
||||
} else {
|
||||
StatusCode::TOO_MANY_REQUESTS.into_response()
|
||||
}
|
||||
}
|
||||
},
|
||||
));
|
||||
|
||||
Router::new()
|
||||
.route("/", routing::get(handlers::html::get_activity_feed))
|
||||
.route("/users", routing::get(handlers::html::get_users_list))
|
||||
.route(
|
||||
"/users/{id}",
|
||||
routing::get(handlers::html::get_user_profile),
|
||||
)
|
||||
.merge(auth)
|
||||
.route(
|
||||
"/reviews/new",
|
||||
routing::get(handlers::html::get_new_review_page),
|
||||
)
|
||||
.route("/reviews/new", routing::get(handlers::html::get_new_review_page))
|
||||
.route("/reviews", routing::post(handlers::html::post_review))
|
||||
.route("/reviews/{id}/delete", routing::post(handlers::html::post_delete_review))
|
||||
.route("/posters/{path}", routing::get(handlers::posters::get_poster))
|
||||
.route(
|
||||
"/reviews/{id}/delete",
|
||||
routing::post(handlers::html::post_delete_review),
|
||||
)
|
||||
.route(
|
||||
"/posters/{path}",
|
||||
routing::get(handlers::posters::get_poster),
|
||||
)
|
||||
.route("/feed.rss", routing::get(handlers::rss::get_feed))
|
||||
.route(
|
||||
"/users/{id}/feed.rss",
|
||||
routing::get(handlers::rss::get_user_feed),
|
||||
)
|
||||
}
|
||||
|
||||
fn api_routes() -> Router<AppState> {
|
||||
let limiter = RateLimiter::new(API_RATE_LIMIT);
|
||||
let auth_rate_limit =
|
||||
middleware::from_fn(move |req: axum::extract::Request, next: middleware::Next| {
|
||||
let limiter = limiter.clone();
|
||||
async move {
|
||||
if limiter.check() {
|
||||
next.run(req).await
|
||||
} else {
|
||||
StatusCode::TOO_MANY_REQUESTS.into_response()
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Router::new().nest(
|
||||
"/api",
|
||||
Router::new()
|
||||
@@ -45,12 +133,16 @@ fn api_routes() -> Router<AppState> {
|
||||
routing::get(handlers::api::get_review_history),
|
||||
)
|
||||
.route("/reviews", routing::post(handlers::api::post_review))
|
||||
.route("/reviews/{id}", routing::delete(handlers::api::delete_review))
|
||||
.route(
|
||||
"/reviews/{id}",
|
||||
routing::delete(handlers::api::delete_review),
|
||||
)
|
||||
.route(
|
||||
"/movies/{id}/sync-poster",
|
||||
routing::post(handlers::api::sync_poster),
|
||||
)
|
||||
.route("/auth/login", routing::post(handlers::api::login))
|
||||
.route("/auth/register", routing::post(handlers::api::register)),
|
||||
.route("/auth/register", routing::post(handlers::api::register))
|
||||
.route_layer(auth_rate_limit),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -105,10 +105,10 @@ async fn test_app() -> Router {
            auth_service: Arc::new(PanicAuth),
            password_hasher: Arc::new(PanicHasher),
            user_repository: Arc::new(NobodyUserRepo),
            config: AppConfig { allow_registration: false },
            config: AppConfig { allow_registration: false, base_url: "http://localhost:3000".to_string() },
        },
        html_renderer: Arc::new(AskamaHtmlRenderer::new()),
        rss_renderer: Arc::new(RssAdapter::new("Movie Diary".into(), "http://localhost:3000".into())),
        rss_renderer: Arc::new(RssAdapter::new("http://localhost:3000".into())),
    };

    routes::build_router(state)
@@ -1,620 +0,0 @@
|
||||
# Event-Driven Poster Sync Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** Add an `EventHandler` trait to the event-publisher adapter and implement `PosterSyncHandler` so that a `MovieDiscovered` event automatically triggers the existing `sync_poster` use case with exponential-backoff retry.
|
||||
|
||||
**Architecture:** `EventWorker` gains a `Vec<Box<dyn EventHandler>>` and fans out each received event to all registered handlers sequentially. `PosterSyncHandler` lives in the `presentation` crate (composition root), holds `AppContext`, and calls `sync_poster::execute` on `MovieDiscovered` events — ignoring all others. Retry is up to 3 retries (4 total attempts) with delays 1s → 2s → 4s.
|
||||
|
||||
**Tech Stack:** Rust, tokio::sync::mpsc, async-trait, existing `sync_poster` use case
|
||||
|
||||
---
|
||||
|
||||
## File Map
|
||||
|
||||
| File | Status | Responsibility |
|
||||
|---|---|---|
|
||||
| `crates/adapters/event-publisher/src/lib.rs` | Modify | Add `EventHandler` trait; extend `EventWorker` and `create_event_channel` |
|
||||
| `crates/application/src/commands.rs` | Modify | Add `#[derive(Clone)]` to `SyncPosterCommand` |
|
||||
| `crates/presentation/src/lib.rs` | Modify | Expose `pub mod event_handlers` |
|
||||
| `crates/presentation/src/event_handlers.rs` | Create | `PosterSyncHandler` implementation |
|
||||
| `crates/presentation/src/main.rs` | Modify | Wire `PosterSyncHandler` into `create_event_channel` |
|
||||
|
||||
---
|
||||
|
||||
## Task 1: Add `EventHandler` trait and update `EventWorker`
|
||||
|
||||
**Files:**
|
||||
- Modify: `crates/adapters/event-publisher/src/lib.rs`
|
||||
|
||||
- [ ] **Step 1: Write the failing test**
|
||||
|
||||
Add to the bottom of `crates/adapters/event-publisher/src/lib.rs`:
|
||||
|
||||
```rust
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use async_trait::async_trait;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
value_objects::{ExternalMetadataId, MovieId},
|
||||
};
|
||||
|
||||
struct RecordingHandler {
|
||||
calls: Arc<Mutex<Vec<String>>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventHandler for RecordingHandler {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||
let label = match event {
|
||||
DomainEvent::MovieDiscovered { .. } => "movie_discovered",
|
||||
DomainEvent::ReviewLogged { .. } => "review_logged",
|
||||
};
|
||||
self.calls.lock().unwrap().push(label.to_string());
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn single_handler_receives_event() {
|
||||
let calls = Arc::new(Mutex::new(vec![]));
|
||||
let handler = RecordingHandler { calls: Arc::clone(&calls) };
|
||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||
let (publisher, worker) = create_event_channel(config, vec![Box::new(handler)]);
|
||||
|
||||
tokio::spawn(worker.run());
|
||||
|
||||
let event = DomainEvent::MovieDiscovered {
|
||||
movie_id: MovieId::generate(),
|
||||
external_metadata_id: ExternalMetadataId::new("tt1234567".into()).unwrap(),
|
||||
};
|
||||
publisher.publish(&event).await.unwrap();
|
||||
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
||||
|
||||
assert_eq!(*calls.lock().unwrap(), vec!["movie_discovered"]);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn multiple_handlers_all_receive_event() {
|
||||
let calls1 = Arc::new(Mutex::new(vec![]));
|
||||
let calls2 = Arc::new(Mutex::new(vec![]));
|
||||
let handler1 = RecordingHandler { calls: Arc::clone(&calls1) };
|
||||
let handler2 = RecordingHandler { calls: Arc::clone(&calls2) };
|
||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||
let (publisher, worker) = create_event_channel(
|
||||
config,
|
||||
vec![Box::new(handler1), Box::new(handler2)],
|
||||
);
|
||||
|
||||
tokio::spawn(worker.run());
|
||||
|
||||
let event = DomainEvent::MovieDiscovered {
|
||||
movie_id: MovieId::generate(),
|
||||
external_metadata_id: ExternalMetadataId::new("tt9999999".into()).unwrap(),
|
||||
};
|
||||
publisher.publish(&event).await.unwrap();
|
||||
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
||||
|
||||
assert_eq!(calls1.lock().unwrap().len(), 1);
|
||||
assert_eq!(calls2.lock().unwrap().len(), 1);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn handler_error_does_not_stop_worker() {
|
||||
struct FailingHandler;
|
||||
#[async_trait]
|
||||
impl EventHandler for FailingHandler {
|
||||
async fn handle(&self, _: &DomainEvent) -> Result<(), DomainError> {
|
||||
Err(DomainError::InfrastructureError("boom".into()))
|
||||
}
|
||||
}
|
||||
|
||||
let calls = Arc::new(Mutex::new(vec![]));
|
||||
let good = RecordingHandler { calls: Arc::clone(&calls) };
|
||||
let config = EventPublisherConfig { channel_buffer: 8 };
|
||||
let (publisher, worker) = create_event_channel(
|
||||
config,
|
||||
vec![Box::new(FailingHandler), Box::new(good)],
|
||||
);
|
||||
|
||||
tokio::spawn(worker.run());
|
||||
|
||||
let event = DomainEvent::MovieDiscovered {
|
||||
movie_id: MovieId::generate(),
|
||||
external_metadata_id: ExternalMetadataId::new("tt0000001".into()).unwrap(),
|
||||
};
|
||||
publisher.publish(&event).await.unwrap();
|
||||
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
||||
|
||||
// good handler still ran despite failing handler before it
|
||||
assert_eq!(calls.lock().unwrap().len(), 1);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run tests to verify they fail**
|
||||
|
||||
```bash
|
||||
cargo test -p event-publisher 2>&1 | tail -20
|
||||
```
|
||||
|
||||
Expected: compile errors — `EventHandler` not defined, `create_event_channel` wrong arity.
|
||||
|
||||
- [ ] **Step 3: Replace `lib.rs` with updated implementation**
|
||||
|
||||
Replace the full content of `crates/adapters/event-publisher/src/lib.rs` with:
|
||||
|
||||
```rust
|
||||
use async_trait::async_trait;
|
||||
use domain::{errors::DomainError, events::DomainEvent, ports::EventPublisher};
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
pub struct EventPublisherConfig {
|
||||
pub channel_buffer: usize,
|
||||
}
|
||||
|
||||
impl EventPublisherConfig {
|
||||
pub fn from_env() -> Self {
|
||||
let channel_buffer = std::env::var("EVENT_CHANNEL_BUFFER")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(128);
|
||||
Self { channel_buffer }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait EventHandler: Send + Sync {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
||||
}
|
||||
|
||||
pub struct ChannelEventPublisher {
|
||||
sender: mpsc::Sender<DomainEvent>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventPublisher for ChannelEventPublisher {
|
||||
async fn publish(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||
self.sender
|
||||
.send(event.clone())
|
||||
.await
|
||||
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EventWorker {
|
||||
receiver: mpsc::Receiver<DomainEvent>,
|
||||
handlers: Vec<Box<dyn EventHandler>>,
|
||||
}
|
||||
|
||||
impl EventWorker {
|
||||
pub async fn run(mut self) {
|
||||
while let Some(event) = self.receiver.recv().await {
|
||||
match &event {
|
||||
DomainEvent::ReviewLogged {
|
||||
review_id,
|
||||
movie_id,
|
||||
user_id,
|
||||
rating,
|
||||
watched_at,
|
||||
} => {
|
||||
tracing::info!(
|
||||
review_id = %review_id.value(),
|
||||
movie_id = %movie_id.value(),
|
||||
user_id = %user_id.value(),
|
||||
rating = rating.value(),
|
||||
watched_at = %watched_at,
|
||||
"event: review_logged"
|
||||
);
|
||||
}
|
||||
DomainEvent::MovieDiscovered {
|
||||
movie_id,
|
||||
external_metadata_id,
|
||||
} => {
|
||||
tracing::info!(
|
||||
movie_id = %movie_id.value(),
|
||||
external_id = external_metadata_id.value(),
|
||||
"event: movie_discovered"
|
||||
);
|
||||
}
|
||||
}
|
||||
for handler in &self.handlers {
|
||||
if let Err(e) = handler.handle(&event).await {
|
||||
tracing::error!("event handler error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
tracing::info!("event worker shut down");
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_event_channel(
|
||||
config: EventPublisherConfig,
|
||||
handlers: Vec<Box<dyn EventHandler>>,
|
||||
) -> (ChannelEventPublisher, EventWorker) {
|
||||
let (tx, rx) = mpsc::channel(config.channel_buffer);
|
||||
(
|
||||
ChannelEventPublisher { sender: tx },
|
||||
EventWorker {
|
||||
receiver: rx,
|
||||
handlers,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
// paste the test module from Step 1 here
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Run tests to verify they pass**
|
||||
|
||||
```bash
|
||||
cargo test -p event-publisher 2>&1 | tail -20
|
||||
```
|
||||
|
||||
Expected: `test result: ok. 3 passed`
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add crates/adapters/event-publisher/src/lib.rs
|
||||
git commit -m "feat(event-publisher): add EventHandler trait and fan-out in EventWorker"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 2: Derive `Clone` on `SyncPosterCommand`
|
||||
|
||||
**Files:**
|
||||
- Modify: `crates/application/src/commands.rs`
|
||||
|
||||
The `PosterSyncHandler` retry loop reconstructs the command on each attempt, which requires `Clone` on `String` (already impl'd) and `Uuid` (Copy) — but it's cleaner to `#[derive(Clone)]` directly.
|
||||
|
||||
- [ ] **Step 1: Add `#[derive(Clone)]` to `SyncPosterCommand`**
|
||||
|
||||
In `crates/application/src/commands.rs`, find the `SyncPosterCommand` struct (line ~17) and add the derive:
|
||||
|
||||
```rust
|
||||
#[derive(Clone)]
|
||||
pub struct SyncPosterCommand {
|
||||
pub movie_id: Uuid,
|
||||
pub external_metadata_id: String,
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Verify it compiles**
|
||||
|
||||
```bash
|
||||
cargo build -p application 2>&1 | tail -10
|
||||
```
|
||||
|
||||
Expected: clean build.
|
||||
|
||||
- [ ] **Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add crates/application/src/commands.rs
|
||||
git commit -m "feat(application): derive Clone on SyncPosterCommand"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 3: Implement `PosterSyncHandler`
|
||||
|
||||
**Files:**
|
||||
- Create: `crates/presentation/src/event_handlers.rs`
|
||||
- Modify: `crates/presentation/src/lib.rs`
|
||||
|
||||
- [ ] **Step 1: Write the failing test first — create `event_handlers.rs` with tests only**
|
||||
|
||||
Create `crates/presentation/src/event_handlers.rs`:
|
||||
|
||||
```rust
|
||||
use std::time::Duration;
|
||||
|
||||
use application::{commands::SyncPosterCommand, context::AppContext, use_cases::sync_poster};
|
||||
use async_trait::async_trait;
|
||||
use domain::{errors::DomainError, events::DomainEvent};
|
||||
use event_publisher::EventHandler;
|
||||
|
||||
pub struct PosterSyncHandler {
|
||||
ctx: AppContext,
|
||||
max_retries: u32,
|
||||
}
|
||||
|
||||
impl PosterSyncHandler {
|
||||
pub fn new(ctx: AppContext, max_retries: u32) -> Self {
|
||||
Self { ctx, max_retries }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventHandler for PosterSyncHandler {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::sync::Arc;
|
||||
use async_trait::async_trait;
|
||||
use application::config::AppConfig;
|
||||
use domain::{
|
||||
errors::DomainError,
|
||||
events::DomainEvent,
|
||||
models::{DiaryEntry, DiaryFilter, Movie, Review, ReviewHistory, User, collections::Paginated},
|
||||
ports::{
|
||||
AuthService, EventPublisher, GeneratedToken, MetadataClient, MetadataSearchCriteria,
|
||||
MovieRepository, PasswordHasher, PosterFetcherClient, PosterStorage, UserRepository,
|
||||
},
|
||||
value_objects::{
|
||||
Email, ExternalMetadataId, MovieId, MovieTitle, PasswordHash, PosterPath, PosterUrl,
|
||||
Rating, ReleaseYear, ReviewId, UserId,
|
||||
},
|
||||
};
|
||||
|
||||
// Panic stubs — never called in the "ignored event" test path
|
||||
struct PanicRepo;
|
||||
struct PanicMetadata;
|
||||
struct PanicFetcher;
|
||||
struct PanicStorage;
|
||||
struct PanicAuth;
|
||||
struct PanicHasher;
|
||||
struct PanicUserRepo;
|
||||
struct NoopPublisher;
|
||||
|
||||
#[async_trait]
|
||||
impl MovieRepository for PanicRepo {
|
||||
async fn get_movie_by_external_id(&self, _: &ExternalMetadataId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
||||
async fn get_movie_by_id(&self, _: &MovieId) -> Result<Option<Movie>, DomainError> { panic!("unexpected") }
|
||||
async fn get_movies_by_title_and_year(&self, _: &MovieTitle, _: &ReleaseYear) -> Result<Vec<Movie>, DomainError> { panic!("unexpected") }
|
||||
async fn upsert_movie(&self, _: &Movie) -> Result<(), DomainError> { panic!("unexpected") }
|
||||
async fn save_review(&self, _: &Review) -> Result<DomainEvent, DomainError> { panic!("unexpected") }
|
||||
async fn query_diary(&self, _: &DiaryFilter) -> Result<Paginated<DiaryEntry>, DomainError> { panic!("unexpected") }
|
||||
async fn get_review_history(&self, _: &MovieId) -> Result<ReviewHistory, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl MetadataClient for PanicMetadata {
|
||||
async fn fetch_movie_metadata(&self, _: &MetadataSearchCriteria) -> Result<Movie, DomainError> { panic!("unexpected") }
|
||||
async fn get_poster_url(&self, _: &ExternalMetadataId) -> Result<Option<PosterUrl>, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl PosterFetcherClient for PanicFetcher {
|
||||
async fn fetch_poster_bytes(&self, _: &PosterUrl) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl PosterStorage for PanicStorage {
|
||||
async fn store_poster(&self, _: &MovieId, _: &[u8]) -> Result<PosterPath, DomainError> { panic!("unexpected") }
|
||||
async fn get_poster(&self, _: &PosterPath) -> Result<Vec<u8>, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl AuthService for PanicAuth {
|
||||
async fn generate_token(&self, _: &UserId) -> Result<GeneratedToken, DomainError> { panic!("unexpected") }
|
||||
async fn validate_token(&self, _: &str) -> Result<UserId, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl PasswordHasher for PanicHasher {
|
||||
async fn hash(&self, _: &str) -> Result<PasswordHash, DomainError> { panic!("unexpected") }
|
||||
async fn verify(&self, _: &str, _: &PasswordHash) -> Result<bool, DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl UserRepository for PanicUserRepo {
|
||||
async fn find_by_email(&self, _: &Email) -> Result<Option<User>, DomainError> { panic!("unexpected") }
|
||||
async fn save(&self, _: &User) -> Result<(), DomainError> { panic!("unexpected") }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl EventPublisher for NoopPublisher {
|
||||
async fn publish(&self, _: &DomainEvent) -> Result<(), DomainError> { Ok(()) }
|
||||
}
|
||||
|
||||
fn panic_ctx() -> AppContext {
|
||||
AppContext {
|
||||
repository: Arc::new(PanicRepo),
|
||||
metadata_client: Arc::new(PanicMetadata),
|
||||
poster_fetcher: Arc::new(PanicFetcher),
|
||||
poster_storage: Arc::new(PanicStorage),
|
||||
event_publisher: Arc::new(NoopPublisher),
|
||||
auth_service: Arc::new(PanicAuth),
|
||||
password_hasher: Arc::new(PanicHasher),
|
||||
user_repository: Arc::new(PanicUserRepo),
|
||||
config: AppConfig { allow_registration: false },
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn review_logged_is_ignored() {
|
||||
let handler = PosterSyncHandler::new(panic_ctx(), 3);
|
||||
let event = DomainEvent::ReviewLogged {
|
||||
review_id: ReviewId::generate(),
|
||||
movie_id: MovieId::generate(),
|
||||
user_id: UserId::generate(),
|
||||
rating: Rating::new(4).unwrap(),
|
||||
watched_at: chrono::NaiveDateTime::from_timestamp_opt(0, 0).unwrap(),
|
||||
};
|
||||
// returns Ok without touching any panic stubs
|
||||
assert!(handler.handle(&event).await.is_ok());
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Expose the module in `lib.rs`**
|
||||
|
||||
Add to `crates/presentation/src/lib.rs`:
|
||||
|
||||
```rust
|
||||
pub mod event_handlers;
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Run the test to verify it fails**
|
||||
|
||||
```bash
|
||||
cargo test -p presentation event_handlers 2>&1 | tail -20
|
||||
```
|
||||
|
||||
Expected: compile error or test failure because `handle` is `todo!()`.
|
||||
|
||||
- [ ] **Step 4: Implement `handle` in `PosterSyncHandler`**
|
||||
|
||||
Replace the `todo!()` body in `crates/presentation/src/event_handlers.rs`:
|
||||
|
||||
```rust
|
||||
#[async_trait]
|
||||
impl EventHandler for PosterSyncHandler {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError> {
|
||||
let (movie_id, external_metadata_id) = match event {
|
||||
DomainEvent::MovieDiscovered {
|
||||
movie_id,
|
||||
external_metadata_id,
|
||||
} => (movie_id.value(), external_metadata_id.value().to_owned()),
|
||||
_ => return Ok(()),
|
||||
};
|
||||
|
||||
let mut last_err: Option<DomainError> = None;
|
||||
for attempt in 0..=self.max_retries {
|
||||
let cmd = SyncPosterCommand {
|
||||
movie_id,
|
||||
external_metadata_id: external_metadata_id.clone(),
|
||||
};
|
||||
match sync_poster::execute(&self.ctx, cmd).await {
|
||||
Ok(()) => return Ok(()),
|
||||
Err(e) => {
|
||||
if attempt < self.max_retries {
|
||||
let delay = Duration::from_secs(2u64.pow(attempt));
|
||||
tracing::warn!(
|
||||
attempt = attempt + 1,
|
||||
max_attempts = self.max_retries + 1,
|
||||
delay_secs = delay.as_secs(),
|
||||
"poster sync failed, retrying: {e}"
|
||||
);
|
||||
tokio::time::sleep(delay).await;
|
||||
}
|
||||
last_err = Some(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let err = last_err.unwrap();
|
||||
tracing::error!(
|
||||
attempts = self.max_retries + 1,
|
||||
"poster sync failed after all attempts: {err}"
|
||||
);
|
||||
Err(err)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Run the test to verify it passes**
|
||||
|
||||
```bash
|
||||
cargo test -p presentation event_handlers 2>&1 | tail -20
|
||||
```
|
||||
|
||||
Expected: `test result: ok. 1 passed`
|
||||
|
||||
- [ ] **Step 6: Commit**
|
||||
|
||||
```bash
|
||||
git add crates/presentation/src/event_handlers.rs crates/presentation/src/lib.rs
|
||||
git commit -m "feat(presentation): implement PosterSyncHandler with retry"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 4: Wire `PosterSyncHandler` in `main.rs`
|
||||
|
||||
**Files:**
|
||||
- Modify: `crates/presentation/src/main.rs`
|
||||
|
||||
- [ ] **Step 1: Add the import**
|
||||
|
||||
In `crates/presentation/src/main.rs`, update the import block. The existing line is:
|
||||
|
||||
```rust
|
||||
use event_publisher::{EventPublisherConfig, create_event_channel};
|
||||
```
|
||||
|
||||
Add below it:
|
||||
|
||||
```rust
|
||||
use presentation::event_handlers::PosterSyncHandler;
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Wire the handler**
|
||||
|
||||
In `wire_dependencies`, find the two existing lines:
|
||||
|
||||
```rust
|
||||
let (event_publisher, event_worker) = create_event_channel(EventPublisherConfig::from_env());
|
||||
tokio::spawn(event_worker.run());
|
||||
```
|
||||
|
||||
Replace with:
|
||||
|
||||
```rust
|
||||
let poster_handler = PosterSyncHandler::new(app_ctx.clone(), 3); // 3 retries = 4 total attempts
|
||||
let (event_publisher, event_worker) = create_event_channel(
|
||||
EventPublisherConfig::from_env(),
|
||||
vec![Box::new(poster_handler)],
|
||||
);
|
||||
tokio::spawn(event_worker.run());
|
||||
```
|
||||
|
||||
Note: `app_ctx.clone()` is cheap — all fields are `Arc<dyn Trait>`.
|
||||
|
||||
- [ ] **Step 3: Build the full workspace**
|
||||
|
||||
```bash
|
||||
cargo build 2>&1 | tail -20
|
||||
```
|
||||
|
||||
Expected: clean build with no errors.
|
||||
|
||||
- [ ] **Step 4: Run all tests**
|
||||
|
||||
```bash
|
||||
cargo test 2>&1 | tail -20
|
||||
```
|
||||
|
||||
Expected: all tests pass.
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add crates/presentation/src/main.rs
|
||||
git commit -m "feat(presentation): wire PosterSyncHandler into event worker"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Verification
|
||||
|
||||
After all tasks complete, smoke-test end-to-end:
|
||||
|
||||
```bash
|
||||
# Start the server
|
||||
RUST_LOG=info cargo run -p presentation
|
||||
|
||||
# In another terminal: log a review for a movie not yet in the DB
|
||||
# (requires valid JWT — use the existing login endpoint first)
|
||||
|
||||
# Watch the server logs for:
|
||||
# event: movie_discovered movie_id=<uuid> external_id=tt...
|
||||
# poster sync attempt logs (or success with no retries needed)
|
||||
```
|
||||
|
||||
To confirm the poster was stored, check the configured object store bucket/directory for a file named with the movie's UUID.
|
||||
@@ -1,115 +0,0 @@
|
||||
# Event-Driven Poster Sync
|
||||
|
||||
**Date:** 2026-05-04
|
||||
**Status:** Approved
|
||||
|
||||
## Problem
|
||||
|
||||
The `EventPublisher` infrastructure exists but only logs events via tracing. When a new movie is discovered (`MovieDiscovered` event), its poster should be automatically downloaded and stored — currently this requires a manual `POST /api/movies/{id}/sync-poster` call.
|
||||
|
||||
## Scope
|
||||
|
||||
- Introduce an `EventHandler` trait for composable event side-effects
|
||||
- Implement `PosterSyncHandler` that reacts to `MovieDiscovered` by running the existing `sync_poster` use case with retry
|
||||
- RSS feed is already generated fresh on every request — no event work needed there
|
||||
|
||||
## Design
|
||||
|
||||
### `EventHandler` trait (in `event-publisher` crate)
|
||||
|
||||
```rust
|
||||
#[async_trait]
|
||||
pub trait EventHandler: Send + Sync {
|
||||
async fn handle(&self, event: &DomainEvent) -> Result<(), DomainError>;
|
||||
}
|
||||
```
|
||||
|
||||
Lives in `event-publisher` crate alongside `ChannelEventPublisher`. Depends only on `domain` — no new crate dependencies required.
|
||||
|
||||
### `EventWorker` update
|
||||
|
||||
`EventWorker` gains a `handlers: Vec<Box<dyn EventHandler>>` field. On each received event:
|
||||
1. Log the event via tracing (existing behavior, kept as default baseline)
|
||||
2. Fan out to all handlers concurrently (or sequentially — see note below)
|
||||
3. Handler errors are logged at ERROR level but do not stop the worker or other handlers
|
||||
|
||||
`create_event_channel` signature gains a `handlers` parameter:
|
||||
```rust
|
||||
pub fn create_event_channel(
|
||||
config: EventPublisherConfig,
|
||||
handlers: Vec<Box<dyn EventHandler>>,
|
||||
) -> (ChannelEventPublisher, EventWorker)
|
||||
```
|
||||
|
||||
**Fan-out strategy:** sequential for now (simpler, avoids concurrent mutation of shared state). If handler latency becomes a concern, switch to `tokio::join_all`.
|
||||
|
||||
### `PosterSyncHandler` (in `presentation` crate)
|
||||
|
||||
New file: `crates/presentation/src/event_handlers.rs`
|
||||
|
||||
```rust
|
||||
pub struct PosterSyncHandler {
|
||||
ctx: AppContext,
|
||||
max_retries: u32,
|
||||
}
|
||||
```
|
||||
|
||||
Behavior per event:
|
||||
- `MovieDiscovered { movie_id, external_metadata_id }` → build `SyncPosterCommand`, call `sync_poster::execute()` with exponential backoff
|
||||
- All other events → `Ok(())` immediately
|
||||
|
||||
**Retry logic:** up to `max_retries` retries after the initial attempt (default: 3, so 4 total attempts), with exponential backoff delays 1s → 2s → 4s before each retry. After exhausting all attempts, logs at ERROR level and returns `Err`.
|
||||
|
||||
### Wiring (`main.rs`)
|
||||
|
||||
```rust
|
||||
let poster_handler = PosterSyncHandler::new(app_ctx.clone(), 3); // 3 retries = 4 total attempts
|
||||
let (event_publisher, event_worker) = create_event_channel(
|
||||
EventPublisherConfig::from_env(),
|
||||
vec![Box::new(poster_handler)],
|
||||
);
|
||||
tokio::spawn(event_worker.run());
|
||||
```
|
||||
|
||||
`AppContext` is `Clone` (all fields are `Arc<dyn Trait>`), so cloning for the handler is cheap.
|
||||
|
||||
## Data Flow
|
||||
|
||||
```
|
||||
POST /api/diary
|
||||
→ log_review::execute()
|
||||
→ movie not in DB → fetch metadata → MovieDiscovered published
|
||||
→ review saved → ReviewLogged published
|
||||
↓
|
||||
mpsc channel
|
||||
↓
|
||||
EventWorker::run()
|
||||
→ tracing log
|
||||
→ PosterSyncHandler::handle(MovieDiscovered)
|
||||
→ sync_poster::execute() [attempt 1]
|
||||
→ on failure: sleep 1s → attempt 2
|
||||
→ on failure: sleep 2s → attempt 3
|
||||
→ on failure: log ERROR, done
|
||||
```
|
||||
|
||||
## Files Changed
|
||||
|
||||
| File | Change |
|
||||
|---|---|
|
||||
| `crates/adapters/event-publisher/src/lib.rs` | Add `EventHandler` trait; update `EventWorker` and `create_event_channel` |
|
||||
| `crates/presentation/src/event_handlers.rs` | New — `PosterSyncHandler` |
|
||||
| `crates/presentation/src/main.rs` | Wire `PosterSyncHandler` into `create_event_channel` |
|
||||
|
||||
No new crate dependencies. No changes to domain or application layers.
|
||||
|
||||
## Verification
|
||||
|
||||
```bash
|
||||
cargo build # full workspace clean build
|
||||
cargo test # existing tests still pass
|
||||
|
||||
# Manual: log a review for a new movie
|
||||
# → check logs for "event: movie_discovered"
|
||||
# → check logs for poster sync attempt
|
||||
# → check object store / storage for saved poster file
|
||||
```
|
||||
@@ -1,30 +0,0 @@
|
||||
# Frontend HTML/CSS Design
|
||||
|
||||
**Date:** 2026-05-04
|
||||
|
||||
## Summary
|
||||
|
||||
Server-rendered HTML frontend using Rust/Axum + Askama templates + HTTP-only cookie JWT auth. No JavaScript.
|
||||
|
||||
## Pages
|
||||
|
||||
| Route | Access | Description |
|
||||
|---|---|---|
|
||||
| GET / | public | Diary index |
|
||||
| GET /login | public | Login form |
|
||||
| POST /login | public | Set cookie → redirect / |
|
||||
| GET /logout | — | Clear cookie → redirect / |
|
||||
| GET /register | public | Only if ALLOW_REGISTRATION |
|
||||
| POST /register | public | Set cookie → redirect / |
|
||||
| GET /reviews/new | auth | New review form |
|
||||
| POST /reviews | auth | Log review → redirect / |
|
||||
|
||||
## Design Decisions
|
||||
|
||||
- **Auth:** Cookie-based JWT (HttpOnly, SameSite=Lax). Existing Bearer auth untouched.
|
||||
- **Template inheritance:** base.html owns header. Child templates use {% extends %}/{% block %}.
|
||||
- **Entry layout:** Poster thumbnail (60px) + text block. Fallback to text-only when no poster.
|
||||
- **Header (logged out):** [Login] [Register?]
|
||||
- **Header (logged in):** [Add Review] email@example.com [Logout]
|
||||
- **Form errors:** PRG → redirect back with ?error=<msg>
|
||||
- **Diary visibility:** Public (anyone can read, auth required to add)
|
||||
@@ -34,6 +34,21 @@ body {
|
||||
background: url("/static/background.avif") center / cover no-repeat fixed;
|
||||
min-height: 100%;
|
||||
line-height: 1.5;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
body::before {
|
||||
content: "";
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: rgba(0, 0, 0, 0.2);
|
||||
z-index: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
body > * {
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
a {
|
||||
@@ -471,6 +486,7 @@ form button[type="submit"]:hover {
|
||||
text-align: center;
|
||||
min-height: 48px;
|
||||
display: flex; flex-direction: column; align-items: center; justify-content: center;
|
||||
background: oklch(85.2% 0.199 91.936 / var(--alpha, 0.05));
|
||||
}
|
||||
.heatmap-count { font-size: 0.85rem; font-weight: 700; }
|
||||
.heatmap-month { font-size: 0.65rem; opacity: 0.6; margin-top: 2px; }
|
||||
@@ -489,10 +505,10 @@ form button[type="submit"]:hover {
|
||||
display: flex;
|
||||
align-items: flex-end;
|
||||
gap: 4px;
|
||||
height: 80px;
|
||||
}
|
||||
.bar-col { flex: 1; display: flex; flex-direction: column; align-items: center; justify-content: flex-end; gap: 3px; }
|
||||
.bar-fill { width: 100%; background: var(--primary); border-radius: 3px 3px 0 0; min-height: 4px; opacity: 0.8; }
|
||||
.bar-col { flex: 1; display: flex; flex-direction: column; align-items: center; gap: 2px; }
|
||||
.bar-value { font-size: 0.6rem; color: var(--primary); opacity: 0.9; line-height: 1; }
|
||||
.bar-fill { width: 100%; background: var(--primary); border-radius: 3px 3px 0 0; min-height: 3px; opacity: 0.8; }
|
||||
.bar-month { font-size: 0.65rem; opacity: 0.5; }
|
||||
|
||||
.director-chart { display: flex; flex-direction: column; gap: 6px; }
|
||||
|
||||