Compare commits

...

61 Commits

Author SHA1 Message Date
e3a65d8052 fix: use StdRng for shuffling indices in fill_block function 2026-03-20 23:12:44 +01:00
f45ca77b79 fix: format code for improved readability and consistency 2026-03-20 01:57:22 +01:00
a5c31ef8a9 fix(frontend): restore plain type labels in grouped sidebar 2026-03-20 01:56:11 +01:00
3662a5ab9e fix(frontend): suppress shows when type filter active; clarify grouped type labels 2026-03-20 01:53:13 +01:00
137251fe37 fix(frontend): restore ALL sentinel in sidebar to fix hydration mismatch 2026-03-20 01:33:27 +01:00
8101734c63 feat(frontend): add useLibraryShows and useLibrarySeasons hooks 2026-03-20 01:29:55 +01:00
6cf8a6d5e3 feat(frontend): implement grouped/drilldown view in library grid 2026-03-20 01:23:33 +01:00
c5317cb639 feat(frontend): add viewMode/drilldown state to library page 2026-03-20 01:21:54 +01:00
5f66493558 feat(api): add /library/shows and /library/shows/:name/seasons routes + season filter 2026-03-20 01:19:31 +01:00
5cc4cde223 feat(frontend): add ShowTile, SeasonTile, BreadcrumbNav components 2026-03-20 01:19:08 +01:00
5b89481104 feat(frontend): extend schedule dialog to support show/series selection 2026-03-20 01:19:00 +01:00
33338ac100 feat(frontend): make library sidebar drilldown-aware 2026-03-20 01:18:52 +01:00
66eef2c82e feat(frontend): add useLibraryShows and useLibrarySeasons hooks 2026-03-20 01:18:34 +01:00
6f1a4e19d3 feat(infra): implement list_shows, list_seasons + season_number filter 2026-03-20 01:16:02 +01:00
dd69470ee4 feat(frontend): add ShowSummary, SeasonSummary types + library shows/seasons API methods 2026-03-20 01:14:43 +01:00
23722a771b feat(domain): add ShowSummary, SeasonSummary types + ILibraryRepository methods 2026-03-20 01:13:00 +01:00
4cf7fdc1c2 feat(frontend): add library sync interval + sync now to admin settings panel 2026-03-20 00:38:04 +01:00
91271bd83c feat(frontend): library page, components, and schedule/add-to-block dialogs (tasks 11-14) 2026-03-20 00:35:40 +01:00
49c7f7abd7 feat(frontend): add useLibrarySearch, useLibrarySyncStatus, useTriggerSync, useAdminSettings hooks 2026-03-20 00:30:44 +01:00
978ad1cdb0 feat(frontend): add library paged types, syncStatus/triggerSync/admin API methods 2026-03-20 00:30:03 +01:00
e1a885dcc9 fix(api): mount admin settings routes at /admin/settings (not /admin/library/settings) 2026-03-20 00:28:24 +01:00
e849548e9e feat(api): replace live-provider library routes with DB-backed routes; add sync + admin settings endpoints 2026-03-20 00:27:06 +01:00
d92d629fbc feat(api): wire library_repo, app_settings_repo, library_sync_adapter into AppState; start scheduler 2026-03-20 00:23:25 +01:00
aa5e3c28aa feat(api): add library sync background task 2026-03-20 00:23:22 +01:00
64138b07e4 feat(infra): add FullSyncAdapter for library sync 2026-03-20 00:19:45 +01:00
6732576d06 feat(infra): add SqliteAppSettingsRepository 2026-03-20 00:17:05 +01:00
a3a421c0ac feat(infra): add SqliteLibraryRepository 2026-03-20 00:15:01 +01:00
c6c93766c7 refactor(domain): remove redundant IAppSettingsRepository re-export; add TODO for Jellyfin enrichment 2026-03-20 00:11:30 +01:00
e101b44fa5 feat(domain): add library types, LibrarySyncAdapter, ILibraryRepository, IAppSettingsRepository; extend MediaItem with thumbnail_url and collection_id 2026-03-20 00:08:10 +01:00
666b1f2753 feat(db): add missing indexes to library migrations 2026-03-20 00:03:27 +01:00
a7c3f1f92e feat(db): add library_items, library_sync_log, app_settings migrations 2026-03-20 00:01:34 +01:00
187cd064fb docs: add library management implementation plan 2026-03-19 23:57:05 +01:00
4cc0e155bd docs: add library management design spec 2026-03-19 23:43:37 +01:00
175d0bb0bb fix(tests): add missing refresh_expiry_days param to JwtConfig::new in tests 2026-03-19 23:03:36 +01:00
311fdd4006 feat: multi-instance provider support
- provider_configs: add id TEXT PK; migrate existing rows (provider_type becomes id)
- local_files_index: add provider_id column + index; scope all queries per instance
- ProviderConfigRow: add id field; add get_by_id to trait
- LocalIndex::new: add provider_id param; all SQL scoped by provider_id
- factory: thread provider_id through build_local_files_bundle
- AppState.local_index: Option<Arc<LocalIndex>> → HashMap<String, Arc<LocalIndex>>
- admin_providers: restructured routes (POST /admin/providers create, PUT/DELETE /{id}, POST /test)
- admin_providers: use row.id as registry key for jellyfin and local_files
- files.rescan: optional ?provider=<id> query param
- frontend: add id to ProviderConfig, update api/hooks, new multi-instance panel UX
2026-03-19 22:54:41 +01:00
373e1c7c0a fix: remove default-run entry from Cargo.toml 2026-03-19 22:34:09 +01:00
d2412da057 feat(auth): refresh tokens + remember me
Backend: add refresh JWT (30d, token_type claim), POST /auth/refresh
endpoint (rotates token pair), remember_me on login, JWT_REFRESH_EXPIRY_DAYS
env var. Extractors now reject refresh tokens on protected routes.

Frontend: sessionStorage for non-remembered sessions, localStorage +
refresh token for remembered sessions. Transparent 401 recovery in
api.ts (retry once after refresh). Remember me checkbox on login page
with security note when checked.
2026-03-19 22:24:26 +01:00
8bdd5e2277 fix(infra): deserialize channel schedule_config via ScheduleConfigCompat for V1 compat 2026-03-17 14:56:09 +01:00
26343b08f8 fix: test mocks for new trait methods, V1 schedule_config re-import, stale comments 2026-03-17 14:53:23 +01:00
6d350940b9 feat(frontend): schedule history dialog with rollback, wire ConfigHistorySheet 2026-03-17 14:48:39 +01:00
ba6abad602 feat(frontend): weekly grid editor with day tabs and copy shortcut 2026-03-17 14:46:34 +01:00
c0da075f03 feat(frontend): config history sheet with pin and restore 2026-03-17 14:45:00 +01:00
6bfb148e39 feat(frontend): config history and schedule rollback hooks 2026-03-17 14:43:12 +01:00
45c05b5720 fix: snapshot existing config before update; rollback returns 200 2026-03-17 14:41:57 +01:00
bd498b9bcb feat(frontend): ScheduleConfig V2 types, weekday schema, export update 2026-03-17 14:39:19 +01:00
20e80ac28e feat: config history — auto-snapshot on update, list/pin/restore endpoints 2026-03-17 14:39:09 +01:00
ad3a73f061 feat: schedule history — list, detail, rollback endpoints 2026-03-17 14:38:51 +01:00
c0fb8f69de feat(infra): implement config snapshot repository methods 2026-03-17 14:32:04 +01:00
8b8e8a8d8c fix(mcp): update block mutations for ScheduleConfig V2 day_blocks 2026-03-17 14:32:02 +01:00
05d2d77515 feat(infra): schedule history list, get-by-id, delete-after methods 2026-03-17 14:32:02 +01:00
8b701745bf fix(api): update block lookups to use all_blocks() after ScheduleConfig V2 2026-03-17 14:31:24 +01:00
a79ee1b228 feat(domain): 7-day generation window, day_blocks lookup by weekday 2026-03-17 14:29:10 +01:00
d8e39c66be feat(infra): add channel_config_snapshots migration 2026-03-17 14:28:35 +01:00
055937fc3d fix(domain): use ChannelId type in patch_config_snapshot_label 2026-03-17 14:27:41 +01:00
1338f6bace feat(domain): extend ChannelRepository and ScheduleRepository ports for history 2026-03-17 14:25:51 +01:00
995f5b1339 feat(domain): add ChannelConfigSnapshot entity 2026-03-17 14:25:49 +01:00
22bee4f32c feat(domain): ScheduleConfig V2 day-keyed weekly grid with V1 compat 2026-03-17 14:21:00 +01:00
5f1421f4bd fix(domain): improve Weekday tests and document all() ordering 2026-03-17 14:18:13 +01:00
f8e8e85cb0 feat(domain): add Weekday enum with From<chrono::Weekday> 2026-03-17 14:16:16 +01:00
c550790287 feat: add find_last_slot_per_block method to schedule repositories and update related logic 2026-03-17 13:02:20 +01:00
d8dd047020 feat: implement local-files feature with various enhancements and cleanup 2026-03-17 03:00:39 +01:00
98 changed files with 8559 additions and 535 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,255 @@
# Library Management — Design Spec
**Date:** 2026-03-19
**Status:** Approved
## Context
K-TV currently has ephemeral library browsing: metadata is always fetched live from providers (Jellyfin, local files) on demand, only accessible through the block editor filter UI. There is no persistent library, no cross-provider browsing, and no way to schedule directly from browsing media.
This feature introduces an in-house library that syncs and stores media metadata from all providers into K-TV's own DB, then surfaces it through a first-class `/library` page where users can browse, filter, multi-select, and schedule media directly onto channels.
---
## Data Model
### Migration `20260319000002_add_library_tables.sql`
**`library_items` table**
| Column | Type | Notes |
|---|---|---|
| `id` | TEXT PK | `"{provider_id}::{raw_item_id}"` — double-colon, matches existing registry prefix format |
| `provider_id` | TEXT | `"jellyfin"`, `"local"`, etc. |
| `external_id` | TEXT | Raw ID from provider (for re-fetching) |
| `title` | TEXT | |
| `content_type` | TEXT | `"movie"` \| `"episode"` \| `"short"` |
| `duration_secs` | INTEGER | |
| `series_name` | TEXT | NULL for movies |
| `season_number` | INTEGER | NULL for movies |
| `episode_number` | INTEGER | NULL for movies |
| `year` | INTEGER | |
| `genres` | TEXT | JSON array |
| `tags` | TEXT | JSON array |
| `collection_id` | TEXT | Provider-specific collection ID |
| `collection_name` | TEXT | Human-readable name (synced from provider) |
| `collection_type` | TEXT | e.g. `"movies"`, `"tvshows"` |
| `thumbnail_url` | TEXT | Provider-served image URL; re-fetched on every sync |
| `synced_at` | TEXT | ISO8601 timestamp |
`thumbnail_url` is refreshed on every full sync. Frontend must handle broken image URLs gracefully (show a placeholder on load error) since URLs may break if provider URL or API key changes between syncs.
**`library_sync_log` table**
| Column | Type | Notes |
|---|---|---|
| `id` | INTEGER PK AUTOINCREMENT | |
| `provider_id` | TEXT | |
| `started_at` | TEXT | ISO8601 |
| `finished_at` | TEXT | ISO8601, NULL while running |
| `items_found` | INTEGER | |
| `status` | TEXT | `"running"` \| `"done"` \| `"error"` |
| `error_msg` | TEXT | NULL on success |
### Migration `20260319000003_add_app_settings.sql`
**`app_settings` table** — general-purpose key-value store for admin-configurable settings. Co-exists with the existing `transcode_settings` singleton table (that table is not modified). Seeded with: `INSERT OR IGNORE INTO app_settings(key, value) VALUES ('library_sync_interval_hours', '6')`.
| Column | Type | Notes |
|---|---|---|
| `key` | TEXT PK | |
| `value` | TEXT | Bare JSON scalar stored as text (e.g. `6`, not `"6"`) |
`GET /admin/settings` returns parsed values: `{ "library_sync_interval_hours": 6 }` (number, not string). Backend parses with `serde_json::Value` on read; frontend receives typed JSON.
---
## Backend Architecture
### Sync Engine
**Layer placement:**
- `LibraryItem`, `LibrarySyncResult`, `LibrarySyncAdapter` trait, and `ILibraryRepository` trait live in **`domain/src/library.rs`**
- `FullSyncAdapter` (impl) and `SqliteLibraryRepository` (impl) live in **`infra/src/library/`**
The `LibrarySyncAdapter` domain trait does **not** take a DB pool — DB writes are an infra concern handled entirely inside the impl:
```rust
// domain/src/library.rs
#[async_trait]
pub trait LibrarySyncAdapter: Send + Sync {
async fn sync_provider(
&self,
provider: &dyn IMediaProvider,
provider_id: &str,
) -> LibrarySyncResult;
}
#[async_trait]
pub trait ILibraryRepository: Send + Sync {
async fn search(&self, filter: LibrarySearchFilter) -> Vec<LibraryItem>;
async fn get_by_id(&self, id: &str) -> Option<LibraryItem>;
async fn list_collections(&self, provider_id: Option<&str>) -> Vec<LibraryCollection>;
async fn list_series(&self, provider_id: Option<&str>) -> Vec<String>;
async fn list_genres(&self, content_type: Option<ContentType>, provider_id: Option<&str>) -> Vec<String>;
async fn upsert_items(&self, provider_id: &str, items: Vec<LibraryItem>) -> DomainResult<()>;
async fn clear_provider(&self, provider_id: &str) -> DomainResult<()>;
async fn log_sync_start(&self, provider_id: &str) -> i64; // returns log row id
async fn log_sync_finish(&self, log_id: i64, result: &LibrarySyncResult);
async fn latest_sync_status(&self) -> Vec<LibrarySyncLogEntry>;
async fn is_sync_running(&self, provider_id: &str) -> bool;
}
```
`FullSyncAdapter` in infra holds `Arc<dyn ILibraryRepository>` and calls repo methods internally — no DB pool leaks into domain.
```
infra/src/library/
mod.rs
full_sync.rs -- FullSyncAdapter impl: calls list_collections for names/types,
fetch_items(&MediaFilter::default()), repo.clear_provider + repo.upsert_items
repository.rs -- SqliteLibraryRepository impl of ILibraryRepository
scheduler.rs -- tokio interval task; 10s startup delay (hardcoded); reads interval from
app_settings on each tick via AppSettingsRepository
```
**AppState** gains:
```rust
library_sync_adapter: Arc<dyn LibrarySyncAdapter>,
library_repo: Arc<dyn ILibraryRepository>,
```
### Sync Concurrency Guard
Before starting a sync for a provider, the scheduler and `POST /library/sync` handler both call `repo.is_sync_running(provider_id)`. If `true`, the scheduler skips that provider for this tick; the HTTP endpoint returns **409 Conflict** with body `{ "error": "sync already running for provider" }`. This prevents the truncate+insert race.
### Admin Settings
- `GET /admin/settings` — returns `app_settings` rows as parsed JSON object. Requires `is_admin = true` (`AdminUser` extractor).
- `PUT /admin/settings` — partial update (only provided keys updated). Requires `is_admin = true`. Scheduler reads new value on next tick.
### Library API Routes (all require authenticated user)
| Endpoint | Notes |
|---|---|
| `GET /library/items?type=&series[]=&collection=&genre=&decade=&min_duration=&max_duration=&search=&provider=&offset=0&limit=50` | DB-backed; returns `{ items: LibraryItemResponse[], total: u32 }` |
| `GET /library/items/:id` | Single item |
| `GET /library/collections?provider=` | `{ id, name, collection_type }[]` from DB |
| `GET /library/series?provider=` | `String[]` from DB |
| `GET /library/genres?type=&provider=` | `String[]` from DB |
| `GET /library/sync/status` | `LibrarySyncLogEntry[]` (latest per provider) |
| `POST /library/sync` | Fires sync; 409 if already running; requires `is_admin = true` |
| `GET /admin/settings` | `{ key: value }` map (parsed); requires `is_admin = true` |
| `PUT /admin/settings` | Partial update; requires `is_admin = true` |
**Existing library route API contract is unchanged** for all params except `offset`/`limit` (new). Frontend `use-library.ts` hooks continue working without modification.
---
## Frontend Architecture
### New route: `/library`
Added to main nav alongside Dashboard and TV.
```
app/(main)/library/
page.tsx -- layout, search/filter state, pagination state, multi-select state
components/
library-sidebar.tsx -- provider picker, type, genre chips, series picker, decade, duration range
library-grid.tsx -- paginated grid of LibraryItemCard
library-item-card.tsx -- thumbnail (with broken-image fallback placeholder), title,
duration badge, content type, checkbox
schedule-from-library-dialog.tsx -- modal (see flow below)
add-to-block-dialog.tsx -- modal (see flow below)
sync-status-bar.tsx -- "Last synced 2h ago · Jellyfin" strip at top
```
### New hooks
```
hooks/use-library-search.ts -- useLibrarySearch(filter, page): wraps GET /library/items with
offset/limit pagination. Query key: ["library", "search", filter, page].
onSuccess of useTriggerSync: invalidate ["library", "search"] and ["library", "sync"].
hooks/use-library-sync.ts -- useLibrarySyncStatus() → ["library", "sync"],
useTriggerSync() → POST /library/sync; on success invalidates
["library", "search"] and ["library", "sync"]
hooks/use-admin-settings.ts -- useAdminSettings(), useUpdateAdminSettings()
```
Existing `use-library.ts` and its four hooks (`useCollections`, `useSeries`, `useGenres`, `useLibraryItems`) are **unchanged** — still used by `AlgorithmicFilterEditor` in the block editor.
### Schedule From Library Flow
1. User selects one or more items → floating action bar at bottom
2. "Schedule on channel" → `ScheduleFromLibraryDialog` modal
3. Modal fields (in order — time/days/strategy disabled until channel is selected):
- **Channel** picker (required; enables remaining fields once selected)
- **Days**: Mon–Sun checkboxes
- **Time**: `NaiveTime` input interpreted in the selected channel's timezone. Timezone label displayed inline (e.g. "20:00 Europe/Warsaw"). Disabled until channel is selected.
- **Duration**: For single item, defaults to `ceil(duration_secs / 60)` minutes shown in UI. For multi-item, user sets manually. Rounding to nearest minute shown explicitly (e.g. "1h 35m (rounded from 1h 34m 47s)").
- **Fill strategy**: Sequential (default for episodic) | Random | Best Fit
4. Preview: *"3 blocks will be created on [Channel] — Mon/Wed/Fri at 20:00 [Europe/Warsaw], Sequential"*
5. Confirm → `PUT /channels/:id` merging new `ProgrammingBlock` entries into `schedule_config.day_blocks`:
- Series / episodic: **Algorithmic** block with `series_names: [series]`
- Specific item(s): **Manual** block with those item IDs
### Add To Block Flow
1. User selects items → "Add to block" from action bar
2. `AddToBlockDialog`:
- Pick channel
- Pick existing **manual** block: populated from `useChannel(id)` by collecting all blocks across all days with `content.type === "manual"`, **deduplicated by block `id`** (same block appearing Mon + Wed shown once)
3. Confirm → appends item IDs to that block. Since the same block object (by `id`) may appear in multiple days in `schedule_config.day_blocks`, the PUT updates **all day entries that contain that block id** — the block is mutated wherever it appears, consistently.
### Admin Settings UI
Settings panel (cog icon in dashboard header, alongside existing transcode settings) gains a "Library sync" section:
- Number input: "Sync interval (hours)"
- "Sync now" button (visible to admin users only; calls `POST /library/sync`; disabled + shows spinner while running)
- Status: "Last synced: [time] · [N] items" per provider from `GET /library/sync/status`
---
## Key Files Modified
**Backend:**
- `domain/src/lib.rs` — add `library` module
- `domain/src/library.rs` — new: `LibraryItem`, `LibraryCollection`, `LibrarySyncResult`, `LibrarySyncAdapter` trait, `ILibraryRepository` trait, `LibrarySearchFilter`, `LibrarySyncLogEntry`
- `infra/src/library/full_sync.rs` — `FullSyncAdapter` impl
- `infra/src/library/repository.rs` — `SqliteLibraryRepository` impl
- `infra/src/library/scheduler.rs` — tokio interval task, 10s startup delay
- `api/src/routes/library.rs` — DB-backed handlers + sync/admin routes
- `api/src/routes/mod.rs` — wire admin settings routes
- `api/src/main.rs` — start sync scheduler task
- `api/src/state.rs` — add `library_sync_adapter: Arc<dyn LibrarySyncAdapter>`, `library_repo: Arc<dyn ILibraryRepository>`
- `migrations_sqlite/20260319000002_add_library_tables.sql`
- `migrations_sqlite/20260319000003_add_app_settings.sql`
**Frontend:**
- `lib/types.ts` — add `LibraryItem`, `LibraryCollection`, `SyncLogEntry`, `AdminSettings`
- `lib/api.ts` — add `api.library.items(filter, page)`, `api.library.syncStatus()`, `api.library.triggerSync()`, `api.admin.getSettings()`, `api.admin.updateSettings(partial)`
- `app/(main)/layout.tsx` — add Library nav link
- New files per structure above
---
## Verification
1. **Sync**: `POST /library/sync` → 200. `GET /library/sync/status` shows `done` with item count. `library_items` rows in DB have `collection_name` and `thumbnail_url` populated.
2. **Sync dedup**: Second `POST /library/sync` while first is running → 409 Conflict.
3. **Library API pagination**: `GET /library/items?offset=0&limit=10` returns 10 items + `total`. `?offset=10&limit=10` returns next page.
4. **Provider filter**: `GET /library/items?provider=jellyfin` returns only Jellyfin items.
5. **Collections**: `GET /library/collections` returns `{ id, name, collection_type }` objects.
6. **Admin guard**: `POST /library/sync` and `PUT /admin/settings` with non-admin user → 403.
7. **Admin settings**: `PUT /admin/settings { "library_sync_interval_hours": 2 }` → `GET /admin/settings` returns `{ "library_sync_interval_hours": 2 }` (number). Scheduler uses new interval.
8. **Library UI**: `/library` page loads, sidebar filters update grid, pagination controls work. `sync-status-bar` shows last sync time.
9. **Broken thumbnail**: Item with a broken `thumbnail_url` shows fallback placeholder in `library-item-card`.
10. **Multi-select action bar**: Select 3 items → action bar appears with "Schedule on channel" and "Add to block".
11. **Schedule flow — time gating**: Time input is disabled until channel is selected; timezone shown next to input after channel selected.
12. **Schedule flow — rounding**: Single-item selection shows rounded duration with note in dialog.
13. **Schedule flow — confirm**: Series scheduled → Dashboard shows Algorithmic blocks on correct days with `series_names` filter.
14. **Add to block — dedup**: Block appearing on Mon+Wed shown once in picker. Confirming updates both days.
15. **Cache invalidation**: After `useTriggerSync()` resolves, `["library", "search"]` and `["library", "sync"]` query keys are invalidated, grid refreshes.
16. **Block editor unchanged**: `AlgorithmicFilterEditor` works; `useLibraryItems` in `use-library.ts` unchanged.
17. **Regression**: `cargo test` passes.

View File

@@ -702,6 +702,7 @@ dependencies = [
"email_address", "email_address",
"rand 0.8.5", "rand 0.8.5",
"serde", "serde",
"serde_json",
"thiserror 2.0.17", "thiserror 2.0.17",
"tokio", "tokio",
"url", "url",

View File

@@ -36,6 +36,7 @@ pub struct Config {
pub jwt_issuer: Option<String>, pub jwt_issuer: Option<String>,
pub jwt_audience: Option<String>, pub jwt_audience: Option<String>,
pub jwt_expiry_hours: u64, pub jwt_expiry_hours: u64,
pub jwt_refresh_expiry_days: u64,
/// Whether the application is running in production mode /// Whether the application is running in production mode
pub is_production: bool, pub is_production: bool,
@@ -117,6 +118,11 @@ impl Config {
.and_then(|s| s.parse().ok()) .and_then(|s| s.parse().ok())
.unwrap_or(24); .unwrap_or(24);
let jwt_refresh_expiry_days = env::var("JWT_REFRESH_EXPIRY_DAYS")
.ok()
.and_then(|s| s.parse().ok())
.unwrap_or(30);
let is_production = env::var("PRODUCTION") let is_production = env::var("PRODUCTION")
.or_else(|_| env::var("RUST_ENV")) .or_else(|_| env::var("RUST_ENV"))
.map(|v| v.to_lowercase() == "production" || v == "1" || v == "true") .map(|v| v.to_lowercase() == "production" || v == "1" || v == "true")
@@ -165,6 +171,7 @@ impl Config {
jwt_issuer, jwt_issuer,
jwt_audience, jwt_audience,
jwt_expiry_hours, jwt_expiry_hours,
jwt_refresh_expiry_days,
is_production, is_production,
allow_registration, allow_registration,
jellyfin_base_url, jellyfin_base_url,

View File

@@ -15,6 +15,15 @@ pub struct LoginRequest {
pub email: Email, pub email: Email,
/// Password is validated on deserialization (min 8 chars) /// Password is validated on deserialization (min 8 chars)
pub password: Password, pub password: Password,
/// When true, a refresh token is also issued for persistent sessions
#[serde(default)]
pub remember_me: bool,
}
/// Refresh token request
#[derive(Debug, Deserialize)]
pub struct RefreshRequest {
pub refresh_token: String,
} }
/// Register request with validated email and password newtypes /// Register request with validated email and password newtypes
@@ -41,6 +50,9 @@ pub struct TokenResponse {
pub access_token: String, pub access_token: String,
pub token_type: String, pub token_type: String,
pub expires_in: u64, pub expires_in: u64,
/// Only present when remember_me was true at login, or on token refresh
#[serde(skip_serializing_if = "Option::is_none")]
pub refresh_token: Option<String>,
} }
/// Per-provider info returned by `GET /config`. /// Per-provider info returned by `GET /config`.
@@ -114,7 +126,7 @@ pub struct UpdateChannelRequest {
pub description: Option<String>, pub description: Option<String>,
pub timezone: Option<String>, pub timezone: Option<String>,
/// Replace the entire schedule config (template import/edit) /// Replace the entire schedule config (template import/edit)
pub schedule_config: Option<domain::ScheduleConfig>, pub schedule_config: Option<domain::ScheduleConfigCompat>,
pub recycle_policy: Option<domain::RecyclePolicy>, pub recycle_policy: Option<domain::RecyclePolicy>,
pub auto_schedule: Option<bool>, pub auto_schedule: Option<bool>,
pub access_mode: Option<domain::AccessMode>, pub access_mode: Option<domain::AccessMode>,
@@ -180,6 +192,34 @@ impl From<domain::Channel> for ChannelResponse {
} }
} }
// ============================================================================
// Config history DTOs
// ============================================================================
#[derive(Debug, Serialize)]
pub struct ConfigSnapshotResponse {
pub id: Uuid,
pub version_num: i64,
pub label: Option<String>,
pub created_at: DateTime<Utc>,
}
impl From<domain::ChannelConfigSnapshot> for ConfigSnapshotResponse {
fn from(s: domain::ChannelConfigSnapshot) -> Self {
Self {
id: s.id,
version_num: s.version_num,
label: s.label,
created_at: s.created_at,
}
}
}
#[derive(Debug, Deserialize)]
pub struct PatchSnapshotRequest {
pub label: Option<String>,
}
// ============================================================================ // ============================================================================
// EPG / playback DTOs // EPG / playback DTOs
// ============================================================================ // ============================================================================
@@ -245,8 +285,7 @@ impl ScheduledSlotResponse {
pub fn with_block_access(slot: domain::ScheduledSlot, channel: &domain::Channel) -> Self { pub fn with_block_access(slot: domain::ScheduledSlot, channel: &domain::Channel) -> Self {
let block_access_mode = channel let block_access_mode = channel
.schedule_config .schedule_config
.blocks .all_blocks()
.iter()
.find(|b| b.id == slot.source_block_id) .find(|b| b.id == slot.source_block_id)
.map(|b| b.access_mode.clone()) .map(|b| b.access_mode.clone())
.unwrap_or_default(); .unwrap_or_default();
@@ -287,22 +326,46 @@ pub struct ScheduleResponse {
// Transcode DTOs // Transcode DTOs
// ============================================================================ // ============================================================================
#[cfg(feature = "local-files")]
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
pub struct TranscodeSettingsResponse { pub struct TranscodeSettingsResponse {
pub cleanup_ttl_hours: u32, pub cleanup_ttl_hours: u32,
} }
#[cfg(feature = "local-files")]
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct UpdateTranscodeSettingsRequest { pub struct UpdateTranscodeSettingsRequest {
pub cleanup_ttl_hours: u32, pub cleanup_ttl_hours: u32,
} }
#[cfg(feature = "local-files")]
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
pub struct TranscodeStatsResponse { pub struct TranscodeStatsResponse {
pub cache_size_bytes: u64, pub cache_size_bytes: u64,
pub item_count: usize, pub item_count: usize,
} }
#[derive(Debug, Serialize)]
pub struct ScheduleHistoryEntry {
pub id: Uuid,
pub generation: u32,
pub valid_from: DateTime<Utc>,
pub valid_until: DateTime<Utc>,
pub slot_count: usize,
}
impl From<domain::GeneratedSchedule> for ScheduleHistoryEntry {
fn from(s: domain::GeneratedSchedule) -> Self {
Self {
id: s.id,
generation: s.generation,
valid_from: s.valid_from,
valid_until: s.valid_until,
slot_count: s.slots.len(),
}
}
}
impl From<domain::GeneratedSchedule> for ScheduleResponse { impl From<domain::GeneratedSchedule> for ScheduleResponse {
fn from(s: domain::GeneratedSchedule) -> Self { fn from(s: domain::GeneratedSchedule) -> Self {
Self { Self {

View File

@@ -36,11 +36,15 @@ pub enum ApiError {
#[error("auth_required")] #[error("auth_required")]
AuthRequired, AuthRequired,
#[allow(dead_code)]
#[error("Not found: {0}")] #[error("Not found: {0}")]
NotFound(String), NotFound(String),
#[error("Not implemented: {0}")] #[error("Not implemented: {0}")]
NotImplemented(String), NotImplemented(String),
#[error("Conflict: {0}")]
Conflict(String),
} }
/// Error response body /// Error response body
@@ -154,6 +158,14 @@ impl IntoResponse for ApiError {
details: Some(msg.clone()), details: Some(msg.clone()),
}, },
), ),
ApiError::Conflict(msg) => (
StatusCode::CONFLICT,
ErrorResponse {
error: "Conflict".to_string(),
details: Some(msg.clone()),
},
),
}; };
(status, Json(error_response)).into_response() (status, Json(error_response)).into_response()
@@ -173,10 +185,12 @@ impl ApiError {
Self::NotFound(msg.into()) Self::NotFound(msg.into())
} }
pub fn conflict(msg: impl Into<String>) -> Self {
Self::Conflict(msg.into())
}
pub fn not_implemented(msg: impl Into<String>) -> Self { pub fn not_implemented(msg: impl Into<String>) -> Self {
Self::NotImplemented(msg.into()) Self::NotImplemented(msg.into())
} }
} }
/// Result type alias for API handlers
pub type ApiResult<T> = Result<T, ApiError>;

View File

@@ -67,7 +67,7 @@ impl FromRequestParts<AppState> for OptionalCurrentUser {
let user = validate_jwt_token(&token, state).await.ok(); let user = validate_jwt_token(&token, state).await.ok();
return Ok(OptionalCurrentUser(user)); return Ok(OptionalCurrentUser(user));
} }
return Ok(OptionalCurrentUser(None)); Ok(OptionalCurrentUser(None))
} }
#[cfg(not(feature = "auth-jwt"))] #[cfg(not(feature = "auth-jwt"))]
@@ -122,7 +122,7 @@ pub(crate) async fn validate_jwt_token(token: &str, state: &AppState) -> Result<
.as_ref() .as_ref()
.ok_or_else(|| ApiError::Internal("JWT validator not configured".to_string()))?; .ok_or_else(|| ApiError::Internal("JWT validator not configured".to_string()))?;
let claims = validator.validate_token(token).map_err(|e| { let claims = validator.validate_access_token(token).map_err(|e| {
tracing::debug!("JWT validation failed: {:?}", e); tracing::debug!("JWT validation failed: {:?}", e);
match e { match e {
infra::auth::jwt::JwtError::Expired => { infra::auth::jwt::JwtError::Expired => {

View File

@@ -0,0 +1,64 @@
//! Background library sync task.
//! Fires 10 seconds after startup, then every N hours (read from app_settings).
use std::sync::Arc;
use std::time::Duration;
use domain::IProviderRegistry;
const STARTUP_DELAY_SECS: u64 = 10;
const DEFAULT_INTERVAL_HOURS: u64 = 6;
/// Background library-sync driver: waits a short startup grace period,
/// then alternates between running one sync pass over all providers and
/// sleeping for the admin-configured interval. Never returns.
pub async fn run_library_sync(
    sync_adapter: Arc<dyn domain::LibrarySyncAdapter>,
    registry: Arc<tokio::sync::RwLock<Arc<infra::ProviderRegistry>>>,
    app_settings_repo: Arc<dyn domain::IAppSettingsRepository>,
) {
    // Give the server a moment to finish startup before the first pass.
    tokio::time::sleep(Duration::from_secs(STARTUP_DELAY_SECS)).await;
    loop {
        tick(&sync_adapter, &registry).await;
        // Re-read the interval every cycle so a changed admin setting
        // takes effect on the next tick without a restart.
        let hours = load_interval_hours(&app_settings_repo).await;
        tokio::time::sleep(Duration::from_secs(hours * 3600)).await;
    }
}
/// Reads the sync interval (in hours) from app_settings.
/// Falls back to `DEFAULT_INTERVAL_HOURS` on repository error, missing
/// key, or a value that does not parse as an unsigned integer.
async fn load_interval_hours(repo: &Arc<dyn domain::IAppSettingsRepository>) -> u64 {
    match repo.get("library_sync_interval_hours").await {
        Ok(Some(raw)) => raw.parse::<u64>().unwrap_or(DEFAULT_INTERVAL_HOURS),
        _ => DEFAULT_INTERVAL_HOURS,
    }
}
/// Runs one sync pass: snapshots the current provider ids, then syncs each
/// provider in turn, logging success or failure per provider.
///
/// The registry read guard is explicitly dropped before each `.await` on
/// `sync_provider` so the lock is never held across a (potentially long)
/// sync — writers can swap the registry between providers.
async fn tick(
    sync_adapter: &Arc<dyn domain::LibrarySyncAdapter>,
    registry: &Arc<tokio::sync::RwLock<Arc<infra::ProviderRegistry>>>,
) {
    // Snapshot ids first; the set is re-checked per provider below because
    // the registry may change while this pass is running.
    let reg = registry.read().await;
    let provider_ids: Vec<String> = reg.provider_ids();
    drop(reg);
    for provider_id in provider_ids {
    // Re-acquire briefly to fetch the provider handle; it may have been
    // removed since the snapshot, in which case we skip it.
    let reg = registry.read().await;
    let provider = match reg.get_provider(&provider_id) {
    Some(p) => p,
    None => continue,
    };
    // Release the lock before the long-running sync await.
    drop(reg);
    tracing::info!("library-sync: syncing provider '{}'", provider_id);
    let result = sync_adapter.sync_provider(provider.as_ref(), &provider_id).await;
    if let Some(ref err) = result.error {
    tracing::warn!("library-sync: provider '{}' failed: {}", provider_id, err);
    } else {
    tracing::info!(
    "library-sync: provider '{}' done — {} items in {}ms",
    provider_id, result.items_found, result.duration_ms
    );
    }
    }
}

View File

@@ -7,12 +7,13 @@ use std::sync::Arc;
use tracing::info; use tracing::info;
use domain::{ChannelService, IProviderRegistry, ScheduleEngineService, UserService}; use domain::{ChannelService, IProviderRegistry, ScheduleEngineService, UserService};
use infra::factory::{build_activity_log_repository, build_channel_repository, build_provider_config_repository, build_schedule_repository, build_user_repository}; use infra::factory::{build_activity_log_repository, build_app_settings_repository, build_channel_repository, build_library_repository, build_provider_config_repository, build_schedule_repository, build_user_repository};
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
use infra::factory::build_transcode_settings_repository; use infra::factory::build_transcode_settings_repository;
mod config; mod config;
mod database; mod database;
mod library_scheduler;
mod provider_registry; mod provider_registry;
mod dto; mod dto;
mod error; mod error;
@@ -80,6 +81,11 @@ async fn main() -> anyhow::Result<()> {
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
let transcode_settings_repo = build_transcode_settings_repository(&db_pool).await.ok(); let transcode_settings_repo = build_transcode_settings_repository(&db_pool).await.ok();
let library_repo = build_library_repository(&db_pool).await?;
let app_settings_repo = build_app_settings_repository(&db_pool).await?;
let library_sync_adapter: Arc<dyn domain::LibrarySyncAdapter> =
Arc::new(infra::FullSyncAdapter::new(Arc::clone(&library_repo)));
#[allow(unused_mut)] #[allow(unused_mut)]
let mut state = AppState::new( let mut state = AppState::new(
user_service, user_service,
@@ -93,14 +99,17 @@ async fn main() -> anyhow::Result<()> {
handles.log_history, handles.log_history,
activity_log_repo, activity_log_repo,
db_pool, db_pool,
library_repo,
library_sync_adapter,
app_settings_repo,
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
transcode_settings_repo, transcode_settings_repo,
) )
.await?; .await?;
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
if let Some(idx) = bundle.local_index { if !bundle.local_index.is_empty() {
*state.local_index.write().await = Some(idx); *state.local_index.write().await = bundle.local_index;
} }
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
if let Some(tm) = bundle.transcode_manager { if let Some(tm) = bundle.transcode_manager {
@@ -113,5 +122,11 @@ async fn main() -> anyhow::Result<()> {
event_tx, event_tx,
); );
tokio::spawn(library_scheduler::run_library_sync(
Arc::clone(&state.library_sync_adapter),
Arc::clone(&state.provider_registry),
Arc::clone(&state.app_settings_repo),
));
server::build_and_serve(state, &config).await server::build_and_serve(state, &config).await
} }

View File

@@ -139,8 +139,8 @@ mod tests {
use chrono::{DateTime, Duration, Utc}; use chrono::{DateTime, Duration, Utc};
use domain::value_objects::{ChannelId, ContentType, UserId}; use domain::value_objects::{ChannelId, ContentType, UserId};
use domain::{ use domain::{
Channel, ChannelRepository, Collection, DomainResult, GeneratedSchedule, IProviderRegistry, BlockId, Channel, ChannelRepository, Collection, DomainResult, GeneratedSchedule,
MediaFilter, MediaItem, MediaItemId, PlaybackRecord, ProviderCapabilities, IProviderRegistry, MediaFilter, MediaItem, MediaItemId, PlaybackRecord, ProviderCapabilities,
ScheduleEngineService, ScheduleRepository, SeriesSummary, StreamQuality, ScheduleEngineService, ScheduleRepository, SeriesSummary, StreamQuality,
}; };
use tokio::sync::broadcast; use tokio::sync::broadcast;
@@ -172,6 +172,10 @@ mod tests {
async fn delete(&self, _id: ChannelId) -> DomainResult<()> { async fn delete(&self, _id: ChannelId) -> DomainResult<()> {
unimplemented!() unimplemented!()
} }
async fn save_config_snapshot(&self, _channel_id: ChannelId, _config: &domain::ScheduleConfig, _label: Option<String>) -> DomainResult<domain::ChannelConfigSnapshot> { unimplemented!() }
async fn list_config_snapshots(&self, _channel_id: ChannelId) -> DomainResult<Vec<domain::ChannelConfigSnapshot>> { unimplemented!() }
async fn get_config_snapshot(&self, _channel_id: ChannelId, _snapshot_id: Uuid) -> DomainResult<Option<domain::ChannelConfigSnapshot>> { unimplemented!() }
async fn patch_config_snapshot_label(&self, _channel_id: ChannelId, _snapshot_id: Uuid, _label: Option<String>) -> DomainResult<Option<domain::ChannelConfigSnapshot>> { unimplemented!() }
} }
struct MockScheduleRepo { struct MockScheduleRepo {
@@ -207,6 +211,15 @@ mod tests {
async fn save_playback_record(&self, _record: &PlaybackRecord) -> DomainResult<()> { async fn save_playback_record(&self, _record: &PlaybackRecord) -> DomainResult<()> {
Ok(()) Ok(())
} }
async fn find_last_slot_per_block(
&self,
_channel_id: ChannelId,
) -> DomainResult<HashMap<BlockId, MediaItemId>> {
Ok(HashMap::new())
}
async fn list_schedule_history(&self, _channel_id: ChannelId) -> DomainResult<Vec<GeneratedSchedule>> { unimplemented!() }
async fn get_schedule_by_id(&self, _channel_id: ChannelId, _schedule_id: Uuid) -> DomainResult<Option<GeneratedSchedule>> { unimplemented!() }
async fn delete_schedules_after(&self, _channel_id: ChannelId, _target_generation: u32) -> DomainResult<()> { unimplemented!() }
} }
struct MockRegistry; struct MockRegistry;
@@ -268,7 +281,7 @@ mod tests {
ch ch
} }
fn make_slot(channel_id: Uuid, slot_id: Uuid) -> domain::ScheduledSlot { fn make_slot(_channel_id: Uuid, slot_id: Uuid) -> domain::ScheduledSlot {
use domain::entities::MediaItem; use domain::entities::MediaItem;
let now = Utc::now(); let now = Utc::now();
domain::ScheduledSlot { domain::ScheduledSlot {
@@ -287,6 +300,8 @@ mod tests {
series_name: None, series_name: None,
season_number: None, season_number: None,
episode_number: None, episode_number: None,
thumbnail_url: None,
collection_id: None,
}, },
source_block_id: Uuid::new_v4(), source_block_id: Uuid::new_v4(),
} }
@@ -347,7 +362,7 @@ mod tests {
assert_eq!(cid, channel_id); assert_eq!(cid, channel_id);
assert_eq!(s.id, slot_id); assert_eq!(s.id, slot_id);
} }
other => panic!("expected BroadcastTransition, got something else"), _other => panic!("expected BroadcastTransition, got something else"),
} }
} }
@@ -425,6 +440,15 @@ mod tests {
async fn save_playback_record(&self, _: &PlaybackRecord) -> DomainResult<()> { async fn save_playback_record(&self, _: &PlaybackRecord) -> DomainResult<()> {
Ok(()) Ok(())
} }
async fn find_last_slot_per_block(
&self,
_: ChannelId,
) -> DomainResult<HashMap<BlockId, MediaItemId>> {
Ok(HashMap::new())
}
async fn list_schedule_history(&self, _: ChannelId) -> DomainResult<Vec<GeneratedSchedule>> { unimplemented!() }
async fn get_schedule_by_id(&self, _: ChannelId, _: Uuid) -> DomainResult<Option<GeneratedSchedule>> { unimplemented!() }
async fn delete_schedules_after(&self, _: ChannelId, _: u32) -> DomainResult<()> { unimplemented!() }
} }
let now = Utc::now(); let now = Utc::now();

View File

@@ -14,7 +14,7 @@ use infra::factory::build_transcode_settings_repository;
pub struct ProviderBundle { pub struct ProviderBundle {
pub registry: Arc<infra::ProviderRegistry>, pub registry: Arc<infra::ProviderRegistry>,
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
pub local_index: Option<Arc<infra::LocalIndex>>, pub local_index: std::collections::HashMap<String, Arc<infra::LocalIndex>>,
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
pub transcode_manager: Option<Arc<infra::TranscodeManager>>, pub transcode_manager: Option<Arc<infra::TranscodeManager>>,
} }
@@ -26,7 +26,7 @@ pub async fn build_provider_registry(
provider_config_repo: &Arc<dyn ProviderConfigRepository>, provider_config_repo: &Arc<dyn ProviderConfigRepository>,
) -> anyhow::Result<ProviderBundle> { ) -> anyhow::Result<ProviderBundle> {
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
let mut local_index: Option<Arc<infra::LocalIndex>> = None; let mut local_index: std::collections::HashMap<String, Arc<infra::LocalIndex>> = std::collections::HashMap::new();
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
let mut transcode_manager: Option<Arc<infra::TranscodeManager>> = None; let mut transcode_manager: Option<Arc<infra::TranscodeManager>> = None;
@@ -41,50 +41,53 @@ pub async fn build_provider_registry(
#[cfg(feature = "jellyfin")] #[cfg(feature = "jellyfin")]
"jellyfin" => { "jellyfin" => {
if let Ok(cfg) = serde_json::from_str::<infra::JellyfinConfig>(&row.config_json) { if let Ok(cfg) = serde_json::from_str::<infra::JellyfinConfig>(&row.config_json) {
tracing::info!("Loading Jellyfin provider from DB config"); tracing::info!("Loading Jellyfin provider [{}] from DB config", row.id);
registry.register("jellyfin", Arc::new(infra::JellyfinMediaProvider::new(cfg))); registry.register(&row.id, Arc::new(infra::JellyfinMediaProvider::new(cfg)));
} }
} }
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
"local_files" => { "local_files" => {
if let Ok(cfg_map) = serde_json::from_str::<std::collections::HashMap<String, String>>(&row.config_json) { if let Ok(cfg_map) = serde_json::from_str::<std::collections::HashMap<String, String>>(&row.config_json)
if let Some(files_dir) = cfg_map.get("files_dir") { && let Some(files_dir) = cfg_map.get("files_dir")
{
let transcode_dir = cfg_map.get("transcode_dir") let transcode_dir = cfg_map.get("transcode_dir")
.filter(|s| !s.is_empty()) .filter(|s| !s.is_empty())
.map(std::path::PathBuf::from); .map(std::path::PathBuf::from);
let cleanup_ttl_hours: u32 = cfg_map.get("cleanup_ttl_hours") let cleanup_ttl_hours: u32 = cfg_map.get("cleanup_ttl_hours")
.and_then(|s| s.parse().ok()) .and_then(|s| s.parse().ok())
.unwrap_or(24); .unwrap_or(24);
tracing::info!("Loading local-files provider from DB config at {:?}", files_dir); tracing::info!("Loading local-files provider [{}] from DB config at {:?}", row.id, files_dir);
match infra::factory::build_local_files_bundle( match infra::factory::build_local_files_bundle(
db_pool, db_pool,
std::path::PathBuf::from(files_dir), std::path::PathBuf::from(files_dir),
transcode_dir, transcode_dir,
cleanup_ttl_hours, cleanup_ttl_hours,
config.base_url.clone(), config.base_url.clone(),
&row.id,
).await { ).await {
Ok(bundle) => { Ok(bundle) => {
let scan_idx = Arc::clone(&bundle.local_index); let scan_idx = Arc::clone(&bundle.local_index);
tokio::spawn(async move { scan_idx.rescan().await; }); tokio::spawn(async move { scan_idx.rescan().await; });
if let Some(ref tm) = bundle.transcode_manager { if let Some(ref tm) = bundle.transcode_manager {
tracing::info!("Transcoding enabled"); tracing::info!("Transcoding enabled for [{}]", row.id);
// Load persisted TTL override from transcode_settings table. // Load persisted TTL override from transcode_settings table.
let tm_clone = Arc::clone(tm); let tm_clone = Arc::clone(tm);
let repo = build_transcode_settings_repository(db_pool).await.ok(); let repo = build_transcode_settings_repository(db_pool).await.ok();
tokio::spawn(async move { tokio::spawn(async move {
if let Some(r) = repo { if let Some(r) = repo
if let Ok(Some(ttl)) = r.load_cleanup_ttl().await { && let Ok(Some(ttl)) = r.load_cleanup_ttl().await
{
tm_clone.set_cleanup_ttl(ttl); tm_clone.set_cleanup_ttl(ttl);
} }
}
}); });
} }
registry.register("local", bundle.provider); registry.register(&row.id, bundle.provider);
if transcode_manager.is_none() {
transcode_manager = bundle.transcode_manager; transcode_manager = bundle.transcode_manager;
local_index = Some(bundle.local_index);
} }
Err(e) => tracing::warn!("Failed to build local-files provider: {}", e), local_index.insert(row.id.clone(), bundle.local_index);
} }
Err(e) => tracing::warn!("Failed to build local-files provider [{}]: {}", row.id, e),
} }
} }
} }
@@ -115,6 +118,7 @@ pub async fn build_provider_registry(
config.transcode_dir.clone(), config.transcode_dir.clone(),
config.transcode_cleanup_ttl_hours, config.transcode_cleanup_ttl_hours,
config.base_url.clone(), config.base_url.clone(),
"local",
).await { ).await {
Ok(bundle) => { Ok(bundle) => {
let scan_idx = Arc::clone(&bundle.local_index); let scan_idx = Arc::clone(&bundle.local_index);
@@ -124,16 +128,16 @@ pub async fn build_provider_registry(
let tm_clone = Arc::clone(tm); let tm_clone = Arc::clone(tm);
let repo = build_transcode_settings_repository(db_pool).await.ok(); let repo = build_transcode_settings_repository(db_pool).await.ok();
tokio::spawn(async move { tokio::spawn(async move {
if let Some(r) = repo { if let Some(r) = repo
if let Ok(Some(ttl)) = r.load_cleanup_ttl().await { && let Ok(Some(ttl)) = r.load_cleanup_ttl().await
{
tm_clone.set_cleanup_ttl(ttl); tm_clone.set_cleanup_ttl(ttl);
} }
}
}); });
} }
registry.register("local", bundle.provider); registry.register("local", bundle.provider);
transcode_manager = bundle.transcode_manager; transcode_manager = bundle.transcode_manager;
local_index = Some(bundle.local_index); local_index.insert("local".to_string(), bundle.local_index);
} }
Err(e) => tracing::warn!("local-files requires SQLite; ignoring LOCAL_FILES_DIR: {}", e), Err(e) => tracing::warn!("local-files requires SQLite; ignoring LOCAL_FILES_DIR: {}", e),
} }

View File

@@ -1,6 +1,6 @@
//! Admin provider management routes. //! Admin provider management routes.
//! //!
//! All routes require an admin user. Allows listing, updating, deleting, and //! All routes require an admin user. Allows listing, creating, updating, deleting, and
//! testing media provider configs stored in the DB. Only available when //! testing media provider configs stored in the DB. Only available when
//! CONFIG_SOURCE=db. //! CONFIG_SOURCE=db.
@@ -11,7 +11,7 @@ use axum::Router;
use axum::extract::{Path, State}; use axum::extract::{Path, State};
use axum::http::StatusCode; use axum::http::StatusCode;
use axum::response::IntoResponse; use axum::response::IntoResponse;
use axum::routing::{get, post, put, delete}; use axum::routing::{get, post, put};
use axum::Json; use axum::Json;
use domain::errors::DomainResult; use domain::errors::DomainResult;
use domain::ProviderConfigRow; use domain::ProviderConfigRow;
@@ -26,14 +26,36 @@ use crate::state::AppState;
// DTOs // DTOs
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
/// Validate that an instance id is a safe slug (alphanumeric + hyphens, 1-40 chars).
fn is_valid_instance_id(id: &str) -> bool {
!id.is_empty()
&& id.len() <= 40
&& id.chars().all(|c| c.is_ascii_alphanumeric() || c == '-')
}
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct ProviderConfigPayload { pub struct CreateProviderRequest {
pub id: String,
pub provider_type: String,
pub config_json: HashMap<String, String>, pub config_json: HashMap<String, String>,
pub enabled: bool, pub enabled: bool,
} }
#[derive(Debug, Deserialize)]
pub struct UpdateProviderRequest {
pub config_json: HashMap<String, String>,
pub enabled: bool,
}
#[derive(Debug, Deserialize)]
pub struct TestProviderRequest {
pub provider_type: String,
pub config_json: HashMap<String, String>,
}
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
pub struct ProviderConfigResponse { pub struct ProviderConfigResponse {
pub id: String,
pub provider_type: String, pub provider_type: String,
pub config_json: HashMap<String, serde_json::Value>, pub config_json: HashMap<String, serde_json::Value>,
pub enabled: bool, pub enabled: bool,
@@ -51,9 +73,9 @@ pub struct TestResult {
pub fn router() -> Router<AppState> { pub fn router() -> Router<AppState> {
Router::new() Router::new()
.route("/", get(list_providers)) .route("/", get(list_providers).post(create_provider))
.route("/{type}", put(update_provider).delete(delete_provider)) .route("/{id}", put(update_provider).delete(delete_provider))
.route("/{type}/test", post(test_provider)) .route("/test", post(test_provider))
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@@ -97,6 +119,12 @@ async fn rebuild_registry(state: &AppState) -> DomainResult<()> {
let rows = state.provider_config_repo.get_all().await?; let rows = state.provider_config_repo.get_all().await?;
let mut new_registry = infra::ProviderRegistry::new(); let mut new_registry = infra::ProviderRegistry::new();
#[cfg(feature = "local-files")]
let mut new_local_index: std::collections::HashMap<String, Arc<infra::LocalIndex>> =
std::collections::HashMap::new();
#[cfg(feature = "local-files")]
let mut first_transcode_manager: Option<Arc<infra::TranscodeManager>> = None;
for row in &rows { for row in &rows {
if !row.enabled { if !row.enabled {
continue; continue;
@@ -108,7 +136,7 @@ async fn rebuild_registry(state: &AppState) -> DomainResult<()> {
serde_json::from_str::<infra::JellyfinConfig>(&row.config_json) serde_json::from_str::<infra::JellyfinConfig>(&row.config_json)
{ {
new_registry.register( new_registry.register(
"jellyfin", &row.id,
Arc::new(infra::JellyfinMediaProvider::new(cfg)), Arc::new(infra::JellyfinMediaProvider::new(cfg)),
); );
} }
@@ -144,16 +172,19 @@ async fn rebuild_registry(state: &AppState) -> DomainResult<()> {
transcode_dir, transcode_dir,
cleanup_ttl_hours, cleanup_ttl_hours,
base_url, base_url,
&row.id,
).await { ).await {
Ok(bundle) => { Ok(bundle) => {
let scan_idx = Arc::clone(&bundle.local_index); let scan_idx = Arc::clone(&bundle.local_index);
tokio::spawn(async move { scan_idx.rescan().await; }); tokio::spawn(async move { scan_idx.rescan().await; });
new_registry.register("local", bundle.provider); new_registry.register(&row.id, bundle.provider);
*state.local_index.write().await = Some(bundle.local_index); new_local_index.insert(row.id.clone(), bundle.local_index);
*state.transcode_manager.write().await = bundle.transcode_manager; if first_transcode_manager.is_none() {
first_transcode_manager = bundle.transcode_manager;
}
} }
Err(e) => { Err(e) => {
tracing::warn!("local_files provider requires SQLite; skipping: {}", e); tracing::warn!("local_files provider [{}] requires SQLite; skipping: {}", row.id, e);
continue; continue;
} }
} }
@@ -167,6 +198,11 @@ async fn rebuild_registry(state: &AppState) -> DomainResult<()> {
} }
*state.provider_registry.write().await = Arc::new(new_registry); *state.provider_registry.write().await = Arc::new(new_registry);
#[cfg(feature = "local-files")]
{
*state.local_index.write().await = new_local_index;
*state.transcode_manager.write().await = first_transcode_manager;
}
Ok(()) Ok(())
} }
@@ -187,6 +223,7 @@ pub async fn list_providers(
let response: Vec<ProviderConfigResponse> = rows let response: Vec<ProviderConfigResponse> = rows
.iter() .iter()
.map(|row| ProviderConfigResponse { .map(|row| ProviderConfigResponse {
id: row.id.clone(),
provider_type: row.provider_type.clone(), provider_type: row.provider_type.clone(),
config_json: mask_config(&row.config_json), config_json: mask_config(&row.config_json),
enabled: row.enabled, enabled: row.enabled,
@@ -196,29 +233,49 @@ pub async fn list_providers(
Ok(Json(response)) Ok(Json(response))
} }
pub async fn update_provider( pub async fn create_provider(
State(state): State<AppState>, State(state): State<AppState>,
AdminUser(_user): AdminUser, AdminUser(_user): AdminUser,
Path(provider_type): Path<String>, Json(payload): Json<CreateProviderRequest>,
Json(payload): Json<ProviderConfigPayload>,
) -> Result<impl IntoResponse, ApiError> { ) -> Result<impl IntoResponse, ApiError> {
if state.config.config_source != ConfigSource::Db { if state.config.config_source != ConfigSource::Db {
return Ok(conflict_response().into_response()); return Ok(conflict_response().into_response());
} }
let known = matches!(provider_type.as_str(), "jellyfin" | "local_files"); if !is_valid_instance_id(&payload.id) {
return Err(ApiError::Validation(
"Instance id must be 1-40 alphanumeric+hyphen characters".to_string(),
));
}
let known = matches!(payload.provider_type.as_str(), "jellyfin" | "local_files");
if !known { if !known {
return Err(ApiError::Validation(format!( return Err(ApiError::Validation(format!(
"Unknown provider type: {}", "Unknown provider type: {}",
provider_type payload.provider_type
))); )));
} }
// Check for uniqueness
if state
.provider_config_repo
.get_by_id(&payload.id)
.await
.map_err(ApiError::from)?
.is_some()
{
return Ok((
StatusCode::CONFLICT,
Json(serde_json::json!({ "error": format!("Provider instance '{}' already exists", payload.id) })),
).into_response());
}
let config_json = serde_json::to_string(&payload.config_json) let config_json = serde_json::to_string(&payload.config_json)
.map_err(|e| ApiError::Internal(format!("Failed to serialize config: {}", e)))?; .map_err(|e| ApiError::Internal(format!("Failed to serialize config: {}", e)))?;
let row = ProviderConfigRow { let row = ProviderConfigRow {
provider_type: provider_type.clone(), id: payload.id.clone(),
provider_type: payload.provider_type.clone(),
config_json: config_json.clone(), config_json: config_json.clone(),
enabled: payload.enabled, enabled: payload.enabled,
updated_at: chrono::Utc::now().to_rfc3339(), updated_at: chrono::Utc::now().to_rfc3339(),
@@ -235,7 +292,56 @@ pub async fn update_provider(
.map_err(ApiError::from)?; .map_err(ApiError::from)?;
let response = ProviderConfigResponse { let response = ProviderConfigResponse {
provider_type, id: payload.id,
provider_type: payload.provider_type,
config_json: mask_config(&config_json),
enabled: payload.enabled,
};
Ok((StatusCode::CREATED, Json(response)).into_response())
}
pub async fn update_provider(
State(state): State<AppState>,
AdminUser(_user): AdminUser,
Path(instance_id): Path<String>,
Json(payload): Json<UpdateProviderRequest>,
) -> Result<impl IntoResponse, ApiError> {
if state.config.config_source != ConfigSource::Db {
return Ok(conflict_response().into_response());
}
let existing = state
.provider_config_repo
.get_by_id(&instance_id)
.await
.map_err(ApiError::from)?
.ok_or_else(|| ApiError::NotFound(format!("Provider instance '{}' not found", instance_id)))?;
let config_json = serde_json::to_string(&payload.config_json)
.map_err(|e| ApiError::Internal(format!("Failed to serialize config: {}", e)))?;
let row = ProviderConfigRow {
id: existing.id.clone(),
provider_type: existing.provider_type.clone(),
config_json: config_json.clone(),
enabled: payload.enabled,
updated_at: chrono::Utc::now().to_rfc3339(),
};
state
.provider_config_repo
.upsert(&row)
.await
.map_err(ApiError::from)?;
rebuild_registry(&state)
.await
.map_err(ApiError::from)?;
let response = ProviderConfigResponse {
id: existing.id,
provider_type: existing.provider_type,
config_json: mask_config(&config_json), config_json: mask_config(&config_json),
enabled: payload.enabled, enabled: payload.enabled,
}; };
@@ -246,7 +352,7 @@ pub async fn update_provider(
pub async fn delete_provider( pub async fn delete_provider(
State(state): State<AppState>, State(state): State<AppState>,
AdminUser(_user): AdminUser, AdminUser(_user): AdminUser,
Path(provider_type): Path<String>, Path(instance_id): Path<String>,
) -> Result<impl IntoResponse, ApiError> { ) -> Result<impl IntoResponse, ApiError> {
if state.config.config_source != ConfigSource::Db { if state.config.config_source != ConfigSource::Db {
return Ok(conflict_response().into_response()); return Ok(conflict_response().into_response());
@@ -254,7 +360,7 @@ pub async fn delete_provider(
state state
.provider_config_repo .provider_config_repo
.delete(&provider_type) .delete(&instance_id)
.await .await
.map_err(ApiError::from)?; .map_err(ApiError::from)?;
@@ -268,10 +374,9 @@ pub async fn delete_provider(
pub async fn test_provider( pub async fn test_provider(
State(_state): State<AppState>, State(_state): State<AppState>,
AdminUser(_user): AdminUser, AdminUser(_user): AdminUser,
Path(provider_type): Path<String>, Json(payload): Json<TestProviderRequest>,
Json(payload): Json<ProviderConfigPayload>,
) -> Result<impl IntoResponse, ApiError> { ) -> Result<impl IntoResponse, ApiError> {
let result = match provider_type.as_str() { let result = match payload.provider_type.as_str() {
"jellyfin" => test_jellyfin(&payload.config_json).await, "jellyfin" => test_jellyfin(&payload.config_json).await,
"local_files" => test_local_files(&payload.config_json), "local_files" => test_local_files(&payload.config_json),
_ => TestResult { _ => TestResult {

View File

@@ -6,13 +6,13 @@ use axum::{
}; };
use crate::{ use crate::{
dto::{LoginRequest, RegisterRequest, TokenResponse, UserResponse}, dto::{LoginRequest, RefreshRequest, RegisterRequest, TokenResponse, UserResponse},
error::ApiError, error::ApiError,
extractors::CurrentUser, extractors::CurrentUser,
state::AppState, state::AppState,
}; };
use super::create_jwt; use super::{create_jwt, create_refresh_jwt};
/// Login with email + password → JWT token /// Login with email + password → JWT token
pub(super) async fn login( pub(super) async fn login(
@@ -35,6 +35,11 @@ pub(super) async fn login(
} }
let token = create_jwt(&user, &state)?; let token = create_jwt(&user, &state)?;
let refresh_token = if payload.remember_me {
Some(create_refresh_jwt(&user, &state)?)
} else {
None
};
let _ = state.activity_log_repo.log("user_login", user.email.as_ref(), None).await; let _ = state.activity_log_repo.log("user_login", user.email.as_ref(), None).await;
Ok(( Ok((
@@ -43,6 +48,7 @@ pub(super) async fn login(
access_token: token, access_token: token,
token_type: "Bearer".to_string(), token_type: "Bearer".to_string(),
expires_in: state.config.jwt_expiry_hours * 3600, expires_in: state.config.jwt_expiry_hours * 3600,
refresh_token,
}), }),
)) ))
} }
@@ -71,6 +77,7 @@ pub(super) async fn register(
access_token: token, access_token: token,
token_type: "Bearer".to_string(), token_type: "Bearer".to_string(),
expires_in: state.config.jwt_expiry_hours * 3600, expires_in: state.config.jwt_expiry_hours * 3600,
refresh_token: None,
}), }),
)) ))
} }
@@ -90,6 +97,46 @@ pub(super) async fn me(CurrentUser(user): CurrentUser) -> Result<impl IntoRespon
})) }))
} }
/// Exchange a valid refresh token for a new access + refresh token pair
#[cfg(feature = "auth-jwt")]
pub(super) async fn refresh_token(
State(state): State<AppState>,
Json(payload): Json<RefreshRequest>,
) -> Result<impl IntoResponse, ApiError> {
let validator = state
.jwt_validator
.as_ref()
.ok_or_else(|| ApiError::Internal("JWT not configured".to_string()))?;
let claims = validator
.validate_refresh_token(&payload.refresh_token)
.map_err(|e| {
tracing::debug!("Refresh token validation failed: {:?}", e);
ApiError::Unauthorized("Invalid or expired refresh token".to_string())
})?;
let user_id: uuid::Uuid = claims
.sub
.parse()
.map_err(|_| ApiError::Unauthorized("Invalid user ID in token".to_string()))?;
let user = state
.user_service
.find_by_id(user_id)
.await
.map_err(|e| ApiError::Internal(format!("Failed to fetch user: {}", e)))?;
let access_token = create_jwt(&user, &state)?;
let new_refresh_token = create_refresh_jwt(&user, &state)?;
Ok(Json(TokenResponse {
access_token,
token_type: "Bearer".to_string(),
expires_in: state.config.jwt_expiry_hours * 3600,
refresh_token: Some(new_refresh_token),
}))
}
/// Issue a new JWT for the currently authenticated user (OIDC→JWT exchange or token refresh) /// Issue a new JWT for the currently authenticated user (OIDC→JWT exchange or token refresh)
#[cfg(feature = "auth-jwt")] #[cfg(feature = "auth-jwt")]
pub(super) async fn get_token( pub(super) async fn get_token(
@@ -102,5 +149,6 @@ pub(super) async fn get_token(
access_token: token, access_token: token,
token_type: "Bearer".to_string(), token_type: "Bearer".to_string(),
expires_in: state.config.jwt_expiry_hours * 3600, expires_in: state.config.jwt_expiry_hours * 3600,
refresh_token: None,
})) }))
} }

View File

@@ -18,7 +18,9 @@ pub fn router() -> Router<AppState> {
.route("/me", get(local::me)); .route("/me", get(local::me));
#[cfg(feature = "auth-jwt")] #[cfg(feature = "auth-jwt")]
let r = r.route("/token", post(local::get_token)); let r = r
.route("/token", post(local::get_token))
.route("/refresh", post(local::refresh_token));
#[cfg(feature = "auth-oidc")] #[cfg(feature = "auth-oidc")]
let r = r let r = r
@@ -28,7 +30,7 @@ pub fn router() -> Router<AppState> {
r r
} }
/// Helper: create JWT for a user /// Helper: create access JWT for a user
#[cfg(feature = "auth-jwt")] #[cfg(feature = "auth-jwt")]
pub(super) fn create_jwt(user: &domain::User, state: &AppState) -> Result<String, ApiError> { pub(super) fn create_jwt(user: &domain::User, state: &AppState) -> Result<String, ApiError> {
let validator = state let validator = state
@@ -45,3 +47,21 @@ pub(super) fn create_jwt(user: &domain::User, state: &AppState) -> Result<String
pub(super) fn create_jwt(_user: &domain::User, _state: &AppState) -> Result<String, ApiError> { pub(super) fn create_jwt(_user: &domain::User, _state: &AppState) -> Result<String, ApiError> {
Err(ApiError::Internal("JWT feature not enabled".to_string())) Err(ApiError::Internal("JWT feature not enabled".to_string()))
} }
/// Helper: create refresh JWT for a user
#[cfg(feature = "auth-jwt")]
pub(super) fn create_refresh_jwt(user: &domain::User, state: &AppState) -> Result<String, ApiError> {
let validator = state
.jwt_validator
.as_ref()
.ok_or_else(|| ApiError::Internal("JWT not configured".to_string()))?;
validator
.create_refresh_token(user)
.map_err(|e| ApiError::Internal(format!("Failed to create refresh token: {}", e)))
}
#[cfg(not(feature = "auth-jwt"))]
pub(super) fn create_refresh_jwt(_user: &domain::User, _state: &AppState) -> Result<String, ApiError> {
Err(ApiError::Internal("JWT feature not enabled".to_string()))
}

View File

@@ -61,8 +61,7 @@ pub(super) async fn get_current_broadcast(
Some(broadcast) => { Some(broadcast) => {
let block_access_mode = channel let block_access_mode = channel
.schedule_config .schedule_config
.blocks .all_blocks()
.iter()
.find(|b| b.id == broadcast.slot.source_block_id) .find(|b| b.id == broadcast.slot.source_block_id)
.map(|b| b.access_mode.clone()) .map(|b| b.access_mode.clone())
.unwrap_or_default(); .unwrap_or_default();
@@ -168,8 +167,7 @@ pub(super) async fn get_stream(
// Block-level access check // Block-level access check
if let Some(block) = channel if let Some(block) = channel
.schedule_config .schedule_config
.blocks .all_blocks()
.iter()
.find(|b| b.id == broadcast.slot.source_block_id) .find(|b| b.id == broadcast.slot.source_block_id)
{ {
check_access( check_access(

View File

@@ -0,0 +1,72 @@
use axum::{
Json,
extract::{Path, State},
http::StatusCode,
response::IntoResponse,
};
use uuid::Uuid;
use crate::{
dto::{ChannelResponse, ConfigSnapshotResponse, PatchSnapshotRequest},
error::ApiError,
extractors::CurrentUser,
state::AppState,
};
use super::require_owner;
pub(super) async fn list_config_history(
State(state): State<AppState>,
CurrentUser(user): CurrentUser,
Path(channel_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
let channel = state.channel_service.find_by_id(channel_id).await?;
require_owner(&channel, user.id)?;
let snapshots = state.channel_service.list_config_snapshots(channel_id).await?;
let response: Vec<ConfigSnapshotResponse> = snapshots.into_iter().map(Into::into).collect();
Ok(Json(response))
}
/// PATCH handler: update (or clear) the label on one config snapshot.
/// Only the channel's owner may modify it; an unknown snapshot id is 404.
pub(super) async fn patch_config_snapshot(
    State(state): State<AppState>,
    CurrentUser(user): CurrentUser,
    Path((channel_id, snap_id)): Path<(Uuid, Uuid)>,
    Json(payload): Json<PatchSnapshotRequest>,
) -> Result<impl IntoResponse, ApiError> {
    let channel = state.channel_service.find_by_id(channel_id).await?;
    require_owner(&channel, user.id)?;
    // The service returns None when the snapshot id does not exist.
    match state
        .channel_service
        .patch_config_snapshot_label(channel_id, snap_id, payload.label)
        .await?
    {
        Some(updated) => Ok(Json(ConfigSnapshotResponse::from(updated))),
        None => Err(ApiError::NotFound("Snapshot not found".into())),
    }
}
/// POST /{id}/config/history/{snap_id}/restore — restore a config snapshot.
///
/// Owner-only. On success the updated channel is returned and a
/// `config_restored` activity-log entry is written (best-effort).
pub(super) async fn restore_config_snapshot(
State(state): State<AppState>,
CurrentUser(user): CurrentUser,
Path((channel_id, snap_id)): Path<(Uuid, Uuid)>,
) -> Result<impl IntoResponse, ApiError> {
let channel = state.channel_service.find_by_id(channel_id).await?;
require_owner(&channel, user.id)?;
let updated = state
.channel_service
.restore_config_snapshot(channel_id, snap_id)
.await
// NOTE(review): the service appears to signal a missing snapshot via
// `ChannelNotFound`; it is remapped to a snapshot-specific 404 here —
// confirm against the service implementation.
.map_err(|e| match e {
domain::DomainError::ChannelNotFound(_) => ApiError::NotFound("Snapshot not found".into()),
other => ApiError::from(other),
})?;
// Best-effort audit trail; logging failures are deliberately ignored.
let _ = state
.activity_log_repo
.log("config_restored", &snap_id.to_string(), Some(channel_id))
.await;
Ok((StatusCode::OK, Json(ChannelResponse::from(updated))))
}

View File

@@ -102,7 +102,7 @@ pub(super) async fn update_channel(
channel.timezone = tz; channel.timezone = tz;
} }
if let Some(sc) = payload.schedule_config { if let Some(sc) = payload.schedule_config {
channel.schedule_config = sc; channel.schedule_config = domain::ScheduleConfig::from(sc);
} }
if let Some(rp) = payload.recycle_policy { if let Some(rp) = payload.recycle_policy {
channel.recycle_policy = rp; channel.recycle_policy = rp;

View File

@@ -13,6 +13,7 @@ use domain::{AccessMode, User};
use crate::{error::ApiError, state::AppState}; use crate::{error::ApiError, state::AppState};
mod broadcast; mod broadcast;
mod config_history;
mod crud; mod crud;
mod schedule; mod schedule;
@@ -27,9 +28,30 @@ pub fn router() -> Router<AppState> {
"/{id}/schedule", "/{id}/schedule",
post(schedule::generate_schedule).get(schedule::get_active_schedule), post(schedule::generate_schedule).get(schedule::get_active_schedule),
) )
.route("/{id}/schedule/history", get(schedule::list_schedule_history))
.route(
"/{id}/schedule/history/{gen_id}",
get(schedule::get_schedule_history_entry),
)
.route(
"/{id}/schedule/history/{gen_id}/rollback",
post(schedule::rollback_schedule),
)
.route("/{id}/now", get(broadcast::get_current_broadcast)) .route("/{id}/now", get(broadcast::get_current_broadcast))
.route("/{id}/epg", get(broadcast::get_epg)) .route("/{id}/epg", get(broadcast::get_epg))
.route("/{id}/stream", get(broadcast::get_stream)) .route("/{id}/stream", get(broadcast::get_stream))
.route(
"/{id}/config/history",
get(config_history::list_config_history),
)
.route(
"/{id}/config/history/{snap_id}",
axum::routing::patch(config_history::patch_config_snapshot),
)
.route(
"/{id}/config/history/{snap_id}/restore",
post(config_history::restore_config_snapshot),
)
} }
// ============================================================================ // ============================================================================

View File

@@ -10,7 +10,7 @@ use uuid::Uuid;
use domain::{self, DomainError}; use domain::{self, DomainError};
use crate::{ use crate::{
dto::ScheduleResponse, dto::{ScheduleHistoryEntry, ScheduleResponse},
error::ApiError, error::ApiError,
extractors::CurrentUser, extractors::CurrentUser,
state::AppState, state::AppState,
@@ -18,7 +18,7 @@ use crate::{
use super::require_owner; use super::require_owner;
/// Trigger 48-hour schedule generation for a channel, starting from now. /// Trigger 7-day schedule generation for a channel, starting from now.
/// Replaces any existing schedule for the same window. /// Replaces any existing schedule for the same window.
pub(super) async fn generate_schedule( pub(super) async fn generate_schedule(
State(state): State<AppState>, State(state): State<AppState>,
@@ -42,7 +42,7 @@ pub(super) async fn generate_schedule(
Ok((StatusCode::CREATED, Json(ScheduleResponse::from(schedule)))) Ok((StatusCode::CREATED, Json(ScheduleResponse::from(schedule))))
} }
/// Return the currently active 48-hour schedule for a channel. /// Return the currently active 7-day schedule for a channel.
/// 404 if no schedule has been generated yet — call POST /:id/schedule first. /// 404 if no schedule has been generated yet — call POST /:id/schedule first.
pub(super) async fn get_active_schedule( pub(super) async fn get_active_schedule(
State(state): State<AppState>, State(state): State<AppState>,
@@ -60,3 +60,75 @@ pub(super) async fn get_active_schedule(
Ok(Json(ScheduleResponse::from(schedule))) Ok(Json(ScheduleResponse::from(schedule)))
} }
/// List every schedule generation for a channel, newest first.
///
/// Returns lightweight entries only — slot data is omitted; fetch a single
/// generation for the full schedule.
pub(super) async fn list_schedule_history(
    State(state): State<AppState>,
    CurrentUser(user): CurrentUser,
    Path(channel_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
    let channel = state.channel_service.find_by_id(channel_id).await?;
    require_owner(&channel, user.id)?;

    let generations = state.schedule_engine.list_schedule_history(channel_id).await?;
    let mut entries: Vec<ScheduleHistoryEntry> = Vec::with_capacity(generations.len());
    for generation in generations {
        entries.push(generation.into());
    }
    Ok(Json(entries))
}
/// Fetch one historical schedule generation, including all of its slots.
///
/// Owner-only; responds 404 when the generation is unknown for this channel.
pub(super) async fn get_schedule_history_entry(
    State(state): State<AppState>,
    CurrentUser(user): CurrentUser,
    Path((channel_id, gen_id)): Path<(Uuid, Uuid)>,
) -> Result<impl IntoResponse, ApiError> {
    let channel = state.channel_service.find_by_id(channel_id).await?;
    require_owner(&channel, user.id)?;

    match state
        .schedule_engine
        .get_schedule_by_id(channel_id, gen_id)
        .await?
    {
        Some(schedule) => Ok(Json(ScheduleResponse::from(schedule))),
        None => Err(ApiError::NotFound(format!("Schedule {} not found", gen_id))),
    }
}
/// Roll back to a previous schedule generation.
///
/// Deletes all generations after `gen_id`'s generation, then generates a fresh
/// schedule from now (inheriting the rolled-back generation as the base for
/// recycle-policy history).
///
/// Owner-only. Emits a `ScheduleGenerated` domain event and writes a
/// best-effort `schedule_rollback` activity-log entry.
pub(super) async fn rollback_schedule(
State(state): State<AppState>,
CurrentUser(user): CurrentUser,
Path((channel_id, gen_id)): Path<(Uuid, Uuid)>,
) -> Result<impl IntoResponse, ApiError> {
let channel = state.channel_service.find_by_id(channel_id).await?;
require_owner(&channel, user.id)?;
// Resolve the target first so an unknown gen_id 404s before anything is deleted.
let target = state
.schedule_engine
.get_schedule_by_id(channel_id, gen_id)
.await?
.ok_or_else(|| ApiError::NotFound(format!("Schedule {} not found", gen_id)))?;
// Drop everything newer than the target generation...
state
.schedule_engine
.delete_schedules_after(channel_id, target.generation)
.await?;
// ...then regenerate from the current instant.
let schedule = state
.schedule_engine
.generate_schedule(channel_id, Utc::now())
.await?;
// Notify subscribers; a send error (no receivers) is deliberately ignored.
let _ = state.event_tx.send(domain::DomainEvent::ScheduleGenerated {
channel_id,
schedule: schedule.clone(),
});
// Best-effort audit trail.
let detail = format!("rollback to gen {}; {} slots", target.generation, schedule.slots.len());
let _ = state.activity_log_repo.log("schedule_rollback", &detail, Some(channel_id)).await;
Ok(Json(ScheduleResponse::from(schedule)))
}

View File

@@ -8,6 +8,7 @@ pub fn router() -> Router<AppState> {
Router::new().route("/", get(get_config)) Router::new().route("/", get(get_config))
} }
#[allow(clippy::vec_init_then_push)]
async fn get_config(State(state): State<AppState>) -> Json<ConfigResponse> { async fn get_config(State(state): State<AppState>) -> Json<ConfigResponse> {
let registry = state.provider_registry.read().await; let registry = state.provider_registry.read().await;

View File

@@ -22,6 +22,7 @@ use crate::{error::ApiError, state::AppState};
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
use axum::{ use axum::{
Json, Json,
extract::Query,
http::StatusCode, http::StatusCode,
routing::{delete, post}, routing::{delete, post},
}; };
@@ -53,6 +54,7 @@ pub fn router() -> Router<AppState> {
// Direct streaming // Direct streaming
// ============================================================================ // ============================================================================
#[cfg_attr(not(feature = "local-files"), allow(unused_variables))]
async fn stream_file( async fn stream_file(
State(state): State<AppState>, State(state): State<AppState>,
Path(encoded_id): Path<String>, Path(encoded_id): Path<String>,
@@ -131,7 +133,7 @@ async fn stream_file(
); );
} }
return builder.body(body).map_err(|e| ApiError::internal(e.to_string())); builder.body(body).map_err(|e| ApiError::internal(e.to_string()))
} }
#[cfg(not(feature = "local-files"))] #[cfg(not(feature = "local-files"))]
@@ -142,13 +144,25 @@ async fn stream_file(
// Rescan // Rescan
// ============================================================================ // ============================================================================
/// Query parameters for the rescan endpoint.
#[cfg(feature = "local-files")]
#[derive(Deserialize)]
struct RescanQuery {
// Target a specific local-files provider; when absent the handler falls
// back to the first available provider index.
provider: Option<String>,
}
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
async fn trigger_rescan( async fn trigger_rescan(
State(state): State<AppState>, State(state): State<AppState>,
CurrentUser(_user): CurrentUser, CurrentUser(_user): CurrentUser,
Query(query): Query<RescanQuery>,
) -> Result<Json<serde_json::Value>, ApiError> { ) -> Result<Json<serde_json::Value>, ApiError> {
let index = state.local_index.read().await.clone() let map = state.local_index.read().await.clone();
.ok_or_else(|| ApiError::not_implemented("no local files provider active"))?; let index = if let Some(id) = &query.provider {
map.get(id).cloned()
} else {
map.values().next().cloned()
};
let index = index.ok_or_else(|| ApiError::not_implemented("no local files provider active"))?;
let count = index.rescan().await; let count = index.rescan().await;
Ok(Json(serde_json::json!({ "items_found": count }))) Ok(Json(serde_json::json!({ "items_found": count })))
} }
@@ -316,6 +330,7 @@ async fn clear_transcode_cache(
// Helpers // Helpers
// ============================================================================ // ============================================================================
#[cfg(feature = "local-files")]
fn content_type_for_ext(ext: &str) -> &'static str { fn content_type_for_ext(ext: &str) -> &'static str {
match ext { match ext {
"mp4" | "m4v" => "video/mp4", "mp4" | "m4v" => "video/mp4",
@@ -327,6 +342,7 @@ fn content_type_for_ext(ext: &str) -> &'static str {
} }
} }
#[cfg(feature = "local-files")]
fn parse_range(range: &str, file_size: u64) -> Option<(u64, u64)> { fn parse_range(range: &str, file_size: u64) -> Option<(u64, u64)> {
let range = range.strip_prefix("bytes=")?; let range = range.strip_prefix("bytes=")?;
let (start_str, end_str) = range.split_once('-')?; let (start_str, end_str) = range.split_once('-')?;

View File

@@ -1,25 +1,39 @@
//! Library browsing routes //! Library routes — DB-backed.
//! //!
//! These endpoints expose the media provider's library to the dashboard so //! GET /library/collections — collections derived from synced items
//! users can discover what's available without knowing provider-internal IDs. //! GET /library/series — series names
//! All routes require authentication. //! GET /library/genres — genres
//! GET /library/items — search / browse
//! GET /library/items/:id — single item
//! GET /library/sync/status — latest sync log per provider
//! POST /library/sync — trigger an ad-hoc sync (auth)
//! //!
//! GET /library/collections — top-level libraries (Jellyfin views, Plex sections) //! Admin (nested under /admin/library):
//! GET /library/series — TV series, optionally scoped to a collection //! GET /admin/library/settings — app_settings key/value
//! GET /library/genres — available genres, optionally filtered by content type //! PUT /admin/library/settings — update app_settings
//! GET /library/items — search / browse items (used for block filter preview)
use std::collections::HashMap;
use std::sync::Arc;
use axum::{ use axum::{
Json, Router, Json, Router,
extract::{Query, RawQuery, State}, extract::{Path, Query, RawQuery, State},
routing::get, http::StatusCode,
response::IntoResponse,
routing::{get, post, put},
}; };
use domain::IProviderRegistry as _; use domain::{ContentType, ILibraryRepository, LibrarySearchFilter, LibrarySyncAdapter};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use domain::{Collection, ContentType, MediaFilter, SeriesSummary}; use crate::{
error::ApiError,
extractors::{AdminUser, CurrentUser},
state::AppState,
};
use crate::{error::ApiError, extractors::CurrentUser, state::AppState}; // ============================================================================
// Routers
// ============================================================================
pub fn router() -> Router<AppState> { pub fn router() -> Router<AppState> {
Router::new() Router::new()
@@ -27,6 +41,15 @@ pub fn router() -> Router<AppState> {
.route("/series", get(list_series)) .route("/series", get(list_series))
.route("/genres", get(list_genres)) .route("/genres", get(list_genres))
.route("/items", get(search_items)) .route("/items", get(search_items))
.route("/items/{id}", get(get_item))
.route("/shows", get(list_shows))
.route("/shows/{name}/seasons", get(list_seasons))
.route("/sync/status", get(sync_status))
.route("/sync", post(trigger_sync))
}
pub fn admin_router() -> Router<AppState> {
Router::new().route("/settings", get(get_settings).put(update_settings))
} }
// ============================================================================ // ============================================================================
@@ -41,38 +64,6 @@ struct CollectionResponse {
collection_type: Option<String>, collection_type: Option<String>,
} }
impl From<Collection> for CollectionResponse {
fn from(c: Collection) -> Self {
Self {
id: c.id,
name: c.name,
collection_type: c.collection_type,
}
}
}
#[derive(Debug, Serialize)]
struct SeriesResponse {
id: String,
name: String,
episode_count: u32,
genres: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")]
year: Option<u16>,
}
impl From<SeriesSummary> for SeriesResponse {
fn from(s: SeriesSummary) -> Self {
Self {
id: s.id,
name: s.name,
episode_count: s.episode_count,
genres: s.genres,
year: s.year,
}
}
}
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
struct LibraryItemResponse { struct LibraryItemResponse {
id: String, id: String,
@@ -88,6 +79,45 @@ struct LibraryItemResponse {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
year: Option<u16>, year: Option<u16>,
genres: Vec<String>, genres: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")]
thumbnail_url: Option<String>,
}
/// Generic paged wrapper: one page of `items` plus the `total` match count,
/// so the client can render pagination without a second query.
#[derive(Debug, Serialize)]
struct PagedResponse<T: Serialize> {
items: Vec<T>,
total: u32,
}
/// Aggregated per-show summary returned by GET /library/shows.
#[derive(Debug, Serialize)]
struct ShowSummaryResponse {
series_name: String,
episode_count: u32,
season_count: u32,
// Omitted from the JSON body when no artwork is available.
#[serde(skip_serializing_if = "Option::is_none")]
thumbnail_url: Option<String>,
genres: Vec<String>,
}
/// Per-season summary returned by GET /library/shows/{name}/seasons.
#[derive(Debug, Serialize)]
struct SeasonSummaryResponse {
season_number: u32,
episode_count: u32,
// Omitted from the JSON body when no artwork is available.
#[serde(skip_serializing_if = "Option::is_none")]
thumbnail_url: Option<String>,
}
#[derive(Debug, Serialize)]
struct SyncLogResponse {
id: i64,
provider_id: String,
started_at: String,
#[serde(skip_serializing_if = "Option::is_none")]
finished_at: Option<String>,
items_found: u32,
status: String,
#[serde(skip_serializing_if = "Option::is_none")]
error_msg: Option<String>,
} }
// ============================================================================ // ============================================================================
@@ -96,47 +126,47 @@ struct LibraryItemResponse {
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
struct CollectionsQuery { struct CollectionsQuery {
/// Provider key to query (default: primary).
provider: Option<String>, provider: Option<String>,
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
struct SeriesQuery { struct SeriesQuery {
/// Scope results to a specific collection (provider library ID).
collection: Option<String>,
/// Provider key to query (default: primary).
provider: Option<String>, provider: Option<String>,
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
struct GenresQuery { struct GenresQuery {
/// Limit genres to a content type: "movie", "episode", or "short".
#[serde(rename = "type")] #[serde(rename = "type")]
content_type: Option<String>, content_type: Option<String>,
/// Provider key to query (default: primary).
provider: Option<String>, provider: Option<String>,
} }
#[derive(Debug, Default, Deserialize)] #[derive(Debug, Default, Deserialize)]
struct ItemsQuery { struct ItemsQuery {
/// Free-text search.
q: Option<String>, q: Option<String>,
/// Content type filter: "movie", "episode", or "short".
#[serde(rename = "type")] #[serde(rename = "type")]
content_type: Option<String>, content_type: Option<String>,
/// Filter episodes by series name. Repeat the param for multiple series:
/// `?series[]=iCarly&series[]=Victorious`
#[serde(default)] #[serde(default)]
series: Vec<String>, series: Vec<String>,
/// Scope to a provider collection ID. #[serde(default)]
genres: Vec<String>,
collection: Option<String>, collection: Option<String>,
/// Maximum number of results (default: 50, max: 200). limit: Option<u32>,
limit: Option<usize>, offset: Option<u32>,
/// Fill strategy to simulate: "random" | "sequential" | "best_fit". provider: Option<String>,
/// Applies the same ordering the schedule engine would use so the preview season: Option<u32>,
/// reflects what will actually be scheduled. }
strategy: Option<String>,
/// Provider key to query (default: primary). #[derive(Debug, Default, Deserialize)]
struct ShowsQuery {
q: Option<String>,
provider: Option<String>,
#[serde(default)]
genres: Vec<String>,
}
#[derive(Debug, Deserialize)]
struct SeasonsQuery {
provider: Option<String>, provider: Option<String>,
} }
@@ -144,130 +174,275 @@ struct ItemsQuery {
// Handlers // Handlers
// ============================================================================ // ============================================================================
/// List top-level collections (Jellyfin virtual libraries, Plex sections, etc.)
async fn list_collections( async fn list_collections(
State(state): State<AppState>, State(state): State<AppState>,
CurrentUser(_user): CurrentUser, CurrentUser(_user): CurrentUser,
Query(params): Query<CollectionsQuery>, Query(params): Query<CollectionsQuery>,
) -> Result<Json<Vec<CollectionResponse>>, ApiError> { ) -> Result<Json<Vec<CollectionResponse>>, ApiError> {
let provider_id = params.provider.as_deref().unwrap_or(""); let cols = state
let registry = state.provider_registry.read().await; .library_repo
let caps = registry.capabilities(provider_id).ok_or_else(|| { .list_collections(params.provider.as_deref())
ApiError::validation(format!("Unknown provider '{}'", provider_id)) .await?;
})?; let resp = cols
if !caps.collections { .into_iter()
return Err(ApiError::not_implemented("collections not supported by this provider")); .map(|c| CollectionResponse {
} id: c.id,
let collections = registry.list_collections(provider_id).await?; name: c.name,
Ok(Json(collections.into_iter().map(Into::into).collect())) collection_type: c.collection_type,
})
.collect();
Ok(Json(resp))
} }
/// List TV series, optionally scoped to a collection.
async fn list_series( async fn list_series(
State(state): State<AppState>, State(state): State<AppState>,
CurrentUser(_user): CurrentUser, CurrentUser(_user): CurrentUser,
Query(params): Query<SeriesQuery>, Query(params): Query<SeriesQuery>,
) -> Result<Json<Vec<SeriesResponse>>, ApiError> { ) -> Result<Json<Vec<String>>, ApiError> {
let provider_id = params.provider.as_deref().unwrap_or(""); let series = state
let registry = state.provider_registry.read().await; .library_repo
let caps = registry.capabilities(provider_id).ok_or_else(|| { .list_series(params.provider.as_deref())
ApiError::validation(format!("Unknown provider '{}'", provider_id))
})?;
if !caps.series {
return Err(ApiError::not_implemented("series not supported by this provider"));
}
let series = registry
.list_series(provider_id, params.collection.as_deref())
.await?; .await?;
Ok(Json(series.into_iter().map(Into::into).collect())) Ok(Json(series))
} }
/// List available genres, optionally filtered to a content type.
async fn list_genres( async fn list_genres(
State(state): State<AppState>, State(state): State<AppState>,
CurrentUser(_user): CurrentUser, CurrentUser(_user): CurrentUser,
Query(params): Query<GenresQuery>, Query(params): Query<GenresQuery>,
) -> Result<Json<Vec<String>>, ApiError> { ) -> Result<Json<Vec<String>>, ApiError> {
let provider_id = params.provider.as_deref().unwrap_or("");
let registry = state.provider_registry.read().await;
let caps = registry.capabilities(provider_id).ok_or_else(|| {
ApiError::validation(format!("Unknown provider '{}'", provider_id))
})?;
if !caps.genres {
return Err(ApiError::not_implemented("genres not supported by this provider"));
}
let ct = parse_content_type(params.content_type.as_deref())?; let ct = parse_content_type(params.content_type.as_deref())?;
let genres = registry.list_genres(provider_id, ct.as_ref()).await?; let genres = state
.library_repo
.list_genres(ct.as_ref(), params.provider.as_deref())
.await?;
Ok(Json(genres)) Ok(Json(genres))
} }
/// Search / browse library items. Used by the block editor to preview what a
/// filter matches before saving a channel config.
async fn search_items( async fn search_items(
State(state): State<AppState>, State(state): State<AppState>,
CurrentUser(_user): CurrentUser, CurrentUser(_user): CurrentUser,
RawQuery(raw_query): RawQuery, RawQuery(raw_query): RawQuery,
) -> Result<Json<Vec<LibraryItemResponse>>, ApiError> { ) -> Result<Json<PagedResponse<LibraryItemResponse>>, ApiError> {
let qs_config = serde_qs::Config::new(2, false); // non-strict: accept encoded brackets let qs_config = serde_qs::Config::new(2, false);
let params: ItemsQuery = raw_query let params: ItemsQuery = raw_query
.as_deref() .as_deref()
.map(|q| qs_config.deserialize_str::<ItemsQuery>(q)) .map(|q| qs_config.deserialize_str::<ItemsQuery>(q))
.transpose() .transpose()
.map_err(|e| ApiError::validation(e.to_string()))? .map_err(|e| ApiError::validation(e.to_string()))?
.unwrap_or_default(); .unwrap_or_default();
let limit = params.limit.unwrap_or(50).min(200); let limit = params.limit.unwrap_or(50).min(200);
let offset = params.offset.unwrap_or(0);
let provider_id = params.provider.as_deref().unwrap_or(""); let filter = LibrarySearchFilter {
provider_id: params.provider,
let filter = MediaFilter {
content_type: parse_content_type(params.content_type.as_deref())?, content_type: parse_content_type(params.content_type.as_deref())?,
search_term: params.q,
series_names: params.series, series_names: params.series,
collections: params collection_id: params.collection,
.collection genres: params.genres,
.map(|c| vec![c]) search_term: params.q,
.unwrap_or_default(), season_number: params.season,
offset,
limit,
..Default::default() ..Default::default()
}; };
let registry = state.provider_registry.read().await; let (items, total) = state.library_repo.search(&filter).await?;
let mut items = registry.fetch_items(provider_id, &filter).await?; let resp = items.into_iter().map(library_item_to_response).collect();
Ok(Json(PagedResponse { items: resp, total }))
// Apply the same ordering the schedule engine uses so the preview reflects
// what will actually be scheduled rather than raw provider order.
match params.strategy.as_deref() {
Some("random") => {
use rand::seq::SliceRandom;
items.shuffle(&mut rand::thread_rng());
}
Some("best_fit") => {
// Mirror the greedy bin-packing: longest items first.
items.sort_by(|a, b| b.duration_secs.cmp(&a.duration_secs));
}
_ => {} // "sequential" / unset: keep provider order (episode order per series)
} }
let response: Vec<LibraryItemResponse> = items async fn get_item(
State(state): State<AppState>,
CurrentUser(_user): CurrentUser,
Path(id): Path<String>,
) -> Result<Json<LibraryItemResponse>, ApiError> {
let item = state
.library_repo
.get_by_id(&id)
.await?
.ok_or_else(|| ApiError::NotFound(format!("Library item '{}' not found", id)))?;
Ok(Json(library_item_to_response(item)))
}
async fn sync_status(
State(state): State<AppState>,
CurrentUser(_user): CurrentUser,
) -> Result<Json<Vec<SyncLogResponse>>, ApiError> {
let entries = state.library_repo.latest_sync_status().await?;
let resp = entries
.into_iter() .into_iter()
.take(limit) .map(|e| SyncLogResponse {
.map(|item| LibraryItemResponse { id: e.id,
id: item.id.into_inner(), provider_id: e.provider_id,
title: item.title, started_at: e.started_at,
content_type: match item.content_type { finished_at: e.finished_at,
domain::ContentType::Movie => "movie".into(), items_found: e.items_found,
domain::ContentType::Episode => "episode".into(), status: e.status,
domain::ContentType::Short => "short".into(), error_msg: e.error_msg,
},
duration_secs: item.duration_secs,
series_name: item.series_name,
season_number: item.season_number,
episode_number: item.episode_number,
year: item.year,
genres: item.genres,
}) })
.collect(); .collect();
Ok(Json(resp))
}
Ok(Json(response)) async fn trigger_sync(
State(state): State<AppState>,
CurrentUser(_user): CurrentUser,
) -> Result<impl IntoResponse, ApiError> {
use domain::IProviderRegistry as _;
let provider_ids: Vec<String> = {
let reg = state.provider_registry.read().await;
reg.provider_ids()
};
// 409 if any provider is already syncing
for pid in &provider_ids {
let running = state.library_repo.is_sync_running(pid).await?;
if running {
return Ok((
StatusCode::CONFLICT,
Json(serde_json::json!({
"error": format!("Sync already running for provider '{}'", pid)
})),
)
.into_response());
}
}
// Spawn background sync
let sync_adapter: Arc<dyn LibrarySyncAdapter> = Arc::clone(&state.library_sync_adapter);
let registry = Arc::clone(&state.provider_registry);
tokio::spawn(async move {
let providers: Vec<(String, Arc<dyn domain::IMediaProvider>)> = {
let reg = registry.read().await;
provider_ids
.iter()
.filter_map(|id| reg.get_provider(id).map(|p| (id.clone(), p)))
.collect()
};
for (pid, provider) in providers {
let result = sync_adapter.sync_provider(provider.as_ref(), &pid).await;
if let Some(ref err) = result.error {
tracing::warn!("manual sync: provider '{}' failed: {}", pid, err);
} else {
tracing::info!(
"manual sync: provider '{}' done — {} items in {}ms",
pid,
result.items_found,
result.duration_ms
);
}
}
});
Ok((
StatusCode::ACCEPTED,
Json(serde_json::json!({ "message": "Sync started" })),
)
.into_response())
}
/// GET /library/shows — grouped show summaries from the synced library.
///
/// Supports free-text (`q`), provider, and genre filtering.
async fn list_shows(
    State(state): State<AppState>,
    CurrentUser(_user): CurrentUser,
    Query(params): Query<ShowsQuery>,
) -> Result<Json<Vec<ShowSummaryResponse>>, ApiError> {
    let shows = state
        .library_repo
        .list_shows(params.provider.as_deref(), params.q.as_deref(), &params.genres)
        .await?;

    let mut body = Vec::with_capacity(shows.len());
    for show in shows {
        body.push(ShowSummaryResponse {
            series_name: show.series_name,
            episode_count: show.episode_count,
            season_count: show.season_count,
            thumbnail_url: show.thumbnail_url,
            genres: show.genres,
        });
    }
    Ok(Json(body))
}
/// GET /library/shows/{name}/seasons — season summaries for one show.
async fn list_seasons(
    State(state): State<AppState>,
    CurrentUser(_user): CurrentUser,
    Path(name): Path<String>,
    Query(params): Query<SeasonsQuery>,
) -> Result<Json<Vec<SeasonSummaryResponse>>, ApiError> {
    let seasons = state
        .library_repo
        .list_seasons(&name, params.provider.as_deref())
        .await?;

    let mut body = Vec::with_capacity(seasons.len());
    for season in seasons {
        body.push(SeasonSummaryResponse {
            season_number: season.season_number,
            episode_count: season.episode_count,
            thumbnail_url: season.thumbnail_url,
        });
    }
    Ok(Json(body))
}
/// GET /admin/library/settings — all app_settings as one JSON object.
///
/// Settings are stored as strings; each value is surfaced as a JSON number
/// or bool when it parses as such, otherwise as a string.
async fn get_settings(
    State(state): State<AppState>,
    AdminUser(_user): AdminUser,
) -> Result<Json<HashMap<String, serde_json::Value>>, ApiError> {
    // Numbers are tried before bools, then fall back to a plain string.
    let coerce = |raw: String| -> serde_json::Value {
        if let Ok(n) = raw.parse::<i64>() {
            serde_json::Value::Number(n.into())
        } else if let Ok(b) = raw.parse::<bool>() {
            serde_json::Value::Bool(b)
        } else {
            serde_json::Value::String(raw)
        }
    };

    let pairs = state.app_settings_repo.get_all().await?;
    let map: HashMap<String, serde_json::Value> =
        pairs.into_iter().map(|(k, v)| (k, coerce(v))).collect();
    Ok(Json(map))
}
async fn update_settings(
State(state): State<AppState>,
AdminUser(_user): AdminUser,
Json(body): Json<HashMap<String, serde_json::Value>>,
) -> Result<Json<HashMap<String, serde_json::Value>>, ApiError> {
for (key, val) in &body {
let val_str = match val {
serde_json::Value::String(s) => s.clone(),
serde_json::Value::Number(n) => n.to_string(),
serde_json::Value::Bool(b) => b.to_string(),
other => other.to_string(),
};
state.app_settings_repo.set(key, &val_str).await?;
}
// Return the updated state
let pairs = state.app_settings_repo.get_all().await?;
let map: HashMap<String, serde_json::Value> = pairs
.into_iter()
.map(|(k, v)| {
let val = if let Ok(n) = v.parse::<i64>() {
serde_json::Value::Number(n.into())
} else if let Ok(b) = v.parse::<bool>() {
serde_json::Value::Bool(b)
} else {
serde_json::Value::String(v)
};
(k, val)
})
.collect();
Ok(Json(map))
} }
// ============================================================================ // ============================================================================
@@ -286,3 +461,22 @@ fn parse_content_type(s: Option<&str>) -> Result<Option<ContentType>, ApiError>
))), ))),
} }
} }
/// Map a domain library item onto its wire (JSON) representation.
fn library_item_to_response(item: domain::LibraryItem) -> LibraryItemResponse {
    // Lowercase discriminator string expected by the frontend filters.
    let content_type = match item.content_type {
        ContentType::Movie => String::from("movie"),
        ContentType::Episode => String::from("episode"),
        ContentType::Short => String::from("short"),
    };
    LibraryItemResponse {
        id: item.id,
        title: item.title,
        content_type,
        duration_secs: item.duration_secs,
        series_name: item.series_name,
        season_number: item.season_number,
        episode_number: item.episode_number,
        year: item.year,
        genres: item.genres,
        thumbnail_url: item.thumbnail_url,
    }
}

View File

@@ -25,4 +25,5 @@ pub fn api_v1_router() -> Router<AppState> {
.nest("/files", files::router()) .nest("/files", files::router())
.nest("/iptv", iptv::router()) .nest("/iptv", iptv::router())
.nest("/library", library::router()) .nest("/library", library::router())
.nest("/admin", library::admin_router())
} }

View File

@@ -92,12 +92,13 @@ mod tests {
use async_trait::async_trait; use async_trait::async_trait;
use chrono::{DateTime, Duration, Utc}; use chrono::{DateTime, Duration, Utc};
use domain::{
Channel, ChannelRepository, Collection, DomainResult, GeneratedSchedule, IProviderRegistry,
MediaFilter, MediaItem, MediaItemId, PlaybackRecord, ProviderCapabilities, ScheduleEngineService,
ScheduleRepository, SeriesSummary, StreamQuality, StreamingProtocol,
};
use domain::value_objects::{ChannelId, ContentType, UserId}; use domain::value_objects::{ChannelId, ContentType, UserId};
use std::collections::HashMap;
use domain::{
BlockId, Channel, ChannelRepository, Collection, DomainResult, GeneratedSchedule,
IProviderRegistry, MediaFilter, MediaItem, MediaItemId, PlaybackRecord, ProviderCapabilities,
ScheduleEngineService, ScheduleRepository, SeriesSummary, StreamQuality,
};
use uuid::Uuid; use uuid::Uuid;
// ── Mocks ───────────────────────────────────────────────────────────────── // ── Mocks ─────────────────────────────────────────────────────────────────
@@ -126,6 +127,10 @@ mod tests {
async fn delete(&self, _id: ChannelId) -> DomainResult<()> { async fn delete(&self, _id: ChannelId) -> DomainResult<()> {
unimplemented!() unimplemented!()
} }
async fn save_config_snapshot(&self, _channel_id: ChannelId, _config: &domain::ScheduleConfig, _label: Option<String>) -> DomainResult<domain::ChannelConfigSnapshot> { unimplemented!() }
async fn list_config_snapshots(&self, _channel_id: ChannelId) -> DomainResult<Vec<domain::ChannelConfigSnapshot>> { unimplemented!() }
async fn get_config_snapshot(&self, _channel_id: ChannelId, _snapshot_id: Uuid) -> DomainResult<Option<domain::ChannelConfigSnapshot>> { unimplemented!() }
async fn patch_config_snapshot_label(&self, _channel_id: ChannelId, _snapshot_id: Uuid, _label: Option<String>) -> DomainResult<Option<domain::ChannelConfigSnapshot>> { unimplemented!() }
} }
struct MockScheduleRepo { struct MockScheduleRepo {
@@ -142,32 +147,55 @@ mod tests {
) -> DomainResult<Option<GeneratedSchedule>> { ) -> DomainResult<Option<GeneratedSchedule>> {
Ok(None) Ok(None)
} }
async fn find_latest(&self, _channel_id: ChannelId) -> DomainResult<Option<GeneratedSchedule>> { async fn find_latest(
&self,
_channel_id: ChannelId,
) -> DomainResult<Option<GeneratedSchedule>> {
Ok(self.latest.clone()) Ok(self.latest.clone())
} }
async fn save(&self, schedule: &GeneratedSchedule) -> DomainResult<()> { async fn save(&self, schedule: &GeneratedSchedule) -> DomainResult<()> {
self.saved.lock().unwrap().push(schedule.clone()); self.saved.lock().unwrap().push(schedule.clone());
Ok(()) Ok(())
} }
async fn find_playback_history(&self, _channel_id: ChannelId) -> DomainResult<Vec<PlaybackRecord>> { async fn find_playback_history(
&self,
_channel_id: ChannelId,
) -> DomainResult<Vec<PlaybackRecord>> {
Ok(vec![]) Ok(vec![])
} }
async fn save_playback_record(&self, _record: &PlaybackRecord) -> DomainResult<()> { async fn save_playback_record(&self, _record: &PlaybackRecord) -> DomainResult<()> {
Ok(()) Ok(())
} }
async fn find_last_slot_per_block(
&self,
_channel_id: ChannelId,
) -> DomainResult<HashMap<BlockId, MediaItemId>> {
Ok(HashMap::new())
}
async fn list_schedule_history(&self, _channel_id: ChannelId) -> DomainResult<Vec<GeneratedSchedule>> { unimplemented!() }
async fn get_schedule_by_id(&self, _channel_id: ChannelId, _schedule_id: Uuid) -> DomainResult<Option<GeneratedSchedule>> { unimplemented!() }
async fn delete_schedules_after(&self, _channel_id: ChannelId, _target_generation: u32) -> DomainResult<()> { unimplemented!() }
} }
struct MockRegistry; struct MockRegistry;
#[async_trait] #[async_trait]
impl IProviderRegistry for MockRegistry { impl IProviderRegistry for MockRegistry {
async fn fetch_items(&self, _provider_id: &str, _filter: &MediaFilter) -> DomainResult<Vec<MediaItem>> { async fn fetch_items(
&self,
_provider_id: &str,
_filter: &MediaFilter,
) -> DomainResult<Vec<MediaItem>> {
Ok(vec![]) Ok(vec![])
} }
async fn fetch_by_id(&self, _item_id: &MediaItemId) -> DomainResult<Option<MediaItem>> { async fn fetch_by_id(&self, _item_id: &MediaItemId) -> DomainResult<Option<MediaItem>> {
Ok(None) Ok(None)
} }
async fn get_stream_url(&self, _item_id: &MediaItemId, _quality: &StreamQuality) -> DomainResult<String> { async fn get_stream_url(
&self,
_item_id: &MediaItemId,
_quality: &StreamQuality,
) -> DomainResult<String> {
unimplemented!() unimplemented!()
} }
fn provider_ids(&self) -> Vec<String> { fn provider_ids(&self) -> Vec<String> {
@@ -182,10 +210,18 @@ mod tests {
async fn list_collections(&self, _provider_id: &str) -> DomainResult<Vec<Collection>> { async fn list_collections(&self, _provider_id: &str) -> DomainResult<Vec<Collection>> {
unimplemented!() unimplemented!()
} }
async fn list_series(&self, _provider_id: &str, _collection_id: Option<&str>) -> DomainResult<Vec<SeriesSummary>> { async fn list_series(
&self,
_provider_id: &str,
_collection_id: Option<&str>,
) -> DomainResult<Vec<SeriesSummary>> {
unimplemented!() unimplemented!()
} }
async fn list_genres(&self, _provider_id: &str, _content_type: Option<&ContentType>) -> DomainResult<Vec<String>> { async fn list_genres(
&self,
_provider_id: &str,
_content_type: Option<&ContentType>,
) -> DomainResult<Vec<String>> {
unimplemented!() unimplemented!()
} }
} }
@@ -226,9 +262,12 @@ mod tests {
async fn test_no_schedule_generates_from_now() { async fn test_no_schedule_generates_from_now() {
let ch = make_channel(); let ch = make_channel();
let saved = Arc::new(Mutex::new(vec![])); let saved = Arc::new(Mutex::new(vec![]));
let channel_repo: Arc<dyn ChannelRepository> = Arc::new(MockChannelRepo { channels: vec![ch] }); let channel_repo: Arc<dyn ChannelRepository> =
let schedule_repo: Arc<dyn ScheduleRepository> = Arc::new(MockChannelRepo { channels: vec![ch] });
Arc::new(MockScheduleRepo { latest: None, saved: saved.clone() }); let schedule_repo: Arc<dyn ScheduleRepository> = Arc::new(MockScheduleRepo {
latest: None,
saved: saved.clone(),
});
let engine = make_engine(channel_repo.clone(), schedule_repo); let engine = make_engine(channel_repo.clone(), schedule_repo);
let (event_tx, _) = tokio::sync::broadcast::channel(8); let (event_tx, _) = tokio::sync::broadcast::channel(8);
@@ -246,9 +285,12 @@ mod tests {
let valid_until = Utc::now() + Duration::hours(25); let valid_until = Utc::now() + Duration::hours(25);
let schedule = make_schedule(ch.id, valid_until); let schedule = make_schedule(ch.id, valid_until);
let saved = Arc::new(Mutex::new(vec![])); let saved = Arc::new(Mutex::new(vec![]));
let channel_repo: Arc<dyn ChannelRepository> = Arc::new(MockChannelRepo { channels: vec![ch] }); let channel_repo: Arc<dyn ChannelRepository> =
let schedule_repo: Arc<dyn ScheduleRepository> = Arc::new(MockChannelRepo { channels: vec![ch] });
Arc::new(MockScheduleRepo { latest: Some(schedule), saved: saved.clone() }); let schedule_repo: Arc<dyn ScheduleRepository> = Arc::new(MockScheduleRepo {
latest: Some(schedule),
saved: saved.clone(),
});
let engine = make_engine(channel_repo.clone(), schedule_repo); let engine = make_engine(channel_repo.clone(), schedule_repo);
let (event_tx, _) = tokio::sync::broadcast::channel(8); let (event_tx, _) = tokio::sync::broadcast::channel(8);
@@ -263,9 +305,12 @@ mod tests {
let valid_until = Utc::now() + Duration::hours(20); let valid_until = Utc::now() + Duration::hours(20);
let schedule = make_schedule(ch.id, valid_until); let schedule = make_schedule(ch.id, valid_until);
let saved = Arc::new(Mutex::new(vec![])); let saved = Arc::new(Mutex::new(vec![]));
let channel_repo: Arc<dyn ChannelRepository> = Arc::new(MockChannelRepo { channels: vec![ch] }); let channel_repo: Arc<dyn ChannelRepository> =
let schedule_repo: Arc<dyn ScheduleRepository> = Arc::new(MockChannelRepo { channels: vec![ch] });
Arc::new(MockScheduleRepo { latest: Some(schedule), saved: saved.clone() }); let schedule_repo: Arc<dyn ScheduleRepository> = Arc::new(MockScheduleRepo {
latest: Some(schedule),
saved: saved.clone(),
});
let engine = make_engine(channel_repo.clone(), schedule_repo); let engine = make_engine(channel_repo.clone(), schedule_repo);
let (event_tx, _) = tokio::sync::broadcast::channel(8); let (event_tx, _) = tokio::sync::broadcast::channel(8);
@@ -282,9 +327,12 @@ mod tests {
let valid_until = Utc::now() - Duration::hours(1); let valid_until = Utc::now() - Duration::hours(1);
let schedule = make_schedule(ch.id, valid_until); let schedule = make_schedule(ch.id, valid_until);
let saved = Arc::new(Mutex::new(vec![])); let saved = Arc::new(Mutex::new(vec![]));
let channel_repo: Arc<dyn ChannelRepository> = Arc::new(MockChannelRepo { channels: vec![ch] }); let channel_repo: Arc<dyn ChannelRepository> =
let schedule_repo: Arc<dyn ScheduleRepository> = Arc::new(MockChannelRepo { channels: vec![ch] });
Arc::new(MockScheduleRepo { latest: Some(schedule), saved: saved.clone() }); let schedule_repo: Arc<dyn ScheduleRepository> = Arc::new(MockScheduleRepo {
latest: Some(schedule),
saved: saved.clone(),
});
let engine = make_engine(channel_repo.clone(), schedule_repo); let engine = make_engine(channel_repo.clone(), schedule_repo);
let (event_tx, _) = tokio::sync::broadcast::channel(8); let (event_tx, _) = tokio::sync::broadcast::channel(8);

View File

@@ -9,13 +9,15 @@ use infra::auth::jwt::{JwtConfig, JwtValidator};
#[cfg(feature = "auth-oidc")] #[cfg(feature = "auth-oidc")]
use infra::auth::oidc::OidcService; use infra::auth::oidc::OidcService;
use std::collections::VecDeque; use std::collections::VecDeque;
#[cfg(feature = "local-files")]
use std::collections::HashMap;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use tokio::sync::broadcast; use tokio::sync::broadcast;
use crate::config::Config; use crate::config::Config;
use crate::events::EventBus; use crate::events::EventBus;
use crate::log_layer::LogLine; use crate::log_layer::LogLine;
use domain::{ActivityLogRepository, ChannelService, ProviderConfigRepository, ScheduleEngineService, UserService}; use domain::{ActivityLogRepository, ChannelService, IAppSettingsRepository, ILibraryRepository, LibrarySyncAdapter, ProviderConfigRepository, ScheduleEngineService, UserService};
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
use domain::TranscodeSettingsRepository; use domain::TranscodeSettingsRepository;
use k_core::db::DatabasePool; use k_core::db::DatabasePool;
@@ -40,9 +42,9 @@ pub struct AppState {
pub log_history: Arc<Mutex<VecDeque<LogLine>>>, pub log_history: Arc<Mutex<VecDeque<LogLine>>>,
/// Repository for persisted in-app activity events. /// Repository for persisted in-app activity events.
pub activity_log_repo: Arc<dyn ActivityLogRepository>, pub activity_log_repo: Arc<dyn ActivityLogRepository>,
/// Index for the local-files provider, used by the rescan route. /// Indexes for local-files provider instances, keyed by provider instance id.
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
pub local_index: Arc<tokio::sync::RwLock<Option<Arc<infra::LocalIndex>>>>, pub local_index: Arc<tokio::sync::RwLock<HashMap<String, Arc<infra::LocalIndex>>>>,
/// TranscodeManager for FFmpeg HLS transcoding (requires TRANSCODE_DIR). /// TranscodeManager for FFmpeg HLS transcoding (requires TRANSCODE_DIR).
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
pub transcode_manager: Arc<tokio::sync::RwLock<Option<Arc<infra::TranscodeManager>>>>, pub transcode_manager: Arc<tokio::sync::RwLock<Option<Arc<infra::TranscodeManager>>>>,
@@ -51,9 +53,13 @@ pub struct AppState {
pub transcode_settings_repo: Option<Arc<dyn TranscodeSettingsRepository>>, pub transcode_settings_repo: Option<Arc<dyn TranscodeSettingsRepository>>,
/// Database pool — used by infra factory functions for hot-reload. /// Database pool — used by infra factory functions for hot-reload.
pub db_pool: Arc<DatabasePool>, pub db_pool: Arc<DatabasePool>,
pub library_repo: Arc<dyn ILibraryRepository>,
pub library_sync_adapter: Arc<dyn LibrarySyncAdapter>,
pub app_settings_repo: Arc<dyn IAppSettingsRepository>,
} }
impl AppState { impl AppState {
#[allow(clippy::too_many_arguments)]
pub async fn new( pub async fn new(
user_service: UserService, user_service: UserService,
channel_service: ChannelService, channel_service: ChannelService,
@@ -66,6 +72,9 @@ impl AppState {
log_history: Arc<Mutex<VecDeque<LogLine>>>, log_history: Arc<Mutex<VecDeque<LogLine>>>,
activity_log_repo: Arc<dyn ActivityLogRepository>, activity_log_repo: Arc<dyn ActivityLogRepository>,
db_pool: Arc<DatabasePool>, db_pool: Arc<DatabasePool>,
library_repo: Arc<dyn ILibraryRepository>,
library_sync_adapter: Arc<dyn LibrarySyncAdapter>,
app_settings_repo: Arc<dyn IAppSettingsRepository>,
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
transcode_settings_repo: Option<Arc<dyn TranscodeSettingsRepository>>, transcode_settings_repo: Option<Arc<dyn TranscodeSettingsRepository>>,
) -> anyhow::Result<Self> { ) -> anyhow::Result<Self> {
@@ -123,6 +132,7 @@ impl AppState {
config.jwt_issuer.clone(), config.jwt_issuer.clone(),
config.jwt_audience.clone(), config.jwt_audience.clone(),
Some(config.jwt_expiry_hours), Some(config.jwt_expiry_hours),
Some(config.jwt_refresh_expiry_days),
config.is_production, config.is_production,
)?; )?;
Some(Arc::new(JwtValidator::new(jwt_config))) Some(Arc::new(JwtValidator::new(jwt_config)))
@@ -145,12 +155,15 @@ impl AppState {
log_history, log_history,
activity_log_repo, activity_log_repo,
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
local_index: Arc::new(tokio::sync::RwLock::new(None)), local_index: Arc::new(tokio::sync::RwLock::new(HashMap::new())),
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
transcode_manager: Arc::new(tokio::sync::RwLock::new(None)), transcode_manager: Arc::new(tokio::sync::RwLock::new(None)),
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
transcode_settings_repo, transcode_settings_repo,
db_pool, db_pool,
library_repo,
library_sync_adapter,
app_settings_repo,
}) })
} }
} }

View File

@@ -182,8 +182,9 @@ async fn post_webhook(
let mut req = client.post(url).body(body); let mut req = client.post(url).body(body);
let mut has_content_type = false; let mut has_content_type = false;
if let Some(h) = headers_json { if let Some(h) = headers_json
if let Ok(map) = serde_json::from_str::<serde_json::Map<String, Value>>(h) { && let Ok(map) = serde_json::from_str::<serde_json::Map<String, Value>>(h)
{
for (k, v) in &map { for (k, v) in &map {
if k.to_lowercase() == "content-type" { if k.to_lowercase() == "content-type" {
has_content_type = true; has_content_type = true;
@@ -193,7 +194,6 @@ async fn post_webhook(
} }
} }
} }
}
if !has_content_type { if !has_content_type {
req = req.header("Content-Type", "application/json"); req = req.header("Content-Type", "application/json");

View File

@@ -16,3 +16,4 @@ uuid = { version = "1.19.0", features = ["v4", "serde"] }
[dev-dependencies] [dev-dependencies]
tokio = { version = "1", features = ["rt", "macros"] } tokio = { version = "1", features = ["rt", "macros"] }
serde_json = "1"

View File

@@ -6,11 +6,12 @@
pub use crate::value_objects::{Email, UserId}; pub use crate::value_objects::{Email, UserId};
use chrono::{DateTime, NaiveTime, Timelike, Utc}; use chrono::{DateTime, NaiveTime, Timelike, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use uuid::Uuid; use uuid::Uuid;
use crate::value_objects::{ use crate::value_objects::{
AccessMode, BlockId, ChannelId, ContentType, FillStrategy, LogoPosition, MediaFilter, AccessMode, BlockId, ChannelId, ContentType, FillStrategy, LogoPosition, MediaFilter,
MediaItemId, RecyclePolicy, SlotId, MediaItemId, RecyclePolicy, SlotId, Weekday,
}; };
/// A user in the system. /// A user in the system.
@@ -132,40 +133,77 @@ impl Channel {
} }
} }
/// The user-designed programming template. /// The user-designed programming template (V2: day-keyed weekly grid).
/// ///
/// This is the shareable/exportable part of a channel. It contains an ordered /// Each day of the week has its own independent list of `ProgrammingBlock`s.
/// list of `ProgrammingBlock`s but makes no assumptions about the media source. /// A day with an empty vec (or absent key) produces no slots — valid, not an error.
/// A channel does not need to cover all 24 hours — gaps are valid and render /// A channel does not need to cover all 24 hours — gaps render as no-signal.
/// as a no-signal state on the client. ///
/// `deny_unknown_fields` is required so the `#[serde(untagged)]` compat enum
/// correctly rejects V1 `{"blocks":[...]}` payloads and falls through to `OldScheduleConfig`.
#[derive(Debug, Clone, Default, Serialize, Deserialize)] #[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ScheduleConfig { pub struct ScheduleConfig {
pub day_blocks: HashMap<Weekday, Vec<ProgrammingBlock>>,
}
/// V1 on-disk shape — kept for transparent migration only.
/// Never construct directly; use `ScheduleConfigCompat` for deserialization.
/// `deny_unknown_fields` ensures V2 payloads don't accidentally match here.
#[derive(Debug, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct OldScheduleConfig {
pub blocks: Vec<ProgrammingBlock>, pub blocks: Vec<ProgrammingBlock>,
} }
/// Deserializes either V2 (`day_blocks`) or V1 (`blocks`) from the DB.
/// V1 is automatically promoted: all blocks are copied to all 7 days.
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged)]
pub enum ScheduleConfigCompat {
V2(ScheduleConfig),
V1(OldScheduleConfig),
}
impl From<ScheduleConfigCompat> for ScheduleConfig {
fn from(c: ScheduleConfigCompat) -> Self {
match c {
ScheduleConfigCompat::V2(cfg) => cfg,
ScheduleConfigCompat::V1(old) => {
let day_blocks = Weekday::all()
.into_iter()
.map(|d| (d, old.blocks.clone()))
.collect();
ScheduleConfig { day_blocks }
}
}
}
}
impl ScheduleConfig { impl ScheduleConfig {
/// Return the block whose time window contains `time`, if any. /// Blocks for a given day. Returns empty slice if the day has no blocks.
/// pub fn blocks_for(&self, day: Weekday) -> &[ProgrammingBlock] {
/// Handles blocks that span midnight (e.g. start 23:00, duration 180 min). self.day_blocks.get(&day).map(|v| v.as_slice()).unwrap_or(&[])
pub fn find_block_at(&self, time: NaiveTime) -> Option<&ProgrammingBlock> { }
/// The block whose window contains `time` on `day`, if any.
pub fn find_block_at(&self, day: Weekday, time: NaiveTime) -> Option<&ProgrammingBlock> {
let secs = time.num_seconds_from_midnight(); let secs = time.num_seconds_from_midnight();
self.blocks.iter().find(|block| { self.blocks_for(day).iter().find(|block| {
let start = block.start_time.num_seconds_from_midnight(); let start = block.start_time.num_seconds_from_midnight();
let end = start + block.duration_mins * 60; let end = start + block.duration_mins * 60;
if end <= 86_400 { if end <= 86_400 {
secs >= start && secs < end secs >= start && secs < end
} else { } else {
// Block crosses midnight: active from `start` to `end % 86400` next day
secs >= start || secs < (end % 86_400) secs >= start || secs < (end % 86_400)
} }
}) })
} }
/// Return the start time of the next block that begins strictly after `time`, /// The start time of the next block beginning strictly after `time` on `day`.
/// within the same calendar day. pub fn next_block_start_after(&self, day: Weekday, time: NaiveTime) -> Option<NaiveTime> {
pub fn next_block_start_after(&self, time: NaiveTime) -> Option<NaiveTime> {
let secs = time.num_seconds_from_midnight(); let secs = time.num_seconds_from_midnight();
self.blocks self.blocks_for(day)
.iter() .iter()
.map(|b| b.start_time.num_seconds_from_midnight()) .map(|b| b.start_time.num_seconds_from_midnight())
.filter(|&s| s > secs) .filter(|&s| s > secs)
@@ -173,9 +211,15 @@ impl ScheduleConfig {
.and_then(|s| NaiveTime::from_num_seconds_from_midnight_opt(s, 0)) .and_then(|s| NaiveTime::from_num_seconds_from_midnight_opt(s, 0))
} }
/// The earliest block start time across all blocks (used for next-day rollover). /// Earliest block start time across ALL days (used by background scheduler).
/// Returns `None` if every day is empty.
pub fn earliest_block_start(&self) -> Option<NaiveTime> { pub fn earliest_block_start(&self) -> Option<NaiveTime> {
self.blocks.iter().map(|b| b.start_time).min() self.day_blocks.values().flatten().map(|b| b.start_time).min()
}
/// Iterator over all blocks across all days (for block-ID lookups that are day-agnostic).
pub fn all_blocks(&self) -> impl Iterator<Item = &ProgrammingBlock> {
self.day_blocks.values().flatten()
} }
} }
@@ -302,9 +346,13 @@ pub struct MediaItem {
pub season_number: Option<u32>, pub season_number: Option<u32>,
/// For episodes: episode number within the season (1-based). /// For episodes: episode number within the season (1-based).
pub episode_number: Option<u32>, pub episode_number: Option<u32>,
/// Provider-served thumbnail image URL, populated if available.
pub thumbnail_url: Option<String>,
/// Provider-specific collection this item belongs to.
pub collection_id: Option<String>,
} }
/// A fully resolved 48-hour broadcast program for one channel. /// A fully resolved 7-day broadcast program for one channel.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GeneratedSchedule { pub struct GeneratedSchedule {
pub id: Uuid, pub id: Uuid,
@@ -356,6 +404,18 @@ pub struct PlaybackRecord {
pub generation: u32, pub generation: u32,
} }
/// A point-in-time snapshot of a channel's `ScheduleConfig`.
/// Auto-created on every config save; users can pin with a label.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChannelConfigSnapshot {
pub id: Uuid,
pub channel_id: ChannelId,
pub config: ScheduleConfig,
pub version_num: i64,
pub label: Option<String>,
pub created_at: DateTime<Utc>,
}
impl PlaybackRecord { impl PlaybackRecord {
pub fn new(channel_id: ChannelId, item_id: MediaItemId, generation: u32) -> Self { pub fn new(channel_id: ChannelId, item_id: MediaItemId, generation: u32) -> Self {
Self { Self {
@@ -367,3 +427,74 @@ impl PlaybackRecord {
} }
} }
} }
#[cfg(test)]
mod schedule_config_tests {
use super::*;
use chrono::NaiveTime;
fn t(h: u32, m: u32) -> NaiveTime {
NaiveTime::from_hms_opt(h, m, 0).unwrap()
}
fn make_block(start: NaiveTime, duration_mins: u32) -> ProgrammingBlock {
ProgrammingBlock::new_algorithmic(
"test", start, duration_mins,
Default::default(), FillStrategy::Random,
)
}
fn cfg_with_monday_block(start: NaiveTime, dur: u32) -> ScheduleConfig {
let mut cfg = ScheduleConfig::default();
cfg.day_blocks.insert(Weekday::Monday, vec![make_block(start, dur)]);
cfg
}
#[test]
fn find_block_at_finds_active_block() {
let cfg = cfg_with_monday_block(t(8, 0), 60);
assert!(cfg.find_block_at(Weekday::Monday, t(8, 30)).is_some());
assert!(cfg.find_block_at(Weekday::Monday, t(9, 0)).is_none());
}
#[test]
fn find_block_at_wrong_day_returns_none() {
let cfg = cfg_with_monday_block(t(8, 0), 60);
assert!(cfg.find_block_at(Weekday::Tuesday, t(8, 30)).is_none());
}
#[test]
fn v1_compat_copies_blocks_to_all_days() {
let json = r#"{"blocks": []}"#;
let compat: ScheduleConfigCompat = serde_json::from_str(json).unwrap();
let cfg: ScheduleConfig = compat.into();
assert_eq!(cfg.day_blocks.len(), 7);
}
#[test]
fn v2_payload_with_unknown_blocks_key_fails() {
let json = r#"{"blocks": [], "day_blocks": {}}"#;
let result: Result<ScheduleConfigCompat, _> = serde_json::from_str(json);
match result {
Ok(ScheduleConfigCompat::V2(cfg)) => {
let _ = cfg;
}
Ok(ScheduleConfigCompat::V1(_)) => { /* acceptable */ }
Err(_) => { /* acceptable — ambiguous payload rejected */ }
}
}
#[test]
fn earliest_block_start_across_days() {
let mut cfg = ScheduleConfig::default();
cfg.day_blocks.insert(Weekday::Monday, vec![make_block(t(10, 0), 60)]);
cfg.day_blocks.insert(Weekday::Friday, vec![make_block(t(7, 0), 60)]);
assert_eq!(cfg.earliest_block_start(), Some(t(7, 0)));
}
#[test]
fn empty_config_earliest_block_start_is_none() {
let cfg = ScheduleConfig::default();
assert!(cfg.earliest_block_start().is_none());
}
}

View File

@@ -58,6 +58,8 @@ mod tests {
series_name: None, series_name: None,
season_number: None, season_number: None,
episode_number: None, episode_number: None,
thumbnail_url: None,
collection_id: None,
}, },
source_block_id: Uuid::new_v4(), source_block_id: Uuid::new_v4(),
} }

View File

@@ -6,6 +6,7 @@
pub mod entities; pub mod entities;
pub mod errors; pub mod errors;
pub mod iptv; pub mod iptv;
pub mod library;
pub mod ports; pub mod ports;
pub mod repositories; pub mod repositories;
pub mod services; pub mod services;
@@ -19,5 +20,10 @@ pub use events::DomainEvent;
pub use ports::{Collection, IMediaProvider, IProviderRegistry, ProviderCapabilities, SeriesSummary, StreamingProtocol, StreamQuality}; pub use ports::{Collection, IMediaProvider, IProviderRegistry, ProviderCapabilities, SeriesSummary, StreamingProtocol, StreamQuality};
pub use repositories::*; pub use repositories::*;
pub use iptv::{generate_m3u, generate_xmltv}; pub use iptv::{generate_m3u, generate_xmltv};
pub use library::{
ILibraryRepository, LibraryCollection, LibraryItem, LibrarySearchFilter,
LibrarySyncAdapter, LibrarySyncLogEntry, LibrarySyncResult,
SeasonSummary, ShowSummary,
};
pub use services::{ChannelService, ScheduleEngineService, UserService}; pub use services::{ChannelService, ScheduleEngineService, UserService};
pub use value_objects::*; pub use value_objects::*;

View File

@@ -0,0 +1,187 @@
//! Library domain types and ports.
use async_trait::async_trait;
use crate::{ContentType, DomainResult, IMediaProvider};
/// A media item stored in the local library cache.
#[derive(Debug, Clone)]
pub struct LibraryItem {
pub id: String,
pub provider_id: String,
pub external_id: String,
pub title: String,
pub content_type: ContentType,
pub duration_secs: u32,
pub series_name: Option<String>,
pub season_number: Option<u32>,
pub episode_number: Option<u32>,
pub year: Option<u16>,
pub genres: Vec<String>,
pub tags: Vec<String>,
pub collection_id: Option<String>,
pub collection_name: Option<String>,
pub collection_type: Option<String>,
pub thumbnail_url: Option<String>,
pub synced_at: String,
}
/// A collection summary derived from synced library items.
#[derive(Debug, Clone)]
pub struct LibraryCollection {
pub id: String,
pub name: String,
pub collection_type: Option<String>,
}
/// Result of a single provider sync run.
#[derive(Debug, Clone)]
pub struct LibrarySyncResult {
pub provider_id: String,
pub items_found: u32,
pub duration_ms: u64,
pub error: Option<String>,
}
/// Log entry from library_sync_log table.
#[derive(Debug, Clone)]
pub struct LibrarySyncLogEntry {
pub id: i64,
pub provider_id: String,
pub started_at: String,
pub finished_at: Option<String>,
pub items_found: u32,
pub status: String,
pub error_msg: Option<String>,
}
/// Filter for searching the local library.
#[derive(Debug, Clone)]
pub struct LibrarySearchFilter {
pub provider_id: Option<String>,
pub content_type: Option<ContentType>,
pub series_names: Vec<String>,
pub collection_id: Option<String>,
pub genres: Vec<String>,
pub decade: Option<u16>,
pub min_duration_secs: Option<u32>,
pub max_duration_secs: Option<u32>,
pub search_term: Option<String>,
pub season_number: Option<u32>,
pub offset: u32,
pub limit: u32,
}
impl Default for LibrarySearchFilter {
fn default() -> Self {
Self {
provider_id: None,
content_type: None,
series_names: vec![],
collection_id: None,
genres: vec![],
decade: None,
min_duration_secs: None,
max_duration_secs: None,
search_term: None,
season_number: None,
offset: 0,
limit: 50,
}
}
}
/// Aggregated summary of a TV show derived from synced episodes.
#[derive(Debug, Clone)]
pub struct ShowSummary {
pub series_name: String,
pub episode_count: u32,
pub season_count: u32,
pub thumbnail_url: Option<String>,
pub genres: Vec<String>,
}
/// Aggregated summary of one season of a TV show.
#[derive(Debug, Clone)]
pub struct SeasonSummary {
pub season_number: u32,
pub episode_count: u32,
pub thumbnail_url: Option<String>,
}
/// Port: sync one provider's items into the library repo.
/// DB writes are handled entirely inside implementations — no pool in the trait.
#[async_trait]
pub trait LibrarySyncAdapter: Send + Sync {
async fn sync_provider(
&self,
provider: &dyn IMediaProvider,
provider_id: &str,
) -> LibrarySyncResult;
}
/// Port: read/write access to the persisted library.
#[async_trait]
pub trait ILibraryRepository: Send + Sync {
async fn search(&self, filter: &LibrarySearchFilter) -> DomainResult<(Vec<LibraryItem>, u32)>;
async fn get_by_id(&self, id: &str) -> DomainResult<Option<LibraryItem>>;
async fn list_collections(&self, provider_id: Option<&str>) -> DomainResult<Vec<LibraryCollection>>;
async fn list_series(&self, provider_id: Option<&str>) -> DomainResult<Vec<String>>;
async fn list_genres(&self, content_type: Option<&ContentType>, provider_id: Option<&str>) -> DomainResult<Vec<String>>;
async fn upsert_items(&self, provider_id: &str, items: Vec<LibraryItem>) -> DomainResult<()>;
async fn clear_provider(&self, provider_id: &str) -> DomainResult<()>;
async fn log_sync_start(&self, provider_id: &str) -> DomainResult<i64>;
async fn log_sync_finish(&self, log_id: i64, result: &LibrarySyncResult) -> DomainResult<()>;
async fn latest_sync_status(&self) -> DomainResult<Vec<LibrarySyncLogEntry>>;
async fn is_sync_running(&self, provider_id: &str) -> DomainResult<bool>;
async fn list_shows(
&self,
provider_id: Option<&str>,
search_term: Option<&str>,
genres: &[String],
) -> DomainResult<Vec<ShowSummary>>;
async fn list_seasons(
&self,
series_name: &str,
provider_id: Option<&str>,
) -> DomainResult<Vec<SeasonSummary>>;
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn library_item_id_uses_double_colon_separator() {
let item = LibraryItem {
id: "jellyfin::abc123".to_string(),
provider_id: "jellyfin".to_string(),
external_id: "abc123".to_string(),
title: "Test Movie".to_string(),
content_type: crate::ContentType::Movie,
duration_secs: 7200,
series_name: None,
season_number: None,
episode_number: None,
year: Some(2020),
genres: vec!["Action".to_string()],
tags: vec![],
collection_id: None,
collection_name: None,
collection_type: None,
thumbnail_url: None,
synced_at: "2026-03-19T00:00:00Z".to_string(),
};
assert!(item.id.contains("::"));
assert_eq!(item.provider_id, "jellyfin");
}
#[test]
fn library_search_filter_defaults_are_empty() {
let f = LibrarySearchFilter::default();
assert!(f.genres.is_empty());
assert!(f.series_names.is_empty());
assert_eq!(f.offset, 0);
assert_eq!(f.limit, 50);
}
}

View File

@@ -3,14 +3,16 @@
//! These traits define the interface for data persistence. //! These traits define the interface for data persistence.
//! Implementations live in the infra layer. //! Implementations live in the infra layer.
use std::collections::HashMap;
use async_trait::async_trait; use async_trait::async_trait;
use chrono::DateTime; use chrono::DateTime;
use chrono::Utc; use chrono::Utc;
use uuid::Uuid; use uuid::Uuid;
use crate::entities::{Channel, GeneratedSchedule, PlaybackRecord, User}; use crate::entities::{Channel, ChannelConfigSnapshot, GeneratedSchedule, PlaybackRecord, ScheduleConfig, User};
use crate::errors::DomainResult; use crate::errors::DomainResult;
use crate::value_objects::{ChannelId, UserId}; use crate::value_objects::{BlockId, ChannelId, MediaItemId, UserId};
/// An in-app activity event stored in the database for the admin log view. /// An in-app activity event stored in the database for the admin log view.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -46,6 +48,7 @@ pub trait UserRepository: Send + Sync {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ProviderConfigRow { pub struct ProviderConfigRow {
pub id: String,
pub provider_type: String, pub provider_type: String,
pub config_json: String, pub config_json: String,
pub enabled: bool, pub enabled: bool,
@@ -55,8 +58,9 @@ pub struct ProviderConfigRow {
#[async_trait] #[async_trait]
pub trait ProviderConfigRepository: Send + Sync { pub trait ProviderConfigRepository: Send + Sync {
async fn get_all(&self) -> DomainResult<Vec<ProviderConfigRow>>; async fn get_all(&self) -> DomainResult<Vec<ProviderConfigRow>>;
async fn get_by_id(&self, id: &str) -> DomainResult<Option<ProviderConfigRow>>;
async fn upsert(&self, row: &ProviderConfigRow) -> DomainResult<()>; async fn upsert(&self, row: &ProviderConfigRow) -> DomainResult<()>;
async fn delete(&self, provider_type: &str) -> DomainResult<()>; async fn delete(&self, id: &str) -> DomainResult<()>;
} }
/// Repository port for `Channel` persistence. /// Repository port for `Channel` persistence.
@@ -69,6 +73,33 @@ pub trait ChannelRepository: Send + Sync {
/// Insert or update a channel. /// Insert or update a channel.
async fn save(&self, channel: &Channel) -> DomainResult<()>; async fn save(&self, channel: &Channel) -> DomainResult<()>;
async fn delete(&self, id: ChannelId) -> DomainResult<()>; async fn delete(&self, id: ChannelId) -> DomainResult<()>;
/// Snapshot the current config before saving a new one.
/// version_num is computed by the infra layer as MAX(version_num)+1 inside a transaction.
async fn save_config_snapshot(
&self,
channel_id: ChannelId,
config: &ScheduleConfig,
label: Option<String>,
) -> DomainResult<ChannelConfigSnapshot>;
async fn list_config_snapshots(
&self,
channel_id: ChannelId,
) -> DomainResult<Vec<ChannelConfigSnapshot>>;
async fn get_config_snapshot(
&self,
channel_id: ChannelId,
snapshot_id: Uuid,
) -> DomainResult<Option<ChannelConfigSnapshot>>;
async fn patch_config_snapshot_label(
&self,
channel_id: ChannelId,
snapshot_id: Uuid,
label: Option<String>,
) -> DomainResult<Option<ChannelConfigSnapshot>>;
} }
/// Repository port for `GeneratedSchedule` and `PlaybackRecord` persistence. /// Repository port for `GeneratedSchedule` and `PlaybackRecord` persistence.
@@ -98,6 +129,35 @@ pub trait ScheduleRepository: Send + Sync {
) -> DomainResult<Vec<PlaybackRecord>>; ) -> DomainResult<Vec<PlaybackRecord>>;
async fn save_playback_record(&self, record: &PlaybackRecord) -> DomainResult<()>; async fn save_playback_record(&self, record: &PlaybackRecord) -> DomainResult<()>;
/// Return the most recent slot per block_id across ALL schedules for a channel.
/// Resilient to any single generation having empty slots for a block.
async fn find_last_slot_per_block(
&self,
channel_id: ChannelId,
) -> DomainResult<HashMap<BlockId, MediaItemId>>;
/// List all generated schedule headers for a channel, newest first.
async fn list_schedule_history(
&self,
channel_id: ChannelId,
) -> DomainResult<Vec<GeneratedSchedule>>;
/// Fetch a specific schedule with its slots, verifying channel ownership.
async fn get_schedule_by_id(
&self,
channel_id: ChannelId,
schedule_id: Uuid,
) -> DomainResult<Option<GeneratedSchedule>>;
/// Delete all schedules with generation > target_generation for this channel.
/// Also deletes matching playback_records (no DB cascade between those tables).
/// scheduled_slots cascade via FK from generated_schedules.
async fn delete_schedules_after(
&self,
channel_id: ChannelId,
target_generation: u32,
) -> DomainResult<()>;
} }
/// Repository port for activity log persistence. /// Repository port for activity log persistence.
@@ -120,3 +180,14 @@ pub trait TranscodeSettingsRepository: Send + Sync {
/// Persist the cleanup TTL (upsert — always row id=1). /// Persist the cleanup TTL (upsert — always row id=1).
async fn save_cleanup_ttl(&self, hours: u32) -> DomainResult<()>; async fn save_cleanup_ttl(&self, hours: u32) -> DomainResult<()>;
} }
/// Repository port for general admin settings (app_settings table).
#[async_trait]
pub trait IAppSettingsRepository: Send + Sync {
/// Get a setting value by key. Returns None if not set.
async fn get(&self, key: &str) -> DomainResult<Option<String>>;
/// Set a setting value (upsert).
async fn set(&self, key: &str, value: &str) -> DomainResult<()>;
/// Get all settings as (key, value) pairs.
async fn get_all(&self) -> DomainResult<Vec<(String, String)>>;
}

View File

@@ -1,6 +1,8 @@
use std::sync::Arc; use std::sync::Arc;
use crate::entities::Channel; use uuid::Uuid;
use crate::entities::{Channel, ChannelConfigSnapshot, ScheduleConfig};
use crate::errors::{DomainError, DomainResult}; use crate::errors::{DomainError, DomainResult};
use crate::repositories::ChannelRepository; use crate::repositories::ChannelRepository;
use crate::value_objects::{ChannelId, UserId}; use crate::value_objects::{ChannelId, UserId};
@@ -42,10 +44,75 @@ impl ChannelService {
} }
pub async fn update(&self, channel: Channel) -> DomainResult<Channel> { pub async fn update(&self, channel: Channel) -> DomainResult<Channel> {
// Auto-snapshot the existing config before overwriting
if let Some(existing) = self.channel_repo.find_by_id(channel.id).await? {
self.channel_repo
.save_config_snapshot(channel.id, &existing.schedule_config, None)
.await?;
}
self.channel_repo.save(&channel).await?; self.channel_repo.save(&channel).await?;
Ok(channel) Ok(channel)
} }
pub async fn list_config_snapshots(
&self,
channel_id: ChannelId,
) -> DomainResult<Vec<ChannelConfigSnapshot>> {
self.channel_repo.list_config_snapshots(channel_id).await
}
pub async fn get_config_snapshot(
&self,
channel_id: ChannelId,
snapshot_id: Uuid,
) -> DomainResult<Option<ChannelConfigSnapshot>> {
self.channel_repo.get_config_snapshot(channel_id, snapshot_id).await
}
pub async fn patch_config_snapshot_label(
&self,
channel_id: ChannelId,
snapshot_id: Uuid,
label: Option<String>,
) -> DomainResult<Option<ChannelConfigSnapshot>> {
self.channel_repo.patch_config_snapshot_label(channel_id, snapshot_id, label).await
}
/// Restore a snapshot: auto-snapshot current config, then apply the snapshot's config.
pub async fn restore_config_snapshot(
&self,
channel_id: ChannelId,
snapshot_id: Uuid,
) -> DomainResult<Channel> {
let snapshot = self
.channel_repo
.get_config_snapshot(channel_id, snapshot_id)
.await?
.ok_or(DomainError::ChannelNotFound(channel_id))?;
let mut channel = self
.channel_repo
.find_by_id(channel_id)
.await?
.ok_or(DomainError::ChannelNotFound(channel_id))?;
// Snapshot current config before overwriting
self.channel_repo
.save_config_snapshot(channel_id, &channel.schedule_config, None)
.await?;
channel.schedule_config = snapshot.config;
channel.updated_at = chrono::Utc::now();
self.channel_repo.save(&channel).await?;
Ok(channel)
}
pub async fn save_config_snapshot(
&self,
channel_id: ChannelId,
config: &ScheduleConfig,
label: Option<String>,
) -> DomainResult<ChannelConfigSnapshot> {
self.channel_repo.save_config_snapshot(channel_id, config, label).await
}
/// Delete a channel, enforcing that `requester_id` is the owner. /// Delete a channel, enforcing that `requester_id` is the owner.
pub async fn delete(&self, id: ChannelId, requester_id: UserId) -> DomainResult<()> { pub async fn delete(&self, id: ChannelId, requester_id: UserId) -> DomainResult<()> {
let channel = self.find_by_id(id).await?; let channel = self.find_by_id(id).await?;

View File

@@ -1,6 +1,8 @@
use std::collections::HashSet; use std::collections::HashSet;
use rand::rngs::StdRng;
use rand::seq::SliceRandom; use rand::seq::SliceRandom;
use rand::SeedableRng;
use crate::entities::MediaItem; use crate::entities::MediaItem;
use crate::value_objects::{FillStrategy, MediaItemId}; use crate::value_objects::{FillStrategy, MediaItemId};
@@ -20,7 +22,7 @@ pub(super) fn fill_block<'a>(
} }
FillStrategy::Random => { FillStrategy::Random => {
let mut indices: Vec<usize> = (0..pool.len()).collect(); let mut indices: Vec<usize> = (0..pool.len()).collect();
indices.shuffle(&mut rand::thread_rng()); indices.shuffle(&mut StdRng::from_entropy());
let mut remaining = target_secs; let mut remaining = target_secs;
let mut result = Vec::new(); let mut result = Vec::new();
for i in indices { for i in indices {
@@ -127,12 +129,22 @@ pub(super) fn fill_sequential<'a>(
}; };
// Greedily fill the block's time budget in episode order. // Greedily fill the block's time budget in episode order.
// Stop at the first episode that doesn't fit — skipping would break ordering.
let mut remaining = target_secs; let mut remaining = target_secs;
let mut result = Vec::new(); let mut result = Vec::new();
for item in ordered { for item in &ordered {
if item.duration_secs <= remaining { if item.duration_secs <= remaining {
remaining -= item.duration_secs; remaining -= item.duration_secs;
result.push(item); result.push(*item);
} else {
break;
}
}
// Edge case: if the very first episode is longer than the entire block,
// still include it — the slot builder clips it to block end via .min(end).
if result.is_empty() {
if let Some(&first) = ordered.first() {
result.push(first);
} }
} }
result result

View File

@@ -1,7 +1,6 @@
use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use chrono::{DateTime, Duration, TimeZone, Utc}; use chrono::{DateTime, Datelike, Duration, TimeZone, Utc};
use chrono_tz::Tz; use chrono_tz::Tz;
use uuid::Uuid; use uuid::Uuid;
@@ -21,7 +20,7 @@ mod recycle;
/// Core scheduling engine. /// Core scheduling engine.
/// ///
/// Generates 48-hour broadcast schedules by walking through a channel's /// Generates 7-day broadcast schedules by walking through a channel's
/// `ScheduleConfig` day by day, resolving each `ProgrammingBlock` into concrete /// `ScheduleConfig` day by day, resolving each `ProgrammingBlock` into concrete
/// `ScheduledSlot`s via the `IMediaProvider`, and applying the `RecyclePolicy` /// `ScheduledSlot`s via the `IMediaProvider`, and applying the `RecyclePolicy`
/// to avoid replaying recently aired items. /// to avoid replaying recently aired items.
@@ -48,12 +47,12 @@ impl ScheduleEngineService {
// Public API // Public API
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
/// Generate and persist a 48-hour schedule for `channel_id` starting at `from`. /// Generate and persist a 7-day schedule for `channel_id` starting at `from`.
/// ///
/// The algorithm: /// The algorithm:
/// 1. Walk each calendar day in the 48-hour window. /// 1. Walk each calendar day in the 7-day window.
/// 2. For each `ProgrammingBlock`, compute its UTC wall-clock interval for that day. /// 2. For each `ProgrammingBlock`, compute its UTC wall-clock interval for that day.
/// 3. Clip the interval to `[from, from + 48h)`. /// 3. Clip the interval to `[from, from + 7d)`.
/// 4. Resolve the block content via the media provider, applying the recycle policy. /// 4. Resolve the block content via the media provider, applying the recycle policy.
/// 5. For `Sequential` blocks, resume from where the previous generation left off /// 5. For `Sequential` blocks, resume from where the previous generation left off
/// (series continuity — see `fill::fill_sequential`). /// (series continuity — see `fill::fill_sequential`).
@@ -91,21 +90,18 @@ impl ScheduleEngineService {
.map(|s| s.generation + 1) .map(|s| s.generation + 1)
.unwrap_or(1); .unwrap_or(1);
// Build the initial per-block continuity map from the previous generation's // Build the initial per-block continuity map from the most recent slot per
// last slot per block. The map is updated as each block occurrence is resolved // block across ALL schedules. This is resilient to any single generation
// within this generation so that the second day of a 48h schedule continues // having empty slots for a block (e.g. provider returned nothing transiently).
// from where the first day ended. // The map is updated as each block occurrence is resolved within this
let mut block_continuity: HashMap<BlockId, MediaItemId> = latest_schedule // generation so the second day of a 48h schedule continues from here.
.iter() let mut block_continuity = self
.flat_map(|s| &s.slots) .schedule_repo
.fold(HashMap::new(), |mut map, slot| { .find_last_slot_per_block(channel_id)
// keep only the *last* slot per block (slots are sorted ascending) .await?;
map.insert(slot.source_block_id, slot.item.id.clone());
map
});
let valid_from = from; let valid_from = from;
let valid_until = from + Duration::hours(48); let valid_until = from + Duration::days(7);
let start_date = from.with_timezone(&tz).date_naive(); let start_date = from.with_timezone(&tz).date_naive();
let end_date = valid_until.with_timezone(&tz).date_naive(); let end_date = valid_until.with_timezone(&tz).date_naive();
@@ -114,7 +110,8 @@ impl ScheduleEngineService {
let mut current_date = start_date; let mut current_date = start_date;
while current_date <= end_date { while current_date <= end_date {
for block in &channel.schedule_config.blocks { let weekday = crate::value_objects::Weekday::from(current_date.weekday());
for block in channel.schedule_config.blocks_for(weekday) {
let naive_start = current_date.and_time(block.start_time); let naive_start = current_date.and_time(block.start_time);
// `earliest()` handles DST gaps — if the local time doesn't exist // `earliest()` handles DST gaps — if the local time doesn't exist
@@ -127,7 +124,7 @@ impl ScheduleEngineService {
let block_end_utc = let block_end_utc =
block_start_utc + Duration::minutes(block.duration_mins as i64); block_start_utc + Duration::minutes(block.duration_mins as i64);
// Clip to the 48-hour window. // Clip to the 7-day window.
let slot_start = block_start_utc.max(valid_from); let slot_start = block_start_utc.max(valid_from);
let slot_end = block_end_utc.min(valid_until); let slot_end = block_end_utc.min(valid_until);
@@ -228,12 +225,38 @@ impl ScheduleEngineService {
self.provider_registry.get_stream_url(item_id, quality).await self.provider_registry.get_stream_url(item_id, quality).await
} }
/// List all generated schedule headers for a channel, newest first.
pub async fn list_schedule_history(
&self,
channel_id: ChannelId,
) -> DomainResult<Vec<GeneratedSchedule>> {
self.schedule_repo.list_schedule_history(channel_id).await
}
/// Fetch a specific schedule with its slots.
pub async fn get_schedule_by_id(
&self,
channel_id: ChannelId,
schedule_id: uuid::Uuid,
) -> DomainResult<Option<GeneratedSchedule>> {
self.schedule_repo.get_schedule_by_id(channel_id, schedule_id).await
}
/// Delete all schedules with generation > target_generation for this channel.
pub async fn delete_schedules_after(
&self,
channel_id: ChannelId,
target_generation: u32,
) -> DomainResult<()> {
self.schedule_repo.delete_schedules_after(channel_id, target_generation).await
}
/// Return all slots that overlap the given time window — the EPG data. /// Return all slots that overlap the given time window — the EPG data.
pub fn get_epg<'a>( pub fn get_epg(
schedule: &'a GeneratedSchedule, schedule: &GeneratedSchedule,
from: DateTime<Utc>, from: DateTime<Utc>,
until: DateTime<Utc>, until: DateTime<Utc>,
) -> Vec<&'a ScheduledSlot> { ) -> Vec<&ScheduledSlot> {
schedule schedule
.slots .slots
.iter() .iter()
@@ -245,6 +268,7 @@ impl ScheduleEngineService {
// Block resolution // Block resolution
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
#[allow(clippy::too_many_arguments)]
async fn resolve_block( async fn resolve_block(
&self, &self,
block: &ProgrammingBlock, block: &ProgrammingBlock,
@@ -310,6 +334,7 @@ impl ScheduleEngineService {
/// ///
/// `last_item_id` is the ID of the last item scheduled in this block in the /// `last_item_id` is the ID of the last item scheduled in this block in the
/// previous generation. Used only by `Sequential` for series continuity. /// previous generation. Used only by `Sequential` for series continuity.
#[allow(clippy::too_many_arguments)]
async fn resolve_algorithmic( async fn resolve_algorithmic(
&self, &self,
provider_id: &str, provider_id: &str,

View File

@@ -138,3 +138,64 @@ impl Default for RecyclePolicy {
} }
} }
} }
/// Day of week, used as key in weekly schedule configs.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Weekday {
Monday,
Tuesday,
Wednesday,
Thursday,
Friday,
Saturday,
Sunday,
}
impl From<chrono::Weekday> for Weekday {
fn from(w: chrono::Weekday) -> Self {
match w {
chrono::Weekday::Mon => Weekday::Monday,
chrono::Weekday::Tue => Weekday::Tuesday,
chrono::Weekday::Wed => Weekday::Wednesday,
chrono::Weekday::Thu => Weekday::Thursday,
chrono::Weekday::Fri => Weekday::Friday,
chrono::Weekday::Sat => Weekday::Saturday,
chrono::Weekday::Sun => Weekday::Sunday,
}
}
}
impl Weekday {
pub fn all() -> [Weekday; 7] {
// ISO week order: Monday = index 0, Sunday = index 6.
// The schedule engine depends on this order when iterating days.
[
Weekday::Monday, Weekday::Tuesday, Weekday::Wednesday,
Weekday::Thursday, Weekday::Friday, Weekday::Saturday, Weekday::Sunday,
]
}
}
#[cfg(test)]
mod weekday_tests {
use super::*;
#[test]
fn from_chrono_weekday_all_variants() {
assert_eq!(Weekday::from(chrono::Weekday::Mon), Weekday::Monday);
assert_eq!(Weekday::from(chrono::Weekday::Tue), Weekday::Tuesday);
assert_eq!(Weekday::from(chrono::Weekday::Wed), Weekday::Wednesday);
assert_eq!(Weekday::from(chrono::Weekday::Thu), Weekday::Thursday);
assert_eq!(Weekday::from(chrono::Weekday::Fri), Weekday::Friday);
assert_eq!(Weekday::from(chrono::Weekday::Sat), Weekday::Saturday);
assert_eq!(Weekday::from(chrono::Weekday::Sun), Weekday::Sunday);
}
#[test]
fn all_returns_monday_first_sunday_last() {
let days = Weekday::all();
assert_eq!(days[0], Weekday::Monday);
assert_eq!(days[6], Weekday::Sunday);
}
}

View File

@@ -0,0 +1,83 @@
//! SQLite implementation of IAppSettingsRepository.
use async_trait::async_trait;
use sqlx::SqlitePool;
use domain::{DomainError, DomainResult, IAppSettingsRepository};
pub struct SqliteAppSettingsRepository {
pool: SqlitePool,
}
impl SqliteAppSettingsRepository {
pub fn new(pool: SqlitePool) -> Self {
Self { pool }
}
}
#[async_trait]
impl IAppSettingsRepository for SqliteAppSettingsRepository {
async fn get(&self, key: &str) -> DomainResult<Option<String>> {
sqlx::query_scalar::<_, String>("SELECT value FROM app_settings WHERE key = ?")
.bind(key)
.fetch_optional(&self.pool)
.await
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
}
async fn set(&self, key: &str, value: &str) -> DomainResult<()> {
sqlx::query("INSERT OR REPLACE INTO app_settings (key, value) VALUES (?, ?)")
.bind(key)
.bind(value)
.execute(&self.pool)
.await
.map(|_| ())
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
}
async fn get_all(&self) -> DomainResult<Vec<(String, String)>> {
sqlx::query_as::<_, (String, String)>("SELECT key, value FROM app_settings ORDER BY key")
.fetch_all(&self.pool)
.await
.map_err(|e| DomainError::InfrastructureError(e.to_string()))
}
}
#[cfg(test)]
mod tests {
use super::*;
use sqlx::SqlitePool;
use domain::IAppSettingsRepository;
async fn setup() -> SqlitePool {
let pool = SqlitePool::connect(":memory:").await.unwrap();
sqlx::query(
"CREATE TABLE app_settings (key TEXT PRIMARY KEY, value TEXT NOT NULL)"
).execute(&pool).await.unwrap();
sqlx::query("INSERT INTO app_settings VALUES ('library_sync_interval_hours', '6')")
.execute(&pool).await.unwrap();
pool
}
#[tokio::test]
async fn get_returns_seeded_value() {
let repo = SqliteAppSettingsRepository::new(setup().await);
let val = repo.get("library_sync_interval_hours").await.unwrap();
assert_eq!(val, Some("6".to_string()));
}
#[tokio::test]
async fn set_then_get() {
let repo = SqliteAppSettingsRepository::new(setup().await);
repo.set("library_sync_interval_hours", "12").await.unwrap();
let val = repo.get("library_sync_interval_hours").await.unwrap();
assert_eq!(val, Some("12".to_string()));
}
#[tokio::test]
async fn get_all_returns_all_keys() {
let repo = SqliteAppSettingsRepository::new(setup().await);
let all = repo.get_all().await.unwrap();
assert!(!all.is_empty());
assert!(all.iter().any(|(k, _)| k == "library_sync_interval_hours"));
}
}

View File

@@ -20,8 +20,10 @@ pub struct JwtConfig {
pub issuer: Option<String>, pub issuer: Option<String>,
/// Expected audience (for validation) /// Expected audience (for validation)
pub audience: Option<String>, pub audience: Option<String>,
/// Token expiry in hours (default: 24) /// Access token expiry in hours (default: 24)
pub expiry_hours: u64, pub expiry_hours: u64,
/// Refresh token expiry in days (default: 30)
pub refresh_expiry_days: u64,
} }
impl JwtConfig { impl JwtConfig {
@@ -33,6 +35,7 @@ impl JwtConfig {
issuer: Option<String>, issuer: Option<String>,
audience: Option<String>, audience: Option<String>,
expiry_hours: Option<u64>, expiry_hours: Option<u64>,
refresh_expiry_days: Option<u64>,
is_production: bool, is_production: bool,
) -> Result<Self, JwtError> { ) -> Result<Self, JwtError> {
// Validate secret strength in production // Validate secret strength in production
@@ -48,6 +51,7 @@ impl JwtConfig {
issuer, issuer,
audience, audience,
expiry_hours: expiry_hours.unwrap_or(24), expiry_hours: expiry_hours.unwrap_or(24),
refresh_expiry_days: refresh_expiry_days.unwrap_or(30),
}) })
} }
@@ -58,10 +62,15 @@ impl JwtConfig {
issuer: None, issuer: None,
audience: None, audience: None,
expiry_hours: 24, expiry_hours: 24,
refresh_expiry_days: 30,
} }
} }
} }
fn default_token_type() -> String {
"access".to_string()
}
/// JWT claims structure /// JWT claims structure
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JwtClaims { pub struct JwtClaims {
@@ -79,6 +88,9 @@ pub struct JwtClaims {
/// Audience /// Audience
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub aud: Option<String>, pub aud: Option<String>,
/// Token type: "access" or "refresh". Defaults to "access" for backward compat.
#[serde(default = "default_token_type")]
pub token_type: String,
} }
/// JWT-related errors /// JWT-related errors
@@ -141,7 +153,7 @@ impl JwtValidator {
} }
} }
/// Create a JWT token for the given user /// Create an access JWT token for the given user
pub fn create_token(&self, user: &User) -> Result<String, JwtError> { pub fn create_token(&self, user: &User) -> Result<String, JwtError> {
let now = SystemTime::now() let now = SystemTime::now()
.duration_since(UNIX_EPOCH) .duration_since(UNIX_EPOCH)
@@ -157,6 +169,30 @@ impl JwtValidator {
iat: now, iat: now,
iss: self.config.issuer.clone(), iss: self.config.issuer.clone(),
aud: self.config.audience.clone(), aud: self.config.audience.clone(),
token_type: "access".to_string(),
};
let header = Header::new(Algorithm::HS256);
encode(&header, &claims, &self.encoding_key).map_err(JwtError::CreationFailed)
}
/// Create a refresh JWT token for the given user (longer-lived)
pub fn create_refresh_token(&self, user: &User) -> Result<String, JwtError> {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("Time went backwards")
.as_secs() as usize;
let expiry = now + (self.config.refresh_expiry_days as usize * 86400);
let claims = JwtClaims {
sub: user.id.to_string(),
email: user.email.as_ref().to_string(),
exp: expiry,
iat: now,
iss: self.config.issuer.clone(),
aud: self.config.audience.clone(),
token_type: "refresh".to_string(),
}; };
let header = Header::new(Algorithm::HS256); let header = Header::new(Algorithm::HS256);
@@ -176,14 +212,28 @@ impl JwtValidator {
Ok(token_data.claims) Ok(token_data.claims)
} }
/// Validate an access token — rejects refresh tokens
pub fn validate_access_token(&self, token: &str) -> Result<JwtClaims, JwtError> {
let claims = self.validate_token(token)?;
if claims.token_type != "access" {
return Err(JwtError::ValidationFailed("Not an access token".to_string()));
}
Ok(claims)
}
/// Validate a refresh token — rejects access tokens
pub fn validate_refresh_token(&self, token: &str) -> Result<JwtClaims, JwtError> {
let claims = self.validate_token(token)?;
if claims.token_type != "refresh" {
return Err(JwtError::ValidationFailed("Not a refresh token".to_string()));
}
Ok(claims)
}
/// Get the user ID (subject) from a token without full validation /// Get the user ID (subject) from a token without full validation
/// Useful for logging/debugging, but should not be trusted for auth /// Useful for logging/debugging, but should not be trusted for auth
pub fn decode_unverified(&self, token: &str) -> Result<JwtClaims, JwtError> { pub fn decode_unverified(&self, token: &str) -> Result<JwtClaims, JwtError> {
let mut validation = Validation::new(Algorithm::HS256); let token_data = jsonwebtoken::dangerous::insecure_decode::<JwtClaims>(token)
validation.insecure_disable_signature_validation();
validation.validate_exp = false;
let token_data = decode::<JwtClaims>(token, &self.decoding_key, &validation)
.map_err(|_| JwtError::InvalidFormat)?; .map_err(|_| JwtError::InvalidFormat)?;
Ok(token_data.claims) Ok(token_data.claims)
@@ -232,6 +282,7 @@ mod tests {
None, None,
None, None,
None, None,
None,
true, // Production mode true, // Production mode
); );
@@ -245,6 +296,7 @@ mod tests {
None, None,
None, None,
None, None,
None,
false, // Development mode false, // Development mode
); );

View File

@@ -2,7 +2,7 @@ use chrono::{DateTime, Utc};
use sqlx::FromRow; use sqlx::FromRow;
use uuid::Uuid; use uuid::Uuid;
use domain::{AccessMode, Channel, ChannelId, DomainError, LogoPosition, RecyclePolicy, ScheduleConfig, UserId}; use domain::{AccessMode, Channel, ChannelId, DomainError, LogoPosition, RecyclePolicy, ScheduleConfig, ScheduleConfigCompat, UserId};
#[derive(Debug, FromRow)] #[derive(Debug, FromRow)]
pub(super) struct ChannelRow { pub(super) struct ChannelRow {
@@ -44,10 +44,11 @@ impl TryFrom<ChannelRow> for Channel {
.map_err(|e| DomainError::RepositoryError(format!("Invalid channel UUID: {}", e)))?; .map_err(|e| DomainError::RepositoryError(format!("Invalid channel UUID: {}", e)))?;
let owner_id: UserId = Uuid::parse_str(&row.owner_id) let owner_id: UserId = Uuid::parse_str(&row.owner_id)
.map_err(|e| DomainError::RepositoryError(format!("Invalid owner UUID: {}", e)))?; .map_err(|e| DomainError::RepositoryError(format!("Invalid owner UUID: {}", e)))?;
let schedule_config: ScheduleConfig = serde_json::from_str(&row.schedule_config) let schedule_config: ScheduleConfig = serde_json::from_str::<ScheduleConfigCompat>(&row.schedule_config)
.map_err(|e| { .map_err(|e| {
DomainError::RepositoryError(format!("Invalid schedule_config JSON: {}", e)) DomainError::RepositoryError(format!("Invalid schedule_config JSON: {}", e))
})?; })
.map(ScheduleConfig::from)?;
let recycle_policy: RecyclePolicy = serde_json::from_str(&row.recycle_policy) let recycle_policy: RecyclePolicy = serde_json::from_str(&row.recycle_policy)
.map_err(|e| { .map_err(|e| {
DomainError::RepositoryError(format!("Invalid recycle_policy JSON: {}", e)) DomainError::RepositoryError(format!("Invalid recycle_policy JSON: {}", e))

View File

@@ -1,6 +1,9 @@
use async_trait::async_trait; use async_trait::async_trait;
use chrono::{DateTime, Utc};
use sqlx::Row;
use uuid::Uuid;
use domain::{Channel, ChannelId, ChannelRepository, DomainError, DomainResult, UserId}; use domain::{Channel, ChannelConfigSnapshot, ChannelId, ChannelRepository, DomainError, DomainResult, ScheduleConfig, ScheduleConfigCompat, UserId};
use super::mapping::{ChannelRow, SELECT_COLS}; use super::mapping::{ChannelRow, SELECT_COLS};
@@ -139,4 +142,129 @@ impl ChannelRepository for SqliteChannelRepository {
Ok(()) Ok(())
} }
async fn save_config_snapshot(
&self,
channel_id: ChannelId,
config: &ScheduleConfig,
label: Option<String>,
) -> DomainResult<ChannelConfigSnapshot> {
let id = Uuid::new_v4();
let now = Utc::now();
let config_json = serde_json::to_string(config)
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
let mut tx = self.pool.begin().await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
let version_num: i64 = sqlx::query_scalar(
"SELECT COALESCE(MAX(version_num), 0) + 1 FROM channel_config_snapshots WHERE channel_id = ?"
)
.bind(channel_id.to_string())
.fetch_one(&mut *tx)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
sqlx::query(
"INSERT INTO channel_config_snapshots (id, channel_id, config_json, version_num, label, created_at)
VALUES (?, ?, ?, ?, ?, ?)"
)
.bind(id.to_string())
.bind(channel_id.to_string())
.bind(&config_json)
.bind(version_num)
.bind(&label)
.bind(now.to_rfc3339())
.execute(&mut *tx)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
tx.commit().await.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
Ok(ChannelConfigSnapshot { id, channel_id, config: config.clone(), version_num, label, created_at: now })
}
async fn list_config_snapshots(
&self,
channel_id: ChannelId,
) -> DomainResult<Vec<ChannelConfigSnapshot>> {
let rows = sqlx::query(
"SELECT id, config_json, version_num, label, created_at
FROM channel_config_snapshots WHERE channel_id = ?
ORDER BY version_num DESC"
)
.bind(channel_id.to_string())
.fetch_all(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
rows.iter().map(|row| {
let id: Uuid = row.get::<String, _>("id").parse()
.map_err(|_| DomainError::RepositoryError("bad uuid".into()))?;
let config_json: String = row.get("config_json");
let config_compat: ScheduleConfigCompat = serde_json::from_str(&config_json)
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
let config: ScheduleConfig = config_compat.into();
let version_num: i64 = row.get("version_num");
let label: Option<String> = row.get("label");
let created_at_str: String = row.get("created_at");
let created_at = created_at_str.parse::<DateTime<Utc>>()
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
Ok(ChannelConfigSnapshot { id, channel_id, config, version_num, label, created_at })
}).collect()
}
async fn get_config_snapshot(
&self,
channel_id: ChannelId,
snapshot_id: Uuid,
) -> DomainResult<Option<ChannelConfigSnapshot>> {
let row = sqlx::query(
"SELECT id, config_json, version_num, label, created_at
FROM channel_config_snapshots WHERE id = ? AND channel_id = ?"
)
.bind(snapshot_id.to_string())
.bind(channel_id.to_string())
.fetch_optional(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
match row {
None => Ok(None),
Some(row) => {
let config_json: String = row.get("config_json");
let config_compat: ScheduleConfigCompat = serde_json::from_str(&config_json)
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
let config: ScheduleConfig = config_compat.into();
let version_num: i64 = row.get("version_num");
let label: Option<String> = row.get("label");
let created_at_str: String = row.get("created_at");
let created_at = created_at_str.parse::<DateTime<Utc>>()
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
Ok(Some(ChannelConfigSnapshot { id: snapshot_id, channel_id, config, version_num, label, created_at }))
}
}
}
async fn patch_config_snapshot_label(
&self,
channel_id: ChannelId,
snapshot_id: Uuid,
label: Option<String>,
) -> DomainResult<Option<ChannelConfigSnapshot>> {
let updated = sqlx::query(
"UPDATE channel_config_snapshots SET label = ? WHERE id = ? AND channel_id = ? RETURNING id"
)
.bind(&label)
.bind(snapshot_id.to_string())
.bind(channel_id.to_string())
.fetch_optional(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
if updated.is_none() {
return Ok(None);
}
self.get_config_snapshot(channel_id, snapshot_id).await
}
} }

View File

@@ -1,7 +1,7 @@
use std::sync::Arc; use std::sync::Arc;
use crate::db::DatabasePool; use crate::db::DatabasePool;
use domain::{ActivityLogRepository, ChannelRepository, ProviderConfigRepository, ScheduleRepository, TranscodeSettingsRepository, UserRepository}; use domain::{ActivityLogRepository, ChannelRepository, IAppSettingsRepository, ILibraryRepository, ProviderConfigRepository, ScheduleRepository, TranscodeSettingsRepository, UserRepository};
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]
pub enum FactoryError { pub enum FactoryError {
@@ -119,6 +119,36 @@ pub async fn build_transcode_settings_repository(
} }
} }
pub async fn build_library_repository(
pool: &DatabasePool,
) -> FactoryResult<Arc<dyn ILibraryRepository>> {
match pool {
#[cfg(feature = "sqlite")]
DatabasePool::Sqlite(pool) => Ok(Arc::new(
crate::library_repository::SqliteLibraryRepository::new(pool.clone()),
)),
#[allow(unreachable_patterns)]
_ => Err(FactoryError::NotImplemented(
"LibraryRepository not implemented for this database".to_string(),
)),
}
}
pub async fn build_app_settings_repository(
pool: &DatabasePool,
) -> FactoryResult<Arc<dyn IAppSettingsRepository>> {
match pool {
#[cfg(feature = "sqlite")]
DatabasePool::Sqlite(pool) => Ok(Arc::new(
crate::app_settings_repository::SqliteAppSettingsRepository::new(pool.clone()),
)),
#[allow(unreachable_patterns)]
_ => Err(FactoryError::NotImplemented(
"AppSettingsRepository not implemented for this database".to_string(),
)),
}
}
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
pub struct LocalFilesBundle { pub struct LocalFilesBundle {
pub provider: Arc<crate::LocalFilesProvider>, pub provider: Arc<crate::LocalFilesProvider>,
@@ -133,6 +163,7 @@ pub async fn build_local_files_bundle(
transcode_dir: Option<std::path::PathBuf>, transcode_dir: Option<std::path::PathBuf>,
cleanup_ttl_hours: u32, cleanup_ttl_hours: u32,
base_url: String, base_url: String,
provider_id: &str,
) -> FactoryResult<LocalFilesBundle> { ) -> FactoryResult<LocalFilesBundle> {
match pool { match pool {
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
@@ -143,7 +174,7 @@ pub async fn build_local_files_bundle(
transcode_dir: transcode_dir.clone(), transcode_dir: transcode_dir.clone(),
cleanup_ttl_hours, cleanup_ttl_hours,
}; };
let idx = Arc::new(crate::LocalIndex::new(&cfg, sqlite_pool.clone()).await); let idx = Arc::new(crate::LocalIndex::new(&cfg, sqlite_pool.clone(), provider_id.to_string()).await);
let tm = transcode_dir.as_ref().map(|td| { let tm = transcode_dir.as_ref().map(|td| {
std::fs::create_dir_all(td).ok(); std::fs::create_dir_all(td).ok();
crate::TranscodeManager::new(td.clone(), cleanup_ttl_hours) crate::TranscodeManager::new(td.clone(), cleanup_ttl_hours)

View File

@@ -31,5 +31,8 @@ pub(super) fn map_jellyfin_item(item: JellyfinItem) -> Option<MediaItem> {
series_name: item.series_name, series_name: item.series_name,
season_number: item.parent_index_number, season_number: item.parent_index_number,
episode_number: item.index_number, episode_number: item.index_number,
// TODO(library-sync): populate thumbnail_url from Jellyfin image API and collection_id from parent_id when FullSyncAdapter is implemented (Task 5)
thumbnail_url: None,
collection_id: None,
}) })
} }

View File

@@ -73,6 +73,10 @@ impl JellyfinMediaProvider {
// requested — season first, then episode within the season. // requested — season first, then episode within the season.
params.push(("SortBy", "ParentIndexNumber,IndexNumber".into())); params.push(("SortBy", "ParentIndexNumber,IndexNumber".into()));
params.push(("SortOrder", "Ascending".into())); params.push(("SortOrder", "Ascending".into()));
// Prevent Jellyfin from returning Season/Series container items.
if filter.content_type.is_none() {
params.push(("IncludeItemTypes", "Episode".into()));
}
} else { } else {
// No series filter — scope to the collection (library) if one is set. // No series filter — scope to the collection (library) if one is set.
if let Some(parent_id) = filter.collections.first() { if let Some(parent_id) = filter.collections.first() {
@@ -376,14 +380,13 @@ impl IMediaProvider for JellyfinMediaProvider {
if resp.status().is_success() { if resp.status().is_success() {
let info: JellyfinPlaybackInfoResponse = resp.json().await let info: JellyfinPlaybackInfoResponse = resp.json().await
.map_err(|e| DomainError::InfrastructureError(format!("PlaybackInfo parse failed: {e}")))?; .map_err(|e| DomainError::InfrastructureError(format!("PlaybackInfo parse failed: {e}")))?;
if let Some(src) = info.media_sources.first() { if let Some(src) = info.media_sources.first()
if src.supports_direct_stream { && src.supports_direct_stream
if let Some(rel_url) = &src.direct_stream_url { && let Some(rel_url) = &src.direct_stream_url
{
return Ok(format!("{}{}&api_key={}", self.config.base_url, rel_url, self.config.api_key)); return Ok(format!("{}{}&api_key={}", self.config.base_url, rel_url, self.config.api_key));
} }
} }
}
}
// Fallback: HLS at 8 Mbps // Fallback: HLS at 8 Mbps
Ok(self.hls_url(item_id, 8_000_000)) Ok(self.hls_url(item_id, 8_000_000))
} }

View File

@@ -18,8 +18,13 @@ pub mod db;
pub mod factory; pub mod factory;
pub mod jellyfin; pub mod jellyfin;
pub mod provider_registry; pub mod provider_registry;
mod library_sync;
pub use library_sync::FullSyncAdapter;
mod app_settings_repository;
mod activity_log_repository; mod activity_log_repository;
mod channel_repository; mod channel_repository;
mod library_repository;
mod provider_config_repository; mod provider_config_repository;
mod schedule_repository; mod schedule_repository;
mod transcode_settings_repository; mod transcode_settings_repository;
@@ -32,6 +37,8 @@ pub mod local_files;
pub use db::run_migrations; pub use db::run_migrations;
pub use provider_registry::ProviderRegistry; pub use provider_registry::ProviderRegistry;
#[cfg(feature = "sqlite")]
pub use app_settings_repository::SqliteAppSettingsRepository;
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
pub use activity_log_repository::SqliteActivityLogRepository; pub use activity_log_repository::SqliteActivityLogRepository;
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
@@ -44,6 +51,8 @@ pub use provider_config_repository::SqliteProviderConfigRepository;
pub use schedule_repository::SqliteScheduleRepository; pub use schedule_repository::SqliteScheduleRepository;
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
pub use transcode_settings_repository::SqliteTranscodeSettingsRepository; pub use transcode_settings_repository::SqliteTranscodeSettingsRepository;
#[cfg(feature = "sqlite")]
pub use library_repository::SqliteLibraryRepository;
pub use domain::TranscodeSettingsRepository; pub use domain::TranscodeSettingsRepository;

View File

@@ -0,0 +1,508 @@
//! SQLite implementation of ILibraryRepository.
use async_trait::async_trait;
use sqlx::SqlitePool;
use domain::{
ContentType, DomainError, DomainResult, ILibraryRepository,
LibraryCollection, LibraryItem, LibrarySearchFilter, LibrarySyncLogEntry, LibrarySyncResult,
SeasonSummary, ShowSummary,
};
/// SQLite-backed implementation of the domain `ILibraryRepository`.
///
/// Holds a shared connection pool; all queries go through it.
pub struct SqliteLibraryRepository {
    pool: SqlitePool,
}

impl SqliteLibraryRepository {
    /// Wrap an existing connection pool. No I/O happens here; the schema is
    /// expected to be created by migrations before first use.
    pub fn new(pool: SqlitePool) -> Self {
        Self { pool }
    }
}
fn content_type_str(ct: &ContentType) -> &'static str {
match ct {
ContentType::Movie => "movie",
ContentType::Episode => "episode",
ContentType::Short => "short",
}
}
/// Inverse of `content_type_str`: decode a stored content-type string.
/// Unrecognized values fall back to `Movie`, matching how rows were
/// previously interpreted.
fn parse_content_type(raw: &str) -> ContentType {
    if raw == "episode" {
        ContentType::Episode
    } else if raw == "short" {
        ContentType::Short
    } else {
        ContentType::Movie
    }
}
// NOTE(review): queries in this impl build WHERE clauses with `format!`
// rather than bound parameters. Single quotes are doubled ('' escaping),
// which blocks quote injection, but consider migrating to `.bind()` for
// uniformity with the rest of the file.
#[async_trait]
impl ILibraryRepository for SqliteLibraryRepository {
    /// Paged search over `library_items` with a composite AND filter.
    /// Returns the matching page plus the total (un-paged) match count.
    ///
    /// NOTE(review): `%` / `_` in `search_term` act as LIKE wildcards since
    /// they are not escaped — confirm that is intended.
    async fn search(&self, filter: &LibrarySearchFilter) -> DomainResult<(Vec<LibraryItem>, u32)> {
        // One SQL predicate per active filter field; all are ANDed together.
        let mut conditions: Vec<String> = vec![];
        if let Some(ref p) = filter.provider_id {
            conditions.push(format!("provider_id = '{}'", p.replace('\'', "''")));
        }
        if let Some(ref ct) = filter.content_type {
            conditions.push(format!("content_type = '{}'", content_type_str(ct)));
        }
        if let Some(ref st) = filter.search_term {
            conditions.push(format!("title LIKE '%{}%'", st.replace('\'', "''")));
        }
        if let Some(ref cid) = filter.collection_id {
            conditions.push(format!("collection_id = '{}'", cid.replace('\'', "''")));
        }
        if let Some(decade) = filter.decade {
            // A decade filter covers the half-open range [decade, decade + 10).
            let end = decade + 10;
            conditions.push(format!("year >= {} AND year < {}", decade, end));
        }
        if let Some(min) = filter.min_duration_secs {
            conditions.push(format!("duration_secs >= {}", min));
        }
        if let Some(max) = filter.max_duration_secs {
            conditions.push(format!("duration_secs <= {}", max));
        }
        if !filter.series_names.is_empty() {
            let quoted: Vec<String> = filter.series_names.iter()
                .map(|s| format!("'{}'", s.replace('\'', "''")))
                .collect();
            conditions.push(format!("series_name IN ({})", quoted.join(",")));
        }
        if !filter.genres.is_empty() {
            // `genres` is stored as a JSON array string; membership is tested
            // with json_each. Multiple requested genres are ORed (any-match).
            let genre_conditions: Vec<String> = filter.genres.iter()
                .map(|g| format!("EXISTS (SELECT 1 FROM json_each(library_items.genres) WHERE value = '{}')", g.replace('\'', "''")))
                .collect();
            conditions.push(format!("({})", genre_conditions.join(" OR ")));
        }
        if let Some(sn) = filter.season_number {
            conditions.push(format!("season_number = {}", sn));
        }
        let where_clause = if conditions.is_empty() {
            String::new()
        } else {
            format!("WHERE {}", conditions.join(" AND "))
        };
        // Total count first (same WHERE, no paging) so callers can paginate.
        let count_sql = format!("SELECT COUNT(*) FROM library_items {}", where_clause);
        let total: i64 = sqlx::query_scalar(&count_sql)
            .fetch_one(&self.pool)
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        let items_sql = format!(
            "SELECT * FROM library_items {} ORDER BY title ASC LIMIT {} OFFSET {}",
            where_clause, filter.limit, filter.offset
        );
        // `SELECT *` + FromRow maps columns by name onto LibraryItemRow.
        let rows = sqlx::query_as::<_, LibraryItemRow>(&items_sql)
            .fetch_all(&self.pool)
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok((rows.into_iter().map(Into::into).collect(), total as u32))
    }

    /// Fetch a single item by its primary-key `id`, or `None` if absent.
    async fn get_by_id(&self, id: &str) -> DomainResult<Option<LibraryItem>> {
        let row = sqlx::query_as::<_, LibraryItemRow>(
            "SELECT * FROM library_items WHERE id = ?"
        )
        .bind(id)
        .fetch_optional(&self.pool)
        .await
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(row.map(Into::into))
    }

    /// Distinct collections present in the synced items, optionally scoped to
    /// one provider. Collection info is denormalized onto each item row,
    /// hence the DISTINCT projection instead of a dedicated table.
    async fn list_collections(&self, provider_id: Option<&str>) -> DomainResult<Vec<LibraryCollection>> {
        let rows: Vec<(String, Option<String>, Option<String>)> = if let Some(p) = provider_id {
            sqlx::query_as::<_, (String, Option<String>, Option<String>)>(
                "SELECT DISTINCT collection_id, collection_name, collection_type
                 FROM library_items WHERE collection_id IS NOT NULL AND provider_id = ?
                 ORDER BY collection_name ASC"
            ).bind(p).fetch_all(&self.pool).await
        } else {
            sqlx::query_as::<_, (String, Option<String>, Option<String>)>(
                "SELECT DISTINCT collection_id, collection_name, collection_type
                 FROM library_items WHERE collection_id IS NOT NULL
                 ORDER BY collection_name ASC"
            ).fetch_all(&self.pool).await
        }.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(rows.into_iter().map(|(id, name, ct)| LibraryCollection {
            id,
            // A NULL collection_name is mapped to "" rather than dropped.
            name: name.unwrap_or_default(),
            collection_type: ct,
        }).collect())
    }

    /// Distinct series names among synced items, optionally per provider.
    async fn list_series(&self, provider_id: Option<&str>) -> DomainResult<Vec<String>> {
        let rows: Vec<(String,)> = if let Some(p) = provider_id {
            sqlx::query_as(
                "SELECT DISTINCT series_name FROM library_items
                 WHERE series_name IS NOT NULL AND provider_id = ? ORDER BY series_name ASC"
            ).bind(p).fetch_all(&self.pool).await
        } else {
            sqlx::query_as(
                "SELECT DISTINCT series_name FROM library_items
                 WHERE series_name IS NOT NULL ORDER BY series_name ASC"
            ).fetch_all(&self.pool).await
        }.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(rows.into_iter().map(|(s,)| s).collect())
    }

    /// Distinct genre values, expanded from the JSON `genres` column via
    /// json_each, optionally filtered by content type and/or provider.
    async fn list_genres(&self, content_type: Option<&ContentType>, provider_id: Option<&str>) -> DomainResult<Vec<String>> {
        // Four static query shapes instead of dynamic predicate assembly.
        let sql = match (content_type, provider_id) {
            (Some(ct), Some(p)) => format!(
                "SELECT DISTINCT je.value FROM library_items li, json_each(li.genres) je
                 WHERE li.content_type = '{}' AND li.provider_id = '{}' ORDER BY je.value ASC",
                content_type_str(ct), p.replace('\'', "''")
            ),
            (Some(ct), None) => format!(
                "SELECT DISTINCT je.value FROM library_items li, json_each(li.genres) je
                 WHERE li.content_type = '{}' ORDER BY je.value ASC",
                content_type_str(ct)
            ),
            (None, Some(p)) => format!(
                "SELECT DISTINCT je.value FROM library_items li, json_each(li.genres) je
                 WHERE li.provider_id = '{}' ORDER BY je.value ASC",
                p.replace('\'', "''")
            ),
            (None, None) => "SELECT DISTINCT je.value FROM library_items li, json_each(li.genres) je ORDER BY je.value ASC".to_string(),
        };
        let rows: Vec<(String,)> = sqlx::query_as(&sql)
            .fetch_all(&self.pool)
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(rows.into_iter().map(|(s,)| s).collect())
    }

    /// Insert or replace a batch of items inside one transaction.
    /// `INSERT OR REPLACE` keys on the `id` primary key, so re-synced items
    /// overwrite their previous row. `_provider_id` is unused here because
    /// each item already carries its own `provider_id`.
    async fn upsert_items(&self, _provider_id: &str, items: Vec<LibraryItem>) -> DomainResult<()> {
        let mut tx = self.pool.begin().await.map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        for item in items {
            sqlx::query(
                "INSERT OR REPLACE INTO library_items
                 (id, provider_id, external_id, title, content_type, duration_secs,
                  series_name, season_number, episode_number, year, genres, tags,
                  collection_id, collection_name, collection_type, thumbnail_url, synced_at)
                 VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"
            )
            .bind(&item.id).bind(&item.provider_id).bind(&item.external_id)
            .bind(&item.title).bind(content_type_str(&item.content_type))
            .bind(item.duration_secs as i64)
            .bind(&item.series_name).bind(item.season_number.map(|n| n as i64))
            .bind(item.episode_number.map(|n| n as i64))
            .bind(item.year.map(|n| n as i64))
            // Genres/tags are serialized as JSON array strings; on serialize
            // failure an empty string is stored (best-effort).
            .bind(serde_json::to_string(&item.genres).unwrap_or_default())
            .bind(serde_json::to_string(&item.tags).unwrap_or_default())
            .bind(&item.collection_id).bind(&item.collection_name)
            .bind(&item.collection_type).bind(&item.thumbnail_url)
            .bind(&item.synced_at)
            .execute(&mut *tx)
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        }
        tx.commit().await.map_err(|e| DomainError::InfrastructureError(e.to_string()))
    }

    /// Delete every library item belonging to `provider_id`.
    async fn clear_provider(&self, provider_id: &str) -> DomainResult<()> {
        sqlx::query("DELETE FROM library_items WHERE provider_id = ?")
            .bind(provider_id)
            .execute(&self.pool)
            .await
            .map(|_| ())
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))
    }

    /// Record the start of a sync run ('running') and return the new log id.
    /// Uses `RETURNING` — NOTE(review): requires a SQLite version that
    /// supports it (3.35+); confirm the minimum deployed version.
    async fn log_sync_start(&self, provider_id: &str) -> DomainResult<i64> {
        let now = chrono::Utc::now().to_rfc3339();
        let id = sqlx::query_scalar::<_, i64>(
            "INSERT INTO library_sync_log (provider_id, started_at, status)
             VALUES (?, ?, 'running') RETURNING id"
        )
        .bind(provider_id).bind(&now)
        .fetch_one(&self.pool)
        .await
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(id)
    }

    /// Close out a sync-log row: status becomes 'done' on success or 'error'
    /// when the result carries an error message.
    async fn log_sync_finish(&self, log_id: i64, result: &LibrarySyncResult) -> DomainResult<()> {
        let now = chrono::Utc::now().to_rfc3339();
        let status = if result.error.is_none() { "done" } else { "error" };
        sqlx::query(
            "UPDATE library_sync_log
             SET finished_at = ?, items_found = ?, status = ?, error_msg = ?
             WHERE id = ?"
        )
        .bind(&now).bind(result.items_found as i64)
        .bind(status).bind(&result.error).bind(log_id)
        .execute(&self.pool)
        .await
        .map(|_| ())
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))
    }

    /// The most recent sync-log entry per provider (MAX(id) per group),
    /// newest start time first.
    async fn latest_sync_status(&self) -> DomainResult<Vec<LibrarySyncLogEntry>> {
        let rows = sqlx::query_as::<_, SyncLogRow>(
            "SELECT * FROM library_sync_log
             WHERE id IN (
                 SELECT MAX(id) FROM library_sync_log GROUP BY provider_id
             )
             ORDER BY started_at DESC"
        )
        .fetch_all(&self.pool)
        .await
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(rows.into_iter().map(|r| LibrarySyncLogEntry {
            id: r.id, provider_id: r.provider_id, started_at: r.started_at,
            finished_at: r.finished_at, items_found: r.items_found as u32,
            status: r.status, error_msg: r.error_msg,
        }).collect())
    }

    /// True when any sync-log row for the provider is still in 'running'.
    /// NOTE(review): a crashed sync never leaves 'running', which would block
    /// future syncs — confirm there is a recovery path elsewhere.
    async fn is_sync_running(&self, provider_id: &str) -> DomainResult<bool> {
        let count: i64 = sqlx::query_scalar(
            "SELECT COUNT(*) FROM library_sync_log WHERE provider_id = ? AND status = 'running'"
        )
        .bind(provider_id)
        .fetch_one(&self.pool)
        .await
        .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(count > 0)
    }

    /// Aggregate episodes into one row per series: episode count, distinct
    /// season count, a representative thumbnail (MAX picks an arbitrary
    /// non-NULL URL), and the merged set of genres.
    async fn list_shows(
        &self,
        provider_id: Option<&str>,
        search_term: Option<&str>,
        genres: &[String],
    ) -> DomainResult<Vec<ShowSummary>> {
        // Only episode rows with a series name participate in grouping.
        let mut conditions = vec![
            "content_type = 'episode'".to_string(),
            "series_name IS NOT NULL".to_string(),
        ];
        if let Some(p) = provider_id {
            conditions.push(format!("provider_id = '{}'", p.replace('\'', "''")));
        }
        if let Some(st) = search_term {
            let escaped = st.replace('\'', "''");
            // Match either episode title or the series name itself.
            conditions.push(format!(
                "(title LIKE '%{escaped}%' OR series_name LIKE '%{escaped}%')"
            ));
        }
        if !genres.is_empty() {
            // Any-match semantics, same json_each technique as `search`.
            let genre_conditions: Vec<String> = genres
                .iter()
                .map(|g| format!(
                    "EXISTS (SELECT 1 FROM json_each(library_items.genres) WHERE value = '{}')",
                    g.replace('\'', "''")
                ))
                .collect();
            conditions.push(format!("({})", genre_conditions.join(" OR ")));
        }
        let where_clause = format!("WHERE {}", conditions.join(" AND "));
        let sql = format!(
            "SELECT series_name, COUNT(*) AS episode_count, COUNT(DISTINCT season_number) AS season_count, MAX(thumbnail_url) AS thumbnail_url, GROUP_CONCAT(genres, ',') AS genres_blob FROM library_items {} GROUP BY series_name ORDER BY series_name ASC",
            where_clause
        );
        let rows = sqlx::query_as::<_, ShowSummaryRow>(&sql)
            .fetch_all(&self.pool)
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(rows
            .into_iter()
            .map(|r| {
                // `genres_blob` is GROUP_CONCAT of per-episode JSON arrays,
                // e.g. `["A","B"],["C"]`. Split on `],[`, strip brackets and
                // quotes, and dedupe through a HashSet.
                // NOTE(review): HashSet iteration order is unspecified, so
                // the resulting genre order is nondeterministic — sort if the
                // UI needs a stable order.
                let genres: Vec<String> = r
                    .genres_blob
                    .split("],[")
                    .flat_map(|chunk| {
                        let cleaned = chunk.trim_start_matches('[').trim_end_matches(']');
                        cleaned
                            .split(',')
                            .filter_map(|s| {
                                let s = s.trim().trim_matches('"');
                                if s.is_empty() { None } else { Some(s.to_string()) }
                            })
                            .collect::<Vec<_>>()
                    })
                    .collect::<std::collections::HashSet<_>>()
                    .into_iter()
                    .collect();
                ShowSummary {
                    series_name: r.series_name,
                    episode_count: r.episode_count as u32,
                    season_count: r.season_count as u32,
                    thumbnail_url: r.thumbnail_url,
                    genres,
                }
            })
            .collect())
    }

    /// Per-season episode counts for one series, optionally per provider.
    ///
    /// NOTE(review): there is no `season_number IS NOT NULL` predicate, so an
    /// episode without a season would produce a NULL group that fails to
    /// decode into `SeasonSummaryRow.season_number: i64` — confirm episodes
    /// always carry a season, or add the filter.
    async fn list_seasons(
        &self,
        series_name: &str,
        provider_id: Option<&str>,
    ) -> DomainResult<Vec<SeasonSummary>> {
        let mut conditions = vec![
            format!("series_name = '{}'", series_name.replace('\'', "''")),
            "content_type = 'episode'".to_string(),
        ];
        if let Some(p) = provider_id {
            conditions.push(format!("provider_id = '{}'", p.replace('\'', "''")));
        }
        let where_clause = format!("WHERE {}", conditions.join(" AND "));
        let sql = format!(
            "SELECT season_number, COUNT(*) AS episode_count, MAX(thumbnail_url) AS thumbnail_url FROM library_items {} GROUP BY season_number ORDER BY season_number ASC",
            where_clause
        );
        let rows = sqlx::query_as::<_, SeasonSummaryRow>(&sql)
            .fetch_all(&self.pool)
            .await
            .map_err(|e| DomainError::InfrastructureError(e.to_string()))?;
        Ok(rows
            .into_iter()
            .map(|r| SeasonSummary {
                season_number: r.season_number as u32,
                episode_count: r.episode_count as u32,
                thumbnail_url: r.thumbnail_url,
            })
            .collect())
    }
}
// ── SQLx row types ─────────────────────────────────────────────────────────
/// Flat SQLx row mirroring the `library_items` table. Converted to the
/// domain `LibraryItem` via `From`; JSON columns (`genres`, `tags`) are
/// decoded during that conversion.
#[derive(sqlx::FromRow)]
struct LibraryItemRow {
    id: String, provider_id: String, external_id: String, title: String,
    content_type: String, duration_secs: i64,
    series_name: Option<String>, season_number: Option<i64>, episode_number: Option<i64>,
    year: Option<i64>, genres: String, tags: String,
    collection_id: Option<String>, collection_name: Option<String>, collection_type: Option<String>,
    thumbnail_url: Option<String>, synced_at: String,
}
impl From<LibraryItemRow> for LibraryItem {
fn from(r: LibraryItemRow) -> Self {
Self {
id: r.id, provider_id: r.provider_id, external_id: r.external_id,
title: r.title, content_type: parse_content_type(&r.content_type),
duration_secs: r.duration_secs as u32,
series_name: r.series_name,
season_number: r.season_number.map(|n| n as u32),
episode_number: r.episode_number.map(|n| n as u32),
year: r.year.map(|n| n as u16),
genres: serde_json::from_str(&r.genres).unwrap_or_default(),
tags: serde_json::from_str(&r.tags).unwrap_or_default(),
collection_id: r.collection_id, collection_name: r.collection_name,
collection_type: r.collection_type, thumbnail_url: r.thumbnail_url,
synced_at: r.synced_at,
}
}
}
/// Row of the `library_sync_log` table — one entry per sync attempt.
#[derive(sqlx::FromRow)]
struct SyncLogRow {
    id: i64, provider_id: String, started_at: String, finished_at: Option<String>,
    items_found: i64, status: String, error_msg: Option<String>,
}
/// Aggregated row produced by the `GROUP BY series_name` query in
/// `list_shows`. `genres_blob` is a GROUP_CONCAT of the grouped rows'
/// JSON genre-array strings; it is parsed apart in `list_shows`.
#[derive(sqlx::FromRow)]
struct ShowSummaryRow {
    series_name: String,
    episode_count: i64,
    season_count: i64,
    thumbnail_url: Option<String>,
    genres_blob: String,
}
/// Aggregated row produced by the `GROUP BY season_number` query in
/// `list_seasons`.
/// NOTE(review): `season_number` is non-optional here, so a NULL season in
/// the grouped rows would fail to decode — see the note on `list_seasons`.
#[derive(sqlx::FromRow)]
struct SeasonSummaryRow {
    season_number: i64,
    episode_count: i64,
    thumbnail_url: Option<String>,
}
#[cfg(test)]
mod tests {
    use super::*;
    use sqlx::SqlitePool;
    use domain::{LibraryItem, LibrarySearchFilter, ContentType};

    /// Build an in-memory database with the two tables this repository
    /// touches; the schema mirrors the real migrations closely enough for
    /// these tests.
    ///
    /// NOTE(review): with `SqlitePool::connect(":memory:")` each pooled
    /// connection can end up with its own private in-memory database —
    /// presumably fine while a single connection is reused, but consider
    /// capping the pool at one connection to avoid flakiness. TODO confirm.
    async fn setup() -> SqlitePool {
        let pool = SqlitePool::connect(":memory:").await.unwrap();
        sqlx::query(
            "CREATE TABLE library_items (
                id TEXT PRIMARY KEY, provider_id TEXT NOT NULL, external_id TEXT NOT NULL,
                title TEXT NOT NULL, content_type TEXT NOT NULL, duration_secs INTEGER NOT NULL DEFAULT 0,
                series_name TEXT, season_number INTEGER, episode_number INTEGER, year INTEGER,
                genres TEXT NOT NULL DEFAULT '[]', tags TEXT NOT NULL DEFAULT '[]',
                collection_id TEXT, collection_name TEXT, collection_type TEXT,
                thumbnail_url TEXT, synced_at TEXT NOT NULL
            )"
        ).execute(&pool).await.unwrap();
        sqlx::query(
            "CREATE TABLE library_sync_log (
                id INTEGER PRIMARY KEY AUTOINCREMENT, provider_id TEXT NOT NULL,
                started_at TEXT NOT NULL, finished_at TEXT, items_found INTEGER NOT NULL DEFAULT 0,
                status TEXT NOT NULL DEFAULT 'running', error_msg TEXT
            )"
        ).execute(&pool).await.unwrap();
        pool
    }

    /// Minimal movie fixture; only id/provider/title vary between tests.
    fn make_item(id: &str, provider: &str, title: &str) -> LibraryItem {
        LibraryItem {
            id: id.to_string(), provider_id: provider.to_string(), external_id: id.to_string(),
            title: title.to_string(), content_type: ContentType::Movie,
            duration_secs: 3600, series_name: None, season_number: None, episode_number: None,
            year: Some(2020), genres: vec!["Action".to_string()], tags: vec![],
            collection_id: None, collection_name: None, collection_type: None,
            thumbnail_url: None, synced_at: "2026-03-19T00:00:00Z".to_string(),
        }
    }

    /// Round-trip: an upserted item is found by an unfiltered search.
    #[tokio::test]
    async fn upsert_then_search_returns_items() {
        let pool = setup().await;
        let repo = SqliteLibraryRepository::new(pool);
        let items = vec![make_item("jellyfin::1", "jellyfin", "Movie A")];
        repo.upsert_items("jellyfin", items).await.unwrap();
        let (results, total) = repo.search(&LibrarySearchFilter { limit: 50, ..Default::default() }).await.unwrap();
        assert_eq!(total, 1);
        assert_eq!(results[0].title, "Movie A");
    }

    /// `clear_provider` must only delete the named provider's rows.
    #[tokio::test]
    async fn clear_provider_removes_only_that_provider() {
        let pool = setup().await;
        let repo = SqliteLibraryRepository::new(pool);
        repo.upsert_items("jellyfin", vec![make_item("jellyfin::1", "jellyfin", "Jelly Movie")]).await.unwrap();
        repo.upsert_items("local", vec![make_item("local::1", "local", "Local Movie")]).await.unwrap();
        repo.clear_provider("jellyfin").await.unwrap();
        let (results, _) = repo.search(&LibrarySearchFilter { limit: 50, ..Default::default() }).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].provider_id, "local");
    }

    /// Running state flips on at log_sync_start and off at log_sync_finish.
    #[tokio::test]
    async fn is_sync_running_reflects_status() {
        let pool = setup().await;
        let repo = SqliteLibraryRepository::new(pool);
        assert!(!repo.is_sync_running("jellyfin").await.unwrap());
        let log_id = repo.log_sync_start("jellyfin").await.unwrap();
        assert!(repo.is_sync_running("jellyfin").await.unwrap());
        let result = domain::LibrarySyncResult {
            provider_id: "jellyfin".to_string(), items_found: 5, duration_ms: 100, error: None,
        };
        repo.log_sync_finish(log_id, &result).await.unwrap();
        assert!(!repo.is_sync_running("jellyfin").await.unwrap());
    }
}

View File

@@ -0,0 +1,249 @@
//! Full-sync library sync adapter: truncate + re-insert all provider items.
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Instant;
use async_trait::async_trait;
use domain::{
Collection, ILibraryRepository, IMediaProvider, LibraryItem,
LibrarySyncAdapter, LibrarySyncResult, MediaFilter,
};
/// "Full sync" strategy: wipe a provider's previously-synced library rows
/// and re-insert everything the provider currently reports.
pub struct FullSyncAdapter {
    repo: Arc<dyn ILibraryRepository>,
}

impl FullSyncAdapter {
    /// Build an adapter over the shared library repository. No I/O here.
    pub fn new(repo: Arc<dyn ILibraryRepository>) -> Self {
        Self { repo }
    }
}
#[async_trait]
impl LibrarySyncAdapter for FullSyncAdapter {
async fn sync_provider(
&self,
provider: &dyn IMediaProvider,
provider_id: &str,
) -> LibrarySyncResult {
let start = Instant::now();
// Check for running sync first
match self.repo.is_sync_running(provider_id).await {
Ok(true) => {
return LibrarySyncResult {
provider_id: provider_id.to_string(),
items_found: 0,
duration_ms: 0,
error: Some("sync already running".to_string()),
};
}
Err(e) => {
return LibrarySyncResult {
provider_id: provider_id.to_string(),
items_found: 0,
duration_ms: 0,
error: Some(e.to_string()),
};
}
Ok(false) => {}
}
let log_id = match self.repo.log_sync_start(provider_id).await {
Ok(id) => id,
Err(e) => {
return LibrarySyncResult {
provider_id: provider_id.to_string(),
items_found: 0,
duration_ms: start.elapsed().as_millis() as u64,
error: Some(e.to_string()),
};
}
};
// Fetch collections for name/type enrichment — build a lookup map
let collections: Vec<Collection> = provider.list_collections().await.unwrap_or_default();
let collection_map: HashMap<String, &Collection> =
collections.iter().map(|c| (c.id.clone(), c)).collect();
// Fetch all items
let media_items = match provider.fetch_items(&MediaFilter::default()).await {
Ok(items) => items,
Err(e) => {
let result = LibrarySyncResult {
provider_id: provider_id.to_string(),
items_found: 0,
duration_ms: start.elapsed().as_millis() as u64,
error: Some(e.to_string()),
};
let _ = self.repo.log_sync_finish(log_id, &result).await;
return result;
}
};
let items_found = media_items.len() as u32;
let now = chrono::Utc::now().to_rfc3339();
let library_items: Vec<LibraryItem> = media_items
.into_iter()
.map(|item| {
let raw_id = item.id.into_inner();
let id = format!("{}::{}", provider_id, raw_id);
// Enrich with collection name/type using the lookup map.
let (col_name, col_type) = item.collection_id.as_deref()
.and_then(|cid| collection_map.get(cid))
.map(|c| (Some(c.name.clone()), c.collection_type.clone()))
.unwrap_or((None, None));
LibraryItem {
id,
provider_id: provider_id.to_string(),
external_id: raw_id,
title: item.title,
content_type: item.content_type,
duration_secs: item.duration_secs,
series_name: item.series_name,
season_number: item.season_number,
episode_number: item.episode_number,
year: item.year,
genres: item.genres,
tags: item.tags,
collection_id: item.collection_id,
collection_name: col_name,
collection_type: col_type,
thumbnail_url: item.thumbnail_url,
synced_at: now.clone(),
}
})
.collect();
// Truncate + insert
if let Err(e) = self.repo.clear_provider(provider_id).await {
let result = LibrarySyncResult {
provider_id: provider_id.to_string(),
items_found: 0,
duration_ms: start.elapsed().as_millis() as u64,
error: Some(e.to_string()),
};
let _ = self.repo.log_sync_finish(log_id, &result).await;
return result;
}
let result = match self.repo.upsert_items(provider_id, library_items).await {
Ok(()) => LibrarySyncResult {
provider_id: provider_id.to_string(),
items_found,
duration_ms: start.elapsed().as_millis() as u64,
error: None,
},
Err(e) => LibrarySyncResult {
provider_id: provider_id.to_string(),
items_found: 0,
duration_ms: start.elapsed().as_millis() as u64,
error: Some(e.to_string()),
},
};
let _ = self.repo.log_sync_finish(log_id, &result).await;
result
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::{Arc, Mutex};
    use async_trait::async_trait;
    use domain::*;

    /// Provider stub that returns a canned item list and empty metadata.
    struct MockProvider {
        items: Vec<MediaItem>,
    }

    #[async_trait]
    impl IMediaProvider for MockProvider {
        fn capabilities(&self) -> ProviderCapabilities {
            ProviderCapabilities {
                collections: true,
                series: false,
                genres: false,
                tags: false,
                decade: false,
                search: false,
                streaming_protocol: StreamingProtocol::Hls,
                rescan: false,
                transcode: false,
            }
        }
        async fn fetch_items(&self, _filter: &MediaFilter) -> DomainResult<Vec<MediaItem>> {
            Ok(self.items.clone())
        }
        async fn fetch_by_id(&self, _id: &MediaItemId) -> DomainResult<Option<MediaItem>> { Ok(None) }
        async fn get_stream_url(&self, _id: &MediaItemId, _q: &StreamQuality) -> DomainResult<String> { Ok(String::new()) }
        async fn list_collections(&self) -> DomainResult<Vec<Collection>> { Ok(vec![]) }
        async fn list_series(&self, _col: Option<&str>) -> DomainResult<Vec<SeriesSummary>> { Ok(vec![]) }
        async fn list_genres(&self, _ct: Option<&ContentType>) -> DomainResult<Vec<String>> { Ok(vec![]) }
    }

    /// Repository spy: records what was upserted/cleared, succeeds at
    /// everything, and always reports "not currently syncing".
    struct SpyRepo {
        upserted: Arc<Mutex<Vec<LibraryItem>>>,
        cleared: Arc<Mutex<Vec<String>>>,
    }

    #[async_trait]
    impl ILibraryRepository for SpyRepo {
        async fn search(&self, _f: &LibrarySearchFilter) -> DomainResult<(Vec<LibraryItem>, u32)> { Ok((vec![], 0)) }
        async fn get_by_id(&self, _id: &str) -> DomainResult<Option<LibraryItem>> { Ok(None) }
        async fn list_collections(&self, _p: Option<&str>) -> DomainResult<Vec<LibraryCollection>> { Ok(vec![]) }
        async fn list_series(&self, _p: Option<&str>) -> DomainResult<Vec<String>> { Ok(vec![]) }
        async fn list_genres(&self, _ct: Option<&ContentType>, _p: Option<&str>) -> DomainResult<Vec<String>> { Ok(vec![]) }
        async fn upsert_items(&self, _pid: &str, items: Vec<LibraryItem>) -> DomainResult<()> {
            self.upserted.lock().unwrap().extend(items);
            Ok(())
        }
        async fn clear_provider(&self, pid: &str) -> DomainResult<()> {
            self.cleared.lock().unwrap().push(pid.to_string());
            Ok(())
        }
        async fn log_sync_start(&self, _pid: &str) -> DomainResult<i64> { Ok(1) }
        async fn log_sync_finish(&self, _id: i64, _r: &LibrarySyncResult) -> DomainResult<()> { Ok(()) }
        async fn latest_sync_status(&self) -> DomainResult<Vec<LibrarySyncLogEntry>> { Ok(vec![]) }
        async fn is_sync_running(&self, _pid: &str) -> DomainResult<bool> { Ok(false) }
        async fn list_shows(&self, _p: Option<&str>, _st: Option<&str>, _g: &[String]) -> DomainResult<Vec<domain::ShowSummary>> { Ok(vec![]) }
        async fn list_seasons(&self, _sn: &str, _p: Option<&str>) -> DomainResult<Vec<domain::SeasonSummary>> { Ok(vec![]) }
    }

    /// Happy path: one provider item flows through clear + upsert and the
    /// result reports success with the correct count.
    #[tokio::test]
    async fn sync_clears_then_upserts_items() {
        let upserted = Arc::new(Mutex::new(vec![]));
        let cleared = Arc::new(Mutex::new(vec![]));
        let repo: Arc<dyn ILibraryRepository> = Arc::new(SpyRepo {
            upserted: Arc::clone(&upserted),
            cleared: Arc::clone(&cleared),
        });
        let adapter = FullSyncAdapter::new(Arc::clone(&repo));
        let provider = MockProvider {
            items: vec![MediaItem {
                id: MediaItemId::new("abc".to_string()),
                title: "Test Movie".to_string(),
                content_type: ContentType::Movie,
                duration_secs: 3600,
                description: None,
                series_name: None,
                season_number: None,
                episode_number: None,
                year: None,
                genres: vec![],
                tags: vec![],
                thumbnail_url: None,
                collection_id: None,
            }],
        };
        let result = adapter.sync_provider(&provider, "jellyfin").await;
        assert!(result.error.is_none());
        assert_eq!(result.items_found, 1);
        assert_eq!(cleared.lock().unwrap().as_slice(), &["jellyfin"]);
        assert_eq!(upserted.lock().unwrap().len(), 1);
    }
}

View File

@@ -36,15 +36,17 @@ pub fn decode_id(id: &MediaItemId) -> Option<String> {
pub struct LocalIndex { pub struct LocalIndex {
items: Arc<RwLock<HashMap<MediaItemId, LocalFileItem>>>, items: Arc<RwLock<HashMap<MediaItemId, LocalFileItem>>>,
pub root_dir: PathBuf, pub root_dir: PathBuf,
provider_id: String,
pool: sqlx::SqlitePool, pool: sqlx::SqlitePool,
} }
impl LocalIndex { impl LocalIndex {
/// Create the index, immediately loading persisted entries from SQLite. /// Create the index, immediately loading persisted entries from SQLite.
pub async fn new(config: &LocalFilesConfig, pool: sqlx::SqlitePool) -> Self { pub async fn new(config: &LocalFilesConfig, pool: sqlx::SqlitePool, provider_id: String) -> Self {
let idx = Self { let idx = Self {
items: Arc::new(RwLock::new(HashMap::new())), items: Arc::new(RwLock::new(HashMap::new())),
root_dir: config.root_dir.clone(), root_dir: config.root_dir.clone(),
provider_id,
pool, pool,
}; };
idx.load_from_db().await; idx.load_from_db().await;
@@ -65,8 +67,10 @@ impl LocalIndex {
} }
let rows = sqlx::query_as::<_, Row>( let rows = sqlx::query_as::<_, Row>(
"SELECT id, rel_path, title, duration_secs, year, tags, top_dir FROM local_files_index", "SELECT id, rel_path, title, duration_secs, year, tags, top_dir \
FROM local_files_index WHERE provider_id = ?",
) )
.bind(&self.provider_id)
.fetch_all(&self.pool) .fetch_all(&self.pool)
.await; .await;
@@ -86,7 +90,7 @@ impl LocalIndex {
}; };
map.insert(MediaItemId::new(row.id), item); map.insert(MediaItemId::new(row.id), item);
} }
info!("Local files index: loaded {} items from DB", map.len()); info!("Local files index [{}]: loaded {} items from DB", self.provider_id, map.len());
} }
Err(e) => { Err(e) => {
// Table might not exist yet on first run — that's fine. // Table might not exist yet on first run — that's fine.
@@ -100,7 +104,7 @@ impl LocalIndex {
/// Returns the number of items found. Called on startup (background task) /// Returns the number of items found. Called on startup (background task)
/// and via `POST /files/rescan`. /// and via `POST /files/rescan`.
pub async fn rescan(&self) -> u32 { pub async fn rescan(&self) -> u32 {
info!("Local files: scanning {:?}", self.root_dir); info!("Local files [{}]: scanning {:?}", self.provider_id, self.root_dir);
let new_items = scan_dir(&self.root_dir).await; let new_items = scan_dir(&self.root_dir).await;
let count = new_items.len() as u32; let count = new_items.len() as u32;
@@ -119,15 +123,16 @@ impl LocalIndex {
error!("Failed to persist local files index: {}", e); error!("Failed to persist local files index: {}", e);
} }
info!("Local files: indexed {} items", count); info!("Local files [{}]: indexed {} items", self.provider_id, count);
count count
} }
async fn save_to_db(&self, items: &[LocalFileItem]) -> Result<(), sqlx::Error> { async fn save_to_db(&self, items: &[LocalFileItem]) -> Result<(), sqlx::Error> {
// Rebuild the table in one transaction. // Rebuild the table in one transaction, scoped to this provider.
let mut tx = self.pool.begin().await?; let mut tx = self.pool.begin().await?;
sqlx::query("DELETE FROM local_files_index") sqlx::query("DELETE FROM local_files_index WHERE provider_id = ?")
.bind(&self.provider_id)
.execute(&mut *tx) .execute(&mut *tx)
.await?; .await?;
@@ -137,8 +142,8 @@ impl LocalIndex {
let tags_json = serde_json::to_string(&item.tags).unwrap_or_else(|_| "[]".into()); let tags_json = serde_json::to_string(&item.tags).unwrap_or_else(|_| "[]".into());
sqlx::query( sqlx::query(
"INSERT INTO local_files_index \ "INSERT INTO local_files_index \
(id, rel_path, title, duration_secs, year, tags, top_dir, scanned_at) \ (id, rel_path, title, duration_secs, year, tags, top_dir, scanned_at, provider_id) \
VALUES (?, ?, ?, ?, ?, ?, ?, ?)", VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
) )
.bind(&id) .bind(&id)
.bind(&item.rel_path) .bind(&item.rel_path)
@@ -148,6 +153,7 @@ impl LocalIndex {
.bind(&tags_json) .bind(&tags_json)
.bind(&item.top_dir) .bind(&item.top_dir)
.bind(&now) .bind(&now)
.bind(&self.provider_id)
.execute(&mut *tx) .execute(&mut *tx)
.await?; .await?;
} }

View File

@@ -51,6 +51,8 @@ fn to_media_item(id: MediaItemId, item: &LocalFileItem) -> MediaItem {
series_name: None, series_name: None,
season_number: None, season_number: None,
episode_number: None, episode_number: None,
thumbnail_url: None,
collection_id: None,
} }
} }
@@ -86,11 +88,9 @@ impl IMediaProvider for LocalFilesProvider {
} else { } else {
ContentType::Movie ContentType::Movie
}; };
if let Some(ref ct) = filter.content_type { if let Some(ref ct) = filter.content_type && &content_type != ct {
if &content_type != ct {
return None; return None;
} }
}
// collections: match against top_dir // collections: match against top_dir
if !filter.collections.is_empty() && !filter.collections.contains(&item.top_dir) { if !filter.collections.is_empty() && !filter.collections.contains(&item.top_dir) {
@@ -117,23 +117,17 @@ impl IMediaProvider for LocalFilesProvider {
} }
// duration bounds // duration bounds
if let Some(min) = filter.min_duration_secs { if let Some(min) = filter.min_duration_secs && item.duration_secs < min {
if item.duration_secs < min {
return None; return None;
} }
} if let Some(max) = filter.max_duration_secs && item.duration_secs > max {
if let Some(max) = filter.max_duration_secs {
if item.duration_secs > max {
return None; return None;
} }
}
// search_term: case-insensitive substring in title // search_term: case-insensitive substring in title
if let Some(ref q) = filter.search_term { if let Some(ref q) = filter.search_term && !item.title.to_lowercase().contains(&q.to_lowercase()) {
if !item.title.to_lowercase().contains(&q.to_lowercase()) {
return None; return None;
} }
}
Some(to_media_item(id, &item)) Some(to_media_item(id, &item))
}) })

View File

@@ -171,19 +171,17 @@ impl TranscodeManager {
continue; continue;
} }
let playlist = path.join("playlist.m3u8"); let playlist = path.join("playlist.m3u8");
if let Ok(meta) = tokio::fs::metadata(&playlist).await { if let Ok(meta) = tokio::fs::metadata(&playlist).await
if let Ok(modified) = meta.modified() { && let Ok(modified) = meta.modified()
if let Ok(age) = now.duration_since(modified) { && let Ok(age) = now.duration_since(modified)
if age > ttl { && age > ttl
{
warn!("cleanup: removing stale transcode {:?}", path); warn!("cleanup: removing stale transcode {:?}", path);
let _ = tokio::fs::remove_dir_all(&path).await; let _ = tokio::fs::remove_dir_all(&path).await;
} }
} }
} }
} }
}
}
}
// ============================================================================ // ============================================================================
// FFmpeg helper // FFmpeg helper

View File

@@ -15,8 +15,8 @@ impl SqliteProviderConfigRepository {
#[async_trait] #[async_trait]
impl ProviderConfigRepository for SqliteProviderConfigRepository { impl ProviderConfigRepository for SqliteProviderConfigRepository {
async fn get_all(&self) -> DomainResult<Vec<ProviderConfigRow>> { async fn get_all(&self) -> DomainResult<Vec<ProviderConfigRow>> {
let rows: Vec<(String, String, i64, String)> = sqlx::query_as( let rows: Vec<(String, String, String, i64, String)> = sqlx::query_as(
"SELECT provider_type, config_json, enabled, updated_at FROM provider_configs", "SELECT id, provider_type, config_json, enabled, updated_at FROM provider_configs",
) )
.fetch_all(&self.pool) .fetch_all(&self.pool)
.await .await
@@ -24,7 +24,8 @@ impl ProviderConfigRepository for SqliteProviderConfigRepository {
Ok(rows Ok(rows
.into_iter() .into_iter()
.map(|(provider_type, config_json, enabled, updated_at)| ProviderConfigRow { .map(|(id, provider_type, config_json, enabled, updated_at)| ProviderConfigRow {
id,
provider_type, provider_type,
config_json, config_json,
enabled: enabled != 0, enabled: enabled != 0,
@@ -33,15 +34,35 @@ impl ProviderConfigRepository for SqliteProviderConfigRepository {
.collect()) .collect())
} }
async fn get_by_id(&self, id: &str) -> DomainResult<Option<ProviderConfigRow>> {
let row: Option<(String, String, String, i64, String)> = sqlx::query_as(
"SELECT id, provider_type, config_json, enabled, updated_at FROM provider_configs WHERE id = ?",
)
.bind(id)
.fetch_optional(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
Ok(row.map(|(id, provider_type, config_json, enabled, updated_at)| ProviderConfigRow {
id,
provider_type,
config_json,
enabled: enabled != 0,
updated_at,
}))
}
async fn upsert(&self, row: &ProviderConfigRow) -> DomainResult<()> { async fn upsert(&self, row: &ProviderConfigRow) -> DomainResult<()> {
sqlx::query( sqlx::query(
r#"INSERT INTO provider_configs (provider_type, config_json, enabled, updated_at) r#"INSERT INTO provider_configs (id, provider_type, config_json, enabled, updated_at)
VALUES (?, ?, ?, ?) VALUES (?, ?, ?, ?, ?)
ON CONFLICT(provider_type) DO UPDATE SET ON CONFLICT(id) DO UPDATE SET
provider_type = excluded.provider_type,
config_json = excluded.config_json, config_json = excluded.config_json,
enabled = excluded.enabled, enabled = excluded.enabled,
updated_at = excluded.updated_at"#, updated_at = excluded.updated_at"#,
) )
.bind(&row.id)
.bind(&row.provider_type) .bind(&row.provider_type)
.bind(&row.config_json) .bind(&row.config_json)
.bind(row.enabled as i64) .bind(row.enabled as i64)
@@ -52,9 +73,9 @@ impl ProviderConfigRepository for SqliteProviderConfigRepository {
Ok(()) Ok(())
} }
async fn delete(&self, provider_type: &str) -> DomainResult<()> { async fn delete(&self, id: &str) -> DomainResult<()> {
sqlx::query("DELETE FROM provider_configs WHERE provider_type = ?") sqlx::query("DELETE FROM provider_configs WHERE id = ?")
.bind(provider_type) .bind(id)
.execute(&self.pool) .execute(&self.pool)
.await .await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?; .map_err(|e| DomainError::RepositoryError(e.to_string()))?;

View File

@@ -42,6 +42,14 @@ impl ProviderRegistry {
self.providers.is_empty() self.providers.is_empty()
} }
/// Return the provider registered under `id`, if any.
pub fn get_provider(&self, id: &str) -> Option<Arc<dyn IMediaProvider>> {
self.providers
.iter()
.find(|(pid, _)| pid == id)
.map(|(_, p)| Arc::clone(p))
}
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
// Internal helpers // Internal helpers
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------

View File

@@ -29,6 +29,12 @@ pub(super) struct SlotRow {
pub source_block_id: String, pub source_block_id: String,
} }
#[derive(Debug, FromRow)]
pub(super) struct LastSlotRow {
pub source_block_id: String,
pub item: String,
}
#[derive(Debug, FromRow)] #[derive(Debug, FromRow)]
pub(super) struct PlaybackRecordRow { pub(super) struct PlaybackRecordRow {
pub id: String, pub id: String,

View File

@@ -1,9 +1,11 @@
use async_trait::async_trait; use async_trait::async_trait;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use domain::{ChannelId, DomainError, DomainResult, GeneratedSchedule, PlaybackRecord, ScheduleRepository}; use std::collections::HashMap;
use super::mapping::{map_schedule, PlaybackRecordRow, ScheduleRow, SlotRow}; use domain::{BlockId, ChannelId, DomainError, DomainResult, GeneratedSchedule, MediaItemId, PlaybackRecord, ScheduleRepository};
use super::mapping::{map_schedule, LastSlotRow, PlaybackRecordRow, ScheduleRow, SlotRow};
pub struct PostgresScheduleRepository { pub struct PostgresScheduleRepository {
pool: sqlx::Pool<sqlx::Postgres>, pool: sqlx::Pool<sqlx::Postgres>,
@@ -143,6 +145,41 @@ impl ScheduleRepository for PostgresScheduleRepository {
rows.into_iter().map(PlaybackRecord::try_from).collect() rows.into_iter().map(PlaybackRecord::try_from).collect()
} }
async fn find_last_slot_per_block(
&self,
channel_id: ChannelId,
) -> DomainResult<HashMap<BlockId, MediaItemId>> {
let channel_id_str = channel_id.to_string();
let rows: Vec<LastSlotRow> = sqlx::query_as(
"SELECT ss.source_block_id, ss.item \
FROM scheduled_slots ss \
INNER JOIN generated_schedules gs ON gs.id = ss.schedule_id \
WHERE gs.channel_id = $1 \
AND ss.start_at = ( \
SELECT MAX(ss2.start_at) \
FROM scheduled_slots ss2 \
INNER JOIN generated_schedules gs2 ON gs2.id = ss2.schedule_id \
WHERE ss2.source_block_id = ss.source_block_id \
AND gs2.channel_id = $2 \
)",
)
.bind(&channel_id_str)
.bind(&channel_id_str)
.fetch_all(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
let mut map = HashMap::new();
for row in rows {
let block_id = uuid::Uuid::parse_str(&row.source_block_id)
.map_err(|e| DomainError::RepositoryError(format!("Invalid block UUID: {}", e)))?;
let item: domain::MediaItem = serde_json::from_str(&row.item)
.map_err(|e| DomainError::RepositoryError(format!("Invalid slot item JSON: {}", e)))?;
map.insert(block_id, item.id);
}
Ok(map)
}
async fn save_playback_record(&self, record: &PlaybackRecord) -> DomainResult<()> { async fn save_playback_record(&self, record: &PlaybackRecord) -> DomainResult<()> {
sqlx::query( sqlx::query(
r#" r#"

View File

@@ -1,9 +1,12 @@
use async_trait::async_trait; use async_trait::async_trait;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use domain::{ChannelId, DomainError, DomainResult, GeneratedSchedule, PlaybackRecord, ScheduleRepository}; use std::collections::HashMap;
use super::mapping::{map_schedule, PlaybackRecordRow, ScheduleRow, SlotRow}; use domain::{BlockId, ChannelId, DomainError, DomainResult, GeneratedSchedule, MediaItemId, PlaybackRecord, ScheduleRepository};
use uuid::Uuid;
use super::mapping::{map_schedule, LastSlotRow, PlaybackRecordRow, ScheduleRow, SlotRow};
pub struct SqliteScheduleRepository { pub struct SqliteScheduleRepository {
pool: sqlx::SqlitePool, pool: sqlx::SqlitePool,
@@ -146,6 +149,112 @@ impl ScheduleRepository for SqliteScheduleRepository {
rows.into_iter().map(PlaybackRecord::try_from).collect() rows.into_iter().map(PlaybackRecord::try_from).collect()
} }
async fn find_last_slot_per_block(
&self,
channel_id: ChannelId,
) -> DomainResult<HashMap<BlockId, MediaItemId>> {
let channel_id_str = channel_id.to_string();
let rows: Vec<LastSlotRow> = sqlx::query_as(
"SELECT ss.source_block_id, ss.item \
FROM scheduled_slots ss \
INNER JOIN generated_schedules gs ON gs.id = ss.schedule_id \
WHERE gs.channel_id = ? \
AND ss.start_at = ( \
SELECT MAX(ss2.start_at) \
FROM scheduled_slots ss2 \
INNER JOIN generated_schedules gs2 ON gs2.id = ss2.schedule_id \
WHERE ss2.source_block_id = ss.source_block_id \
AND gs2.channel_id = ? \
)",
)
.bind(&channel_id_str)
.bind(&channel_id_str)
.fetch_all(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
let mut map = HashMap::new();
for row in rows {
let block_id = uuid::Uuid::parse_str(&row.source_block_id)
.map_err(|e| DomainError::RepositoryError(format!("Invalid block UUID: {}", e)))?;
let item: domain::MediaItem = serde_json::from_str(&row.item)
.map_err(|e| DomainError::RepositoryError(format!("Invalid slot item JSON: {}", e)))?;
map.insert(block_id, item.id);
}
Ok(map)
}
async fn list_schedule_history(
&self,
channel_id: ChannelId,
) -> DomainResult<Vec<GeneratedSchedule>> {
let rows: Vec<ScheduleRow> = sqlx::query_as(
"SELECT id, channel_id, valid_from, valid_until, generation \
FROM generated_schedules WHERE channel_id = ? ORDER BY generation DESC",
)
.bind(channel_id.to_string())
.fetch_all(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
rows.into_iter()
.map(|r| map_schedule(r, vec![]))
.collect()
}
async fn get_schedule_by_id(
&self,
channel_id: ChannelId,
schedule_id: Uuid,
) -> DomainResult<Option<GeneratedSchedule>> {
let row: Option<ScheduleRow> = sqlx::query_as(
"SELECT id, channel_id, valid_from, valid_until, generation \
FROM generated_schedules WHERE id = ? AND channel_id = ?",
)
.bind(schedule_id.to_string())
.bind(channel_id.to_string())
.fetch_optional(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
match row {
None => Ok(None),
Some(r) => {
let slots = self.fetch_slots(&r.id).await?;
Some(map_schedule(r, slots)).transpose()
}
}
}
async fn delete_schedules_after(
&self,
channel_id: ChannelId,
target_generation: u32,
) -> DomainResult<()> {
let target_gen = target_generation as i64;
let ch = channel_id.to_string();
sqlx::query(
"DELETE FROM playback_records WHERE channel_id = ? AND generation > ?",
)
.bind(&ch)
.bind(target_gen)
.execute(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
sqlx::query(
"DELETE FROM generated_schedules WHERE channel_id = ? AND generation > ?",
)
.bind(&ch)
.bind(target_gen)
.execute(&self.pool)
.await
.map_err(|e| DomainError::RepositoryError(e.to_string()))?;
Ok(())
}
async fn save_playback_record(&self, record: &PlaybackRecord) -> DomainResult<()> { async fn save_playback_record(&self, record: &PlaybackRecord) -> DomainResult<()> {
sqlx::query( sqlx::query(
r#" r#"

View File

@@ -2,7 +2,6 @@
name = "mcp" name = "mcp"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2024"
default-run = "mcp"
[features] [features]
default = ["sqlite", "jellyfin"] default = ["sqlite", "jellyfin"]

View File

@@ -9,5 +9,5 @@ pub fn json_err(e: serde_json::Error) -> String {
} }
pub fn ok_json<T: serde::Serialize>(value: &T) -> String { pub fn ok_json<T: serde::Serialize>(value: &T) -> String {
serde_json::to_string(value).unwrap_or_else(|e| json_err(e)) serde_json::to_string(value).unwrap_or_else(json_err)
} }

View File

@@ -87,7 +87,7 @@ async fn main() -> anyhow::Result<()> {
#[cfg(feature = "local-files")] #[cfg(feature = "local-files")]
if let Some(dir) = std::env::var("LOCAL_FILES_DIR").ok().map(std::path::PathBuf::from) { if let Some(dir) = std::env::var("LOCAL_FILES_DIR").ok().map(std::path::PathBuf::from) {
if let k_core::db::DatabasePool::Sqlite(ref sqlite_pool) = db_pool { let k_core::db::DatabasePool::Sqlite(ref sqlite_pool) = db_pool;
let base_url = std::env::var("BASE_URL") let base_url = std::env::var("BASE_URL")
.unwrap_or_else(|_| "http://localhost:3000".to_string()); .unwrap_or_else(|_| "http://localhost:3000".to_string());
let lf_cfg = infra::LocalFilesConfig { let lf_cfg = infra::LocalFilesConfig {
@@ -101,7 +101,6 @@ async fn main() -> anyhow::Result<()> {
tokio::spawn(async move { scan_idx.rescan().await; }); tokio::spawn(async move { scan_idx.rescan().await; });
registry.register("local", Arc::new(infra::LocalFilesProvider::new(idx, lf_cfg, None))); registry.register("local", Arc::new(infra::LocalFilesProvider::new(idx, lf_cfg, None)));
} }
}
if registry.is_empty() { if registry.is_empty() {
tracing::warn!("No media provider configured. Set JELLYFIN_BASE_URL or LOCAL_FILES_DIR."); tracing::warn!("No media provider configured. Set JELLYFIN_BASE_URL or LOCAL_FILES_DIR.");

View File

@@ -59,14 +59,16 @@ pub struct DeleteChannelParams {
pub struct SetScheduleConfigParams { pub struct SetScheduleConfigParams {
/// Channel UUID /// Channel UUID
pub channel_id: String, pub channel_id: String,
/// JSON array of ProgrammingBlock objects /// JSON object of the full ScheduleConfig shape: {"monday": [...], "tuesday": [...], ...}
pub blocks_json: String, pub day_blocks_json: String,
} }
#[derive(Debug, Deserialize, JsonSchema)] #[derive(Debug, Deserialize, JsonSchema)]
pub struct AddBlockParams { pub struct AddBlockParams {
/// Channel UUID /// Channel UUID
pub channel_id: String, pub channel_id: String,
/// Day of week: "monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"
pub day: String,
/// ProgrammingBlock serialized as JSON /// ProgrammingBlock serialized as JSON
pub block_json: String, pub block_json: String,
} }
@@ -163,43 +165,44 @@ impl KTvMcpServer {
} }
#[tool( #[tool(
description = "Replace a channel's entire schedule config. blocks_json is a JSON array of ProgrammingBlock objects." description = "Replace a channel's entire schedule config. day_blocks_json is a JSON object of the ScheduleConfig shape: {\"monday\": [...], ...}"
)] )]
async fn set_schedule_config(&self, #[tool(aggr)] p: SetScheduleConfigParams) -> String { async fn set_schedule_config(&self, #[tool(aggr)] p: SetScheduleConfigParams) -> String {
let channel_id = match parse_uuid(&p.channel_id) { let channel_id = match parse_uuid(&p.channel_id) {
Ok(id) => id, Ok(id) => id,
Err(e) => return e, Err(e) => return e,
}; };
let blocks: Vec<ProgrammingBlock> = match serde_json::from_str(&p.blocks_json) { let config: ScheduleConfig = match serde_json::from_str(&p.day_blocks_json) {
Ok(b) => b, Ok(c) => c,
Err(e) => { Err(e) => {
return serde_json::json!({"error": format!("invalid blocks_json: {e}")}) return serde_json::json!({"error": format!("invalid day_blocks_json: {e}")})
.to_string() .to_string()
} }
}; };
channels::set_schedule_config( channels::set_schedule_config(&self.channel_service, channel_id, config).await
&self.channel_service,
channel_id,
ScheduleConfig { blocks },
)
.await
} }
#[tool( #[tool(
description = "Append a ProgrammingBlock to a channel's schedule. block_json is a serialized ProgrammingBlock." description = "Append a ProgrammingBlock to a channel's schedule for a specific day. day: monday|tuesday|wednesday|thursday|friday|saturday|sunday. block_json is a serialized ProgrammingBlock."
)] )]
async fn add_programming_block(&self, #[tool(aggr)] p: AddBlockParams) -> String { async fn add_programming_block(&self, #[tool(aggr)] p: AddBlockParams) -> String {
let channel_id = match parse_uuid(&p.channel_id) { let channel_id = match parse_uuid(&p.channel_id) {
Ok(id) => id, Ok(id) => id,
Err(e) => return e, Err(e) => return e,
}; };
let day: domain::Weekday = match serde_json::from_str(&format!("\"{}\"", p.day)) {
Ok(d) => d,
Err(e) => {
return serde_json::json!({"error": format!("invalid day: {e}")}).to_string()
}
};
let block: ProgrammingBlock = match serde_json::from_str(&p.block_json) { let block: ProgrammingBlock = match serde_json::from_str(&p.block_json) {
Ok(b) => b, Ok(b) => b,
Err(e) => { Err(e) => {
return serde_json::json!({"error": format!("invalid block_json: {e}")}).to_string() return serde_json::json!({"error": format!("invalid block_json: {e}")}).to_string()
} }
}; };
channels::add_programming_block(&self.channel_service, channel_id, block).await channels::add_programming_block(&self.channel_service, channel_id, day, block).await
} }
#[tool(description = "Remove a programming block from a channel's schedule by block UUID")] #[tool(description = "Remove a programming block from a channel's schedule by block UUID")]

View File

@@ -95,13 +95,17 @@ pub async fn set_schedule_config(
pub async fn add_programming_block( pub async fn add_programming_block(
svc: &Arc<ChannelService>, svc: &Arc<ChannelService>,
channel_id: Uuid, channel_id: Uuid,
day: domain::Weekday,
block: domain::ProgrammingBlock, block: domain::ProgrammingBlock,
) -> String { ) -> String {
let mut channel: Channel = match svc.find_by_id(channel_id).await { let mut channel: Channel = match svc.find_by_id(channel_id).await {
Ok(c) => c, Ok(c) => c,
Err(e) => return domain_err(e), Err(e) => return domain_err(e),
}; };
channel.schedule_config.blocks.push(block); channel.schedule_config.day_blocks
.entry(day)
.or_default()
.push(block);
channel.updated_at = chrono::Utc::now(); channel.updated_at = chrono::Utc::now();
match svc.update(channel).await { match svc.update(channel).await {
Ok(c) => ok_json(&c), Ok(c) => ok_json(&c),
@@ -118,7 +122,9 @@ pub async fn remove_programming_block(
Ok(c) => c, Ok(c) => c,
Err(e) => return domain_err(e), Err(e) => return domain_err(e),
}; };
channel.schedule_config.blocks.retain(|b| b.id != block_id); for blocks in channel.schedule_config.day_blocks.values_mut() {
blocks.retain(|b| b.id != block_id);
}
channel.updated_at = chrono::Utc::now(); channel.updated_at = chrono::Utc::now();
match svc.update(channel).await { match svc.update(channel).await {
Ok(c) => ok_json(&c), Ok(c) => ok_json(&c),

View File

@@ -0,0 +1,12 @@
CREATE TABLE channel_config_snapshots (
id TEXT PRIMARY KEY NOT NULL,
channel_id TEXT NOT NULL REFERENCES channels(id) ON DELETE CASCADE,
config_json TEXT NOT NULL,
version_num INTEGER NOT NULL,
label TEXT,
created_at TEXT NOT NULL,
UNIQUE (channel_id, version_num)
);
CREATE INDEX idx_config_snapshots_channel
ON channel_config_snapshots(channel_id, version_num DESC);

View File

@@ -0,0 +1,17 @@
-- Recreate provider_configs with per-instance id as PK
CREATE TABLE provider_configs_new (
id TEXT PRIMARY KEY,
provider_type TEXT NOT NULL,
config_json TEXT NOT NULL,
enabled INTEGER NOT NULL DEFAULT 1,
updated_at TEXT NOT NULL
);
INSERT INTO provider_configs_new (id, provider_type, config_json, enabled, updated_at)
SELECT provider_type, provider_type, config_json, enabled, updated_at
FROM provider_configs;
DROP TABLE provider_configs;
ALTER TABLE provider_configs_new RENAME TO provider_configs;
-- Scope local_files_index entries by provider instance
ALTER TABLE local_files_index ADD COLUMN provider_id TEXT NOT NULL DEFAULT 'local';
CREATE INDEX IF NOT EXISTS idx_local_files_provider ON local_files_index(provider_id);

View File

@@ -0,0 +1,37 @@
CREATE TABLE IF NOT EXISTS library_items (
id TEXT PRIMARY KEY,
provider_id TEXT NOT NULL,
external_id TEXT NOT NULL,
title TEXT NOT NULL,
content_type TEXT NOT NULL,
duration_secs INTEGER NOT NULL DEFAULT 0,
series_name TEXT,
season_number INTEGER,
episode_number INTEGER,
year INTEGER,
genres TEXT NOT NULL DEFAULT '[]',
tags TEXT NOT NULL DEFAULT '[]',
collection_id TEXT,
collection_name TEXT,
collection_type TEXT,
thumbnail_url TEXT,
synced_at TEXT NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_library_items_provider ON library_items(provider_id);
CREATE INDEX IF NOT EXISTS idx_library_items_content_type ON library_items(content_type);
CREATE INDEX IF NOT EXISTS idx_library_items_series ON library_items(series_name);
CREATE UNIQUE INDEX IF NOT EXISTS idx_library_items_provider_external ON library_items(provider_id, external_id);
CREATE TABLE IF NOT EXISTS library_sync_log (
id INTEGER PRIMARY KEY AUTOINCREMENT,
provider_id TEXT NOT NULL,
started_at TEXT NOT NULL,
finished_at TEXT,
items_found INTEGER NOT NULL DEFAULT 0,
status TEXT NOT NULL DEFAULT 'running',
error_msg TEXT
);
CREATE INDEX IF NOT EXISTS idx_library_sync_log_provider ON library_sync_log(provider_id);
CREATE INDEX IF NOT EXISTS idx_library_sync_log_provider_started ON library_sync_log(provider_id, started_at DESC);

View File

@@ -0,0 +1,6 @@
CREATE TABLE IF NOT EXISTS app_settings (
key TEXT PRIMARY KEY,
value TEXT NOT NULL
);
INSERT OR IGNORE INTO app_settings(key, value) VALUES ('library_sync_interval_hours', '6');

View File

@@ -8,12 +8,13 @@ import { useConfig } from "@/hooks/use-channels";
export default function LoginPage() { export default function LoginPage() {
const [email, setEmail] = useState(""); const [email, setEmail] = useState("");
const [password, setPassword] = useState(""); const [password, setPassword] = useState("");
const [rememberMe, setRememberMe] = useState(false);
const { mutate: login, isPending, error } = useLogin(); const { mutate: login, isPending, error } = useLogin();
const { data: config } = useConfig(); const { data: config } = useConfig();
const handleSubmit = (e: React.FormEvent) => { const handleSubmit = (e: React.FormEvent) => {
e.preventDefault(); e.preventDefault();
login({ email, password }); login({ email, password, rememberMe });
}; };
return ( return (
@@ -54,6 +55,23 @@ export default function LoginPage() {
/> />
</div> </div>
<div className="space-y-1">
<label className="flex cursor-pointer items-center gap-2">
<input
type="checkbox"
checked={rememberMe}
onChange={(e) => setRememberMe(e.target.checked)}
className="h-3.5 w-3.5 rounded border-zinc-600 bg-zinc-900 accent-white"
/>
<span className="text-xs text-zinc-400">Remember me</span>
</label>
{rememberMe && (
<p className="pl-5 text-xs text-amber-500/80">
A refresh token will be stored locally don&apos;t share it.
</p>
)}
</div>
{error && <p className="text-xs text-red-400">{error.message}</p>} {error && <p className="text-xs text-red-400">{error.message}</p>}
<button <button

View File

@@ -1,15 +1,36 @@
"use client"; "use client";
import { useState } from "react"; import { useState } from "react";
import { useProviderConfigs, useUpdateProvider, useTestProvider } from "@/hooks/use-admin-providers"; import {
useProviderConfigs,
useCreateProvider,
useUpdateProvider,
useDeleteProvider,
useTestProvider,
} from "@/hooks/use-admin-providers";
import { useConfig } from "@/hooks/use-config"; import { useConfig } from "@/hooks/use-config";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input"; import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label"; import { Label } from "@/components/ui/label";
import { Switch } from "@/components/ui/switch"; import { Switch } from "@/components/ui/switch";
import { CheckCircle, XCircle, Loader2 } from "lucide-react"; import { Badge } from "@/components/ui/badge";
import {
Dialog,
DialogContent,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { CheckCircle, XCircle, Loader2, Plus, Trash2 } from "lucide-react";
import { ApiRequestError } from "@/lib/api"; import { ApiRequestError } from "@/lib/api";
import type { ProviderConfig } from "@/lib/types";
const PROVIDER_FIELDS: Record< const PROVIDER_FIELDS: Record<
string, string,
@@ -27,28 +48,37 @@ const PROVIDER_FIELDS: Record<
], ],
}; };
interface ProviderCardProps { function isValidInstanceId(id: string): boolean {
providerType: string; return id.length >= 1 && id.length <= 40 && /^[a-zA-Z0-9-]+$/.test(id);
existingConfig?: { config_json: Record<string, string>; enabled: boolean };
} }
function ProviderCard({ providerType, existingConfig }: ProviderCardProps) { // ---------------------------------------------------------------------------
const fields = PROVIDER_FIELDS[providerType] ?? []; // Existing instance card
// ---------------------------------------------------------------------------
interface ProviderCardProps {
config: ProviderConfig;
existingIds: string[];
}
function ProviderCard({ config }: ProviderCardProps) {
const fields = PROVIDER_FIELDS[config.provider_type] ?? [];
const [formValues, setFormValues] = useState<Record<string, string>>( const [formValues, setFormValues] = useState<Record<string, string>>(
() => existingConfig?.config_json ?? {}, () => config.config_json ?? {},
); );
const [enabled, setEnabled] = useState(existingConfig?.enabled ?? true); const [enabled, setEnabled] = useState(config.enabled);
const [conflictError, setConflictError] = useState(false); const [conflictError, setConflictError] = useState(false);
const [testResult, setTestResult] = useState<{ ok: boolean; message: string } | null>(null); const [testResult, setTestResult] = useState<{ ok: boolean; message: string } | null>(null);
const updateProvider = useUpdateProvider(); const updateProvider = useUpdateProvider();
const deleteProvider = useDeleteProvider();
const testProvider = useTestProvider(); const testProvider = useTestProvider();
const handleSave = async () => { const handleSave = async () => {
setConflictError(false); setConflictError(false);
try { try {
await updateProvider.mutateAsync({ await updateProvider.mutateAsync({
type: providerType, id: config.id,
payload: { config_json: formValues, enabled }, payload: { config_json: formValues, enabled },
}); });
} catch (e: unknown) { } catch (e: unknown) {
@@ -61,21 +91,44 @@ function ProviderCard({ providerType, existingConfig }: ProviderCardProps) {
const handleTest = async () => { const handleTest = async () => {
setTestResult(null); setTestResult(null);
const result = await testProvider.mutateAsync({ const result = await testProvider.mutateAsync({
type: providerType, provider_type: config.provider_type,
payload: { config_json: formValues, enabled: true }, config_json: formValues,
}); });
setTestResult(result); setTestResult(result);
}; };
const handleDelete = async () => {
if (!confirm(`Delete provider instance "${config.id}"?`)) return;
await deleteProvider.mutateAsync(config.id);
};
return ( return (
<Card className="border-zinc-800 bg-zinc-900"> <Card className="border-zinc-800 bg-zinc-900">
<CardHeader className="flex flex-row items-center justify-between pb-3"> <CardHeader className="flex flex-row items-center justify-between pb-3">
<CardTitle className="text-sm font-medium capitalize text-zinc-100"> <div className="flex items-center gap-2">
{providerType.replace("_", " ")} <Badge variant="outline" className="font-mono text-xs text-zinc-300 border-zinc-600">
</CardTitle> {config.id}
</Badge>
<span className="text-xs text-zinc-500 capitalize">
{config.provider_type.replace("_", " ")}
</span>
</div>
<div className="flex items-center gap-2"> <div className="flex items-center gap-2">
<span className="text-xs text-zinc-400">Enabled</span> <span className="text-xs text-zinc-400">Enabled</span>
<Switch checked={enabled} onCheckedChange={setEnabled} /> <Switch checked={enabled} onCheckedChange={setEnabled} />
<Button
variant="ghost"
size="icon"
onClick={handleDelete}
disabled={deleteProvider.isPending}
className="h-7 w-7 text-zinc-500 hover:text-red-400"
>
{deleteProvider.isPending ? (
<Loader2 className="h-3.5 w-3.5 animate-spin" />
) : (
<Trash2 className="h-3.5 w-3.5" />
)}
</Button>
</div> </div>
</CardHeader> </CardHeader>
<CardContent className="space-y-3"> <CardContent className="space-y-3">
@@ -145,36 +198,241 @@ function ProviderCard({ providerType, existingConfig }: ProviderCardProps) {
); );
} }
// ---------------------------------------------------------------------------
// Add Instance dialog
// ---------------------------------------------------------------------------
interface AddInstanceDialogProps {
open: boolean;
onClose: () => void;
availableTypes: string[];
existingIds: string[];
}
function AddInstanceDialog({ open, onClose, availableTypes, existingIds }: AddInstanceDialogProps) {
const [instanceId, setInstanceId] = useState("");
const [providerType, setProviderType] = useState(availableTypes[0] ?? "");
const [formValues, setFormValues] = useState<Record<string, string>>({});
const [idError, setIdError] = useState<string | null>(null);
const [apiError, setApiError] = useState<string | null>(null);
const createProvider = useCreateProvider();
const testProvider = useTestProvider();
const [testResult, setTestResult] = useState<{ ok: boolean; message: string } | null>(null);
const fields = PROVIDER_FIELDS[providerType] ?? [];
const handleTypeChange = (t: string) => {
setProviderType(t);
setFormValues({});
setTestResult(null);
};
const validateId = (id: string): string | null => {
if (!id) return "ID is required";
if (!isValidInstanceId(id)) return "Only alphanumeric characters and hyphens, 140 chars";
if (existingIds.includes(id)) return "An instance with this ID already exists";
return null;
};
const handleCreate = async () => {
const err = validateId(instanceId);
if (err) { setIdError(err); return; }
setIdError(null);
setApiError(null);
try {
await createProvider.mutateAsync({
id: instanceId,
provider_type: providerType,
config_json: formValues,
enabled: true,
});
onClose();
setInstanceId("");
setFormValues({});
setTestResult(null);
} catch (e: unknown) {
if (e instanceof ApiRequestError && e.status === 409) {
setIdError("An instance with this ID already exists");
} else if (e instanceof ApiRequestError) {
setApiError(e.message);
}
}
};
const handleTest = async () => {
setTestResult(null);
const result = await testProvider.mutateAsync({
provider_type: providerType,
config_json: formValues,
});
setTestResult(result);
};
return (
<Dialog open={open} onOpenChange={(v) => { if (!v) onClose(); }}>
<DialogContent className="border-zinc-800 bg-zinc-950 text-zinc-100 max-w-md">
<DialogHeader>
<DialogTitle className="text-sm font-semibold">Add Provider Instance</DialogTitle>
</DialogHeader>
<div className="space-y-4 pt-2">
<div className="space-y-1">
<Label className="text-xs text-zinc-400">
Instance ID <span className="text-red-400">*</span>
</Label>
<Input
value={instanceId}
onChange={(e) => {
setInstanceId(e.target.value);
setIdError(null);
}}
placeholder="e.g. jellyfin-main"
className="h-8 border-zinc-700 bg-zinc-800 text-xs text-zinc-100 font-mono"
/>
{idError && <p className="text-xs text-red-400">{idError}</p>}
<p className="text-xs text-zinc-600">Alphanumeric + hyphens, 140 chars</p>
</div>
<div className="space-y-1">
<Label className="text-xs text-zinc-400">
Provider Type <span className="text-red-400">*</span>
</Label>
<Select value={providerType} onValueChange={handleTypeChange}>
<SelectTrigger className="h-8 border-zinc-700 bg-zinc-800 text-xs text-zinc-100">
<SelectValue />
</SelectTrigger>
<SelectContent className="border-zinc-700 bg-zinc-900">
{availableTypes.map((t) => (
<SelectItem key={t} value={t} className="text-xs capitalize text-zinc-200">
{t.replace("_", " ")}
</SelectItem>
))}
</SelectContent>
</Select>
</div>
{fields.map((field) => (
<div key={field.key} className="space-y-1">
<Label className="text-xs text-zinc-400">
{field.label}
{field.required && <span className="ml-1 text-red-400">*</span>}
</Label>
<Input
type={field.type ?? "text"}
value={formValues[field.key] ?? ""}
onChange={(e) =>
setFormValues((prev) => ({ ...prev, [field.key]: e.target.value }))
}
placeholder={
field.type === "password" ? "••••••••" : `Enter ${field.label.toLowerCase()}`
}
className="h-8 border-zinc-700 bg-zinc-800 text-xs text-zinc-100"
/>
</div>
))}
{testResult && (
<div
className={`flex items-center gap-2 rounded px-3 py-2 text-xs ${
testResult.ok
? "bg-green-950/30 text-green-400"
: "bg-red-950/30 text-red-400"
}`}
>
{testResult.ok ? (
<CheckCircle className="h-3.5 w-3.5" />
) : (
<XCircle className="h-3.5 w-3.5" />
)}
{testResult.message}
</div>
)}
{apiError && (
<p className="text-xs text-red-400">{apiError}</p>
)}
<div className="flex gap-2 pt-1">
<Button
variant="outline"
size="sm"
onClick={handleTest}
disabled={testProvider.isPending}
className="border-zinc-700 text-xs"
>
{testProvider.isPending && <Loader2 className="mr-1 h-3 w-3 animate-spin" />}
Test
</Button>
<Button
size="sm"
onClick={handleCreate}
disabled={createProvider.isPending}
className="text-xs"
>
{createProvider.isPending && <Loader2 className="mr-1 h-3 w-3 animate-spin" />}
Create
</Button>
</div>
</div>
</DialogContent>
</Dialog>
);
}
// ---------------------------------------------------------------------------
// Panel
// ---------------------------------------------------------------------------
export function ProviderSettingsPanel() { export function ProviderSettingsPanel() {
const { data: config } = useConfig(); const { data: config } = useConfig();
const { data: providerConfigs = [] } = useProviderConfigs(); const { data: providerConfigs = [] } = useProviderConfigs();
const [addOpen, setAddOpen] = useState(false);
const availableTypes = config?.available_provider_types ?? []; const availableTypes = config?.available_provider_types ?? [];
const existingIds = providerConfigs.map((c) => c.id);
return ( return (
<div className="space-y-4 p-6"> <div className="space-y-4 p-6">
<div className="flex items-center justify-between">
<div> <div>
<h2 className="text-sm font-semibold text-zinc-100">Provider Configuration</h2> <h2 className="text-sm font-semibold text-zinc-100">Provider Instances</h2>
<p className="mt-0.5 text-xs text-zinc-500"> <p className="mt-0.5 text-xs text-zinc-500">
Configure media providers. Requires <code>CONFIG_SOURCE=db</code> on the server. Manage media provider instances. Requires <code>CONFIG_SOURCE=db</code> on the server.
</p> </p>
</div> </div>
{availableTypes.length > 0 && (
<Button
size="sm"
variant="outline"
onClick={() => setAddOpen(true)}
className="border-zinc-700 text-xs gap-1"
>
<Plus className="h-3.5 w-3.5" />
Add Instance
</Button>
)}
</div>
{availableTypes.length === 0 ? ( {availableTypes.length === 0 ? (
<p className="text-xs text-zinc-500">No providers available in this build.</p> <p className="text-xs text-zinc-500">No providers available in this build.</p>
) : providerConfigs.length === 0 ? (
<p className="text-xs text-zinc-500">
No provider instances configured. Click &quot;Add Instance&quot; to get started.
</p>
) : ( ) : (
<div className="space-y-4"> <div className="space-y-4">
{availableTypes.map((type) => { {providerConfigs.map((c) => (
const existing = providerConfigs.find((c) => c.provider_type === type); <ProviderCard key={c.id} config={c} existingIds={existingIds} />
return ( ))}
<ProviderCard
key={type}
providerType={type}
existingConfig={existing}
/>
);
})}
</div> </div>
)} )}
<AddInstanceDialog
open={addOpen}
onClose={() => setAddOpen(false)}
availableTypes={availableTypes}
existingIds={existingIds}
/>
</div> </div>
); );
} }

View File

@@ -11,6 +11,7 @@ import {
Download, Download,
ChevronUp, ChevronUp,
ChevronDown, ChevronDown,
History,
} from "lucide-react"; } from "lucide-react";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { useActiveSchedule } from "@/hooks/use-channels"; import { useActiveSchedule } from "@/hooks/use-channels";
@@ -29,6 +30,7 @@ interface ChannelCardProps {
onExport: () => void; onExport: () => void;
onMoveUp: () => void; onMoveUp: () => void;
onMoveDown: () => void; onMoveDown: () => void;
onScheduleHistory: () => void;
} }
function useScheduleStatus(channelId: string) { function useScheduleStatus(channelId: string) {
@@ -69,9 +71,12 @@ export function ChannelCard({
onExport, onExport,
onMoveUp, onMoveUp,
onMoveDown, onMoveDown,
onScheduleHistory,
}: ChannelCardProps) { }: ChannelCardProps) {
const [confirmOpen, setConfirmOpen] = useState(false); const [confirmOpen, setConfirmOpen] = useState(false);
const blockCount = channel.schedule_config.blocks.length; const blockCount = Object.values(channel.schedule_config.day_blocks).reduce(
(sum, blocks) => sum + blocks.length, 0
);
const { status, label } = useScheduleStatus(channel.id); const { status, label } = useScheduleStatus(channel.id);
const scheduleColor = const scheduleColor =
@@ -183,6 +188,15 @@ export function ChannelCard({
> >
<CalendarDays className="size-3.5" /> <CalendarDays className="size-3.5" />
</Button> </Button>
<Button
size="icon-sm"
variant="ghost"
onClick={onScheduleHistory}
title="Schedule history"
className="text-zinc-600 hover:text-zinc-200"
>
<History className="size-3.5" />
</Button>
<Button <Button
size="icon-sm" size="icon-sm"
asChild asChild

View File

@@ -0,0 +1,119 @@
'use client'
import { useState } from 'react'
import { Sheet, SheetContent, SheetHeader, SheetTitle } from '@/components/ui/sheet'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { useConfigHistory, usePinSnapshot, useRestoreConfig } from '@/hooks/use-channels'
import { cn } from '@/lib/utils'
interface Props {
  channelId: string
  open: boolean
  onOpenChange: (open: boolean) => void
}

/**
 * Side sheet listing config snapshots for a channel, newest first.
 * The newest (current) snapshot can be pinned with a label; older
 * snapshots can be restored.
 */
export function ConfigHistorySheet({ channelId, open, onOpenChange }: Props) {
  const { data: snapshots } = useConfigHistory(channelId)
  const pin = usePinSnapshot()
  const restore = useRestoreConfig()
  // Snapshot id whose pin-label input is currently open, or null when none.
  const [pinningId, setPinningId] = useState<string | null>(null)
  const [pinLabel, setPinLabel] = useState('')
  return (
    <Sheet open={open} onOpenChange={onOpenChange}>
      <SheetContent>
        <SheetHeader>
          <SheetTitle>Config history</SheetTitle>
        </SheetHeader>
        <div className="flex flex-col gap-2 mt-4 overflow-y-auto px-4 pb-4">
          {(snapshots ?? []).map((snap, i) => (
            <div
              key={snap.id}
              className={cn(
                'flex items-center gap-3 p-3 rounded border',
                i === 0 ? 'border-green-700 bg-green-950/30' : 'border-border'
              )}
            >
              <div className="flex-1 min-w-0">
                <div className="text-sm font-medium">
                  {/* NOTE(review): the separator glyph before the date was
                      missing (dropped character) — restored as an em dash. */}
                  v{snap.version_num} —{' '}
                  {new Date(snap.created_at).toLocaleString()}
                  {i === 0 && (
                    <span className="ml-2 text-xs text-green-400 bg-green-950 px-1.5 py-0.5 rounded">
                      current
                    </span>
                  )}
                </div>
                {snap.label ? (
                  <div className="text-xs text-amber-400 mt-0.5">📌 {snap.label}</div>
                ) : (
                  <div className="text-xs text-muted-foreground">Auto-saved</div>
                )}
              </div>
              {i === 0 && (
                pinningId === snap.id ? (
                  <div className="flex gap-1 items-center">
                    <Input
                      value={pinLabel}
                      onChange={e => setPinLabel(e.target.value)}
                      className="h-7 text-xs w-32"
                      placeholder="label…"
                      onKeyDown={e => {
                        if (e.key === 'Enter') {
                          pin.mutate({ channelId, snapId: snap.id, label: pinLabel })
                          setPinningId(null)
                        }
                        if (e.key === 'Escape') setPinningId(null)
                      }}
                    />
                    <Button
                      size="sm"
                      onClick={() => {
                        pin.mutate({ channelId, snapId: snap.id, label: pinLabel })
                        setPinningId(null)
                      }}
                    >
                      Save
                    </Button>
                    <Button size="sm" variant="ghost" onClick={() => setPinningId(null)}>
                      {/* Fix: this button rendered with no label at all. */}
                      Cancel
                    </Button>
                  </div>
                ) : (
                  <Button
                    variant="outline"
                    size="sm"
                    onClick={() => {
                      setPinningId(snap.id)
                      setPinLabel(snap.label ?? '')
                    }}
                  >
                    Pin
                  </Button>
                )
              )}
              {i > 0 && (
                <Button
                  variant="outline"
                  size="sm"
                  onClick={() => restore.mutate({ channelId, snapId: snap.id })}
                  disabled={restore.isPending}
                >
                  Restore
                </Button>
              )}
            </div>
          ))}
          {(snapshots ?? []).length === 0 && (
            <p className="text-sm text-muted-foreground text-center py-8">
              No history yet. History is created automatically when you save changes.
            </p>
          )}
        </div>
      </SheetContent>
    </Sheet>
  )
}

View File

@@ -15,6 +15,7 @@ import { RecyclePolicyEditor } from "./recycle-policy-editor";
import { WebhookEditor } from "./webhook-editor"; import { WebhookEditor } from "./webhook-editor";
import { AccessSettingsEditor } from "./access-settings-editor"; import { AccessSettingsEditor } from "./access-settings-editor";
import { LogoEditor } from "./logo-editor"; import { LogoEditor } from "./logo-editor";
import { ConfigHistorySheet } from "./config-history-sheet";
import { useChannelForm } from "@/hooks/use-channel-form"; import { useChannelForm } from "@/hooks/use-channel-form";
import { channelFormSchema, extractErrors } from "@/lib/schemas"; import { channelFormSchema, extractErrors } from "@/lib/schemas";
import type { FieldErrors } from "@/lib/schemas"; import type { FieldErrors } from "@/lib/schemas";
@@ -27,7 +28,10 @@ import type {
MediaFilter, MediaFilter,
ProviderInfo, ProviderInfo,
RecyclePolicy, RecyclePolicy,
Weekday,
} from "@/lib/types"; } from "@/lib/types";
import { WEEKDAYS, WEEKDAY_LABELS } from "@/lib/types";
import { cn } from "@/lib/utils";
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Local shared primitives (only used inside this file) // Local shared primitives (only used inside this file)
@@ -334,7 +338,7 @@ interface EditChannelSheetProps {
name: string; name: string;
description: string; description: string;
timezone: string; timezone: string;
schedule_config: { blocks: ProgrammingBlock[] }; schedule_config: { day_blocks: Record<Weekday, ProgrammingBlock[]> };
recycle_policy: RecyclePolicy; recycle_policy: RecyclePolicy;
auto_schedule: boolean; auto_schedule: boolean;
access_mode?: AccessMode; access_mode?: AccessMode;
@@ -364,6 +368,29 @@ export function EditChannelSheet({
}: EditChannelSheetProps) { }: EditChannelSheetProps) {
const form = useChannelForm(channel); const form = useChannelForm(channel);
const [fieldErrors, setFieldErrors] = useState<FieldErrors>({}); const [fieldErrors, setFieldErrors] = useState<FieldErrors>({});
const [activeDay, setActiveDay] = useState<Weekday>('monday');
const [copyTarget, setCopyTarget] = useState<Weekday | 'all' | ''>('');
const [configHistoryOpen, setConfigHistoryOpen] = useState(false);
const handleCopyTo = () => {
if (!copyTarget) return;
const sourceBlocks = form.dayBlocks[activeDay] ?? [];
if (copyTarget === 'all') {
const newDayBlocks = { ...form.dayBlocks };
for (const day of WEEKDAYS) {
if (day !== activeDay) {
newDayBlocks[day] = sourceBlocks.map(b => ({ ...b, id: crypto.randomUUID() }));
}
}
form.setDayBlocks(newDayBlocks);
} else {
form.setDayBlocks({
...form.dayBlocks,
[copyTarget]: sourceBlocks.map(b => ({ ...b, id: crypto.randomUUID() })),
});
}
setCopyTarget('');
};
const handleSubmit = (e: React.FormEvent) => { const handleSubmit = (e: React.FormEvent) => {
e.preventDefault(); e.preventDefault();
@@ -373,7 +400,7 @@ export function EditChannelSheet({
name: form.name, name: form.name,
description: form.description, description: form.description,
timezone: form.timezone, timezone: form.timezone,
blocks: form.blocks, day_blocks: form.dayBlocks,
recycle_policy: form.recyclePolicy, recycle_policy: form.recyclePolicy,
auto_schedule: form.autoSchedule, auto_schedule: form.autoSchedule,
access_mode: form.accessMode, access_mode: form.accessMode,
@@ -390,7 +417,7 @@ export function EditChannelSheet({
name: form.name, name: form.name,
description: form.description, description: form.description,
timezone: form.timezone, timezone: form.timezone,
schedule_config: { blocks: form.blocks }, schedule_config: { day_blocks: form.dayBlocks },
recycle_policy: form.recyclePolicy, recycle_policy: form.recyclePolicy,
auto_schedule: form.autoSchedule, auto_schedule: form.autoSchedule,
access_mode: form.accessMode !== "public" ? form.accessMode : "public", access_mode: form.accessMode !== "public" ? form.accessMode : "public",
@@ -410,6 +437,7 @@ export function EditChannelSheet({
}); });
}; };
return ( return (
<Sheet open={open} onOpenChange={onOpenChange}> <Sheet open={open} onOpenChange={onOpenChange}>
<SheetContent <SheetContent
@@ -542,6 +570,47 @@ export function EditChannelSheet({
{/* Right: block editor */} {/* Right: block editor */}
<div className="flex flex-1 flex-col overflow-hidden"> <div className="flex flex-1 flex-col overflow-hidden">
{/* Day tab bar */}
<div className="shrink-0 flex items-center border-b border-zinc-800 overflow-x-auto">
{WEEKDAYS.map(day => (
<button
key={day}
type="button"
onClick={() => { setActiveDay(day); form.setSelectedBlockId(null); }}
className={cn(
'px-4 py-2.5 text-sm whitespace-nowrap transition-colors shrink-0',
activeDay === day
? 'border-b-2 border-blue-400 text-blue-400'
: 'text-zinc-500 hover:text-zinc-300'
)}
>
{WEEKDAY_LABELS[day]}
</button>
))}
{/* Copy-to control */}
<div className="ml-auto flex items-center gap-1.5 px-3 py-1 text-xs text-zinc-500 shrink-0">
<span>Copy to</span>
<select
value={copyTarget}
onChange={e => setCopyTarget(e.target.value as Weekday | 'all' | '')}
className="bg-zinc-800 border border-zinc-700 rounded px-1 py-0.5 text-xs text-zinc-300"
>
<option value="">day</option>
{WEEKDAYS.filter(d => d !== activeDay).map(d => (
<option key={d} value={d}>{WEEKDAY_LABELS[d]}</option>
))}
<option value="all">All days</option>
</select>
<button
type="button"
onClick={handleCopyTo}
className="bg-blue-900/40 border border-blue-700 text-blue-400 px-2 py-0.5 rounded text-xs hover:bg-blue-900/60"
>
Copy
</button>
</div>
</div>
<div className="shrink-0 space-y-3 border-b border-zinc-800 px-5 py-4"> <div className="shrink-0 space-y-3 border-b border-zinc-800 px-5 py-4">
<div className="flex items-center justify-between"> <div className="flex items-center justify-between">
<h3 className="text-xs font-semibold uppercase tracking-wider text-zinc-500"> <h3 className="text-xs font-semibold uppercase tracking-wider text-zinc-500">
@@ -551,31 +620,31 @@ export function EditChannelSheet({
type="button" type="button"
variant="outline" variant="outline"
size="xs" size="xs"
onClick={() => form.addBlock()} onClick={() => form.addBlock(activeDay)}
className="border-zinc-700 text-zinc-300 hover:text-zinc-100" className="border-zinc-700 text-zinc-300 hover:text-zinc-100"
> >
<Plus className="size-3" /> <Plus className="size-3" />
Add block Add block for {WEEKDAY_LABELS[activeDay]}
</Button> </Button>
</div> </div>
<BlockTimeline <BlockTimeline
blocks={form.blocks} blocks={form.dayBlocks[activeDay] ?? []}
selectedId={form.selectedBlockId} selectedId={form.selectedBlockId}
onSelect={form.setSelectedBlockId} onSelect={form.setSelectedBlockId}
onChange={form.setBlocks} onChange={(blocks) => form.setDayBlocks(prev => ({ ...prev, [activeDay]: blocks }))}
onCreateBlock={(startMins, durationMins) => onCreateBlock={(startMins, durationMins) =>
form.addBlock(startMins, durationMins) form.addBlock(activeDay, startMins, durationMins)
} }
/> />
{form.blocks.length === 0 ? ( {(form.dayBlocks[activeDay] ?? []).length === 0 ? (
<p className="rounded-md border border-dashed border-zinc-700 px-4 py-4 text-center text-xs text-zinc-600"> <p className="rounded-md border border-dashed border-zinc-700 px-4 py-4 text-center text-xs text-zinc-600">
No blocks yet. Drag on the timeline or click Add block. No blocks for {WEEKDAY_LABELS[activeDay]}. Drag on the timeline or click Add block.
</p> </p>
) : ( ) : (
<div className="max-h-48 space-y-1 overflow-y-auto"> <div className="max-h-48 space-y-1 overflow-y-auto">
{form.blocks.map((block, idx) => ( {(form.dayBlocks[activeDay] ?? []).map((block, idx) => (
<button <button
key={block.id} key={block.id}
type="button" type="button"
@@ -603,7 +672,7 @@ export function EditChannelSheet({
role="button" role="button"
onClick={(e) => { onClick={(e) => {
e.stopPropagation(); e.stopPropagation();
form.removeBlock(idx); form.removeBlock(activeDay, idx);
}} }}
className="rounded p-1 text-zinc-600 hover:bg-zinc-700 hover:text-red-400" className="rounded p-1 text-zinc-600 hover:bg-zinc-700 hover:text-red-400"
> >
@@ -624,11 +693,12 @@ export function EditChannelSheet({
</div> </div>
); );
} }
const selectedIdx = form.blocks.findIndex( const activeDayBlocks = form.dayBlocks[activeDay] ?? [];
const selectedIdx = activeDayBlocks.findIndex(
(b) => b.id === form.selectedBlockId, (b) => b.id === form.selectedBlockId,
); );
const selectedBlock = const selectedBlock =
selectedIdx >= 0 ? form.blocks[selectedIdx] : null; selectedIdx >= 0 ? activeDayBlocks[selectedIdx] : null;
if (!selectedBlock) { if (!selectedBlock) {
return ( return (
<div className="flex h-full items-center justify-center text-sm text-zinc-600"> <div className="flex h-full items-center justify-center text-sm text-zinc-600">
@@ -642,7 +712,7 @@ export function EditChannelSheet({
index={selectedIdx} index={selectedIdx}
errors={fieldErrors} errors={fieldErrors}
providers={providers} providers={providers}
onChange={(b) => form.updateBlock(selectedIdx, b)} onChange={(b) => form.updateBlock(activeDay, selectedIdx, b)}
/> />
); );
})()} })()}
@@ -657,6 +727,15 @@ export function EditChannelSheet({
</p> </p>
)} )}
<div className="ml-auto flex gap-2"> <div className="ml-auto flex gap-2">
<Button
type="button"
variant="outline"
size="sm"
onClick={() => setConfigHistoryOpen(true)}
className="border-zinc-700 text-zinc-400 hover:text-zinc-100"
>
Config history
</Button>
<Button <Button
type="button" type="button"
variant="ghost" variant="ghost"
@@ -670,6 +749,13 @@ export function EditChannelSheet({
</Button> </Button>
</div> </div>
</div> </div>
{channel && (
<ConfigHistorySheet
channelId={channel.id}
open={configHistoryOpen}
onOpenChange={setConfigHistoryOpen}
/>
)}
</form> </form>
</SheetContent> </SheetContent>
</Sheet> </Sheet>

View File

@@ -0,0 +1,94 @@
'use client'
import { useState } from 'react'
import {
Dialog,
DialogContent,
DialogHeader,
DialogTitle,
} from '@/components/ui/dialog'
import { Button } from '@/components/ui/button'
import { useScheduleHistory, useRollbackSchedule } from '@/hooks/use-channels'
interface Props {
  channelId: string
  open: boolean
  onOpenChange: (open: boolean) => void
}

// Formats a schedule validity window as "from – until" in the viewer's locale.
// Fix: the separator between the two dates had been dropped, so the range
// rendered as two dates fused together with only a space.
const fmtDateRange = (from: string, until: string) =>
  `${new Date(from).toLocaleDateString()} – ${new Date(until).toLocaleDateString()}`
/**
 * Dialog listing generated schedule history for a channel, newest first.
 * The newest entry is marked "active"; any older entry offers an inline
 * confirm-then-rollback action.
 */
export function ScheduleHistoryDialog({ channelId, open, onOpenChange }: Props) {
  const { data: entries } = useScheduleHistory(channelId)
  const rollback = useRollbackSchedule()
  // Entry id awaiting rollback confirmation, or null when none is pending.
  const [confirmId, setConfirmId] = useState<string | null>(null)
  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent>
        <DialogHeader>
          <DialogTitle>Schedule history</DialogTitle>
        </DialogHeader>
        <div className="flex flex-col gap-2 mt-2 max-h-[60vh] overflow-y-auto">
          {(entries ?? []).map((entry, i) => (
            <div
              key={entry.id}
              className="flex items-center gap-3 p-3 rounded border border-border"
            >
              <div className="flex-1 min-w-0">
                <div className="text-sm font-medium">
                  Gen #{entry.generation}
                  {i === 0 && (
                    <span className="ml-2 text-xs text-green-400 bg-green-950 px-1.5 py-0.5 rounded">
                      active
                    </span>
                  )}
                </div>
                <div className="text-xs text-muted-foreground mt-0.5">
                  {fmtDateRange(entry.valid_from, entry.valid_until)}
                </div>
              </div>
              {i > 0 && (
                confirmId === entry.id ? (
                  <div className="flex items-center gap-1 text-xs">
                    <span className="text-amber-400 whitespace-nowrap">Roll back to gen #{entry.generation}?</span>
                    <Button
                      size="sm"
                      variant="destructive"
                      disabled={rollback.isPending}
                      onClick={() => {
                        // Fire the rollback, then close the confirm UI and
                        // dialog; completion is tracked by the mutation hook.
                        rollback.mutate({ channelId, genId: entry.id })
                        setConfirmId(null)
                        onOpenChange(false)
                      }}
                    >
                      Confirm
                    </Button>
                    <Button size="sm" variant="ghost" onClick={() => setConfirmId(null)}>
                      Cancel
                    </Button>
                  </div>
                ) : (
                  <Button
                    size="sm"
                    variant="outline"
                    onClick={() => setConfirmId(entry.id)}
                  >
                    Rollback to here
                  </Button>
                )
              )}
            </div>
          ))}
          {(entries ?? []).length === 0 && (
            <p className="text-sm text-muted-foreground text-center py-8">
              No schedule history yet. Generate a schedule to get started.
            </p>
          )}
        </div>
      </DialogContent>
    </Dialog>
  )
}

View File

@@ -18,6 +18,8 @@ import {
useTranscodeStats, useTranscodeStats,
useClearTranscodeCache, useClearTranscodeCache,
} from "@/hooks/use-transcode"; } from "@/hooks/use-transcode";
import { useAdminSettings, useUpdateAdminSettings } from "@/hooks/use-admin-settings";
import { useTriggerSync, useLibrarySyncStatus } from "@/hooks/use-library-sync";
import { toast } from "sonner"; import { toast } from "sonner";
interface Props { interface Props {
@@ -39,6 +41,14 @@ export function TranscodeSettingsDialog({ open, onOpenChange }: Props) {
const updateSettings = useUpdateTranscodeSettings(); const updateSettings = useUpdateTranscodeSettings();
const clearCache = useClearTranscodeCache(); const clearCache = useClearTranscodeCache();
const { data: adminSettings } = useAdminSettings();
const updateAdminSettings = useUpdateAdminSettings();
const triggerSync = useTriggerSync();
const { data: syncStatuses } = useLibrarySyncStatus();
const syncInterval = adminSettings?.library_sync_interval_hours ?? 6;
const [syncIntervalInput, setSyncIntervalInput] = useState<number | null>(null);
const displayInterval = syncIntervalInput ?? syncInterval;
const [ttl, setTtl] = useState<number>(24); const [ttl, setTtl] = useState<number>(24);
const [confirmClear, setConfirmClear] = useState(false); const [confirmClear, setConfirmClear] = useState(false);
@@ -130,6 +140,42 @@ export function TranscodeSettingsDialog({ open, onOpenChange }: Props) {
)} )}
</div> </div>
<div className="border-t border-zinc-800 pt-4 mt-4">
<h3 className="text-sm font-medium mb-3">Library sync</h3>
<div className="flex items-center gap-3 mb-3">
<label className="text-xs text-zinc-400 w-32">Sync interval (hours)</label>
<Input
type="number"
min={1}
max={168}
value={displayInterval}
onChange={e => setSyncIntervalInput(Number(e.target.value))}
className="h-8 w-24 text-xs bg-zinc-800 border-zinc-700 text-zinc-100"
/>
<Button
size="sm"
variant="outline"
onClick={() => updateAdminSettings.mutate({ library_sync_interval_hours: displayInterval })}
disabled={updateAdminSettings.isPending}
className="border-zinc-700 bg-transparent text-zinc-300 hover:bg-zinc-800 hover:text-zinc-100"
>
Save
</Button>
</div>
<Button
size="sm"
onClick={() => triggerSync.mutate()}
disabled={triggerSync.isPending || syncStatuses?.some(s => s.status === "running")}
>
{triggerSync.isPending ? "Triggering…" : "Sync now"}
</Button>
{syncStatuses?.map(s => (
<p key={s.id} className="mt-1 text-xs text-zinc-500">
{s.provider_id}: {s.status} {s.items_found} items
</p>
))}
</div>
<DialogFooter> <DialogFooter>
<Button <Button
variant="outline" variant="outline"

View File

@@ -28,10 +28,12 @@ import {
} from "./components/import-channel-dialog"; } from "./components/import-channel-dialog";
import { IptvExportDialog } from "./components/iptv-export-dialog"; import { IptvExportDialog } from "./components/iptv-export-dialog";
import { TranscodeSettingsDialog } from "./components/transcode-settings-dialog"; import { TranscodeSettingsDialog } from "./components/transcode-settings-dialog";
import { ScheduleHistoryDialog } from "./components/schedule-history-dialog";
import type { import type {
ChannelResponse, ChannelResponse,
ProgrammingBlock, ProgrammingBlock,
RecyclePolicy, RecyclePolicy,
Weekday,
} from "@/lib/types"; } from "@/lib/types";
export default function DashboardPage() { export default function DashboardPage() {
@@ -58,6 +60,7 @@ export default function DashboardPage() {
const [editChannel, setEditChannel] = useState<ChannelResponse | null>(null); const [editChannel, setEditChannel] = useState<ChannelResponse | null>(null);
const [deleteTarget, setDeleteTarget] = useState<ChannelResponse | null>(null); const [deleteTarget, setDeleteTarget] = useState<ChannelResponse | null>(null);
const [scheduleChannel, setScheduleChannel] = useState<ChannelResponse | null>(null); const [scheduleChannel, setScheduleChannel] = useState<ChannelResponse | null>(null);
const [scheduleHistoryChannelId, setScheduleHistoryChannelId] = useState<string | null>(null);
const handleCreate = (data: { const handleCreate = (data: {
name: string; name: string;
@@ -84,7 +87,7 @@ export default function DashboardPage() {
name: string; name: string;
description: string; description: string;
timezone: string; timezone: string;
schedule_config: { blocks: ProgrammingBlock[] }; schedule_config: { day_blocks: Record<Weekday, ProgrammingBlock[]> };
recycle_policy: RecyclePolicy; recycle_policy: RecyclePolicy;
auto_schedule: boolean; auto_schedule: boolean;
access_mode?: import("@/lib/types").AccessMode; access_mode?: import("@/lib/types").AccessMode;
@@ -185,6 +188,7 @@ export default function DashboardPage() {
onExport={() => exportChannel(channel)} onExport={() => exportChannel(channel)}
onMoveUp={() => handleMoveUp(channel.id)} onMoveUp={() => handleMoveUp(channel.id)}
onMoveDown={() => handleMoveDown(channel.id)} onMoveDown={() => handleMoveDown(channel.id)}
onScheduleHistory={() => setScheduleHistoryChannelId(channel.id)}
/> />
))} ))}
</div> </div>
@@ -245,6 +249,14 @@ export default function DashboardPage() {
}} }}
/> />
{scheduleHistoryChannelId && (
<ScheduleHistoryDialog
channelId={scheduleHistoryChannelId}
open={!!scheduleHistoryChannelId}
onOpenChange={open => !open && setScheduleHistoryChannelId(null)}
/>
)}
{deleteTarget && ( {deleteTarget && (
<DeleteChannelDialog <DeleteChannelDialog
channelName={deleteTarget.name} channelName={deleteTarget.name}

View File

@@ -6,6 +6,7 @@ import { AdminNavLink } from "./components/admin-nav-link";
const NAV_LINKS = [ const NAV_LINKS = [
{ href: "/tv", label: "TV" }, { href: "/tv", label: "TV" },
{ href: "/guide", label: "Guide" }, { href: "/guide", label: "Guide" },
{ href: "/library", label: "Library" },
{ href: "/dashboard", label: "Dashboard" }, { href: "/dashboard", label: "Dashboard" },
{ href: "/docs", label: "Docs" }, { href: "/docs", label: "Docs" },
]; ];

View File

@@ -0,0 +1,111 @@
"use client";
import { useState, useMemo } from "react";
import { Button } from "@/components/ui/button";
import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogFooter } from "@/components/ui/dialog";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { useChannels, useChannel, useUpdateChannel } from "@/hooks/use-channels";
import type { LibraryItemFull, ScheduleConfig } from "@/lib/types";
import { WEEKDAYS } from "@/lib/types";
interface Props {
  // Library items currently selected in the grid.
  selectedItems: LibraryItemFull[];
}

/**
 * "Add to block" dialog: appends the currently selected library items to a
 * manual programming block on one of the channels.
 */
export function AddToBlockDialog({ selectedItems }: Props) {
  const [open, setOpen] = useState(false);
  const [channelId, setChannelId] = useState("");
  const [blockId, setBlockId] = useState("");
  const { data: channels } = useChannels();
  const { data: channel } = useChannel(channelId);
  const updateChannel = useUpdateChannel();

  // Manual blocks of the selected channel, deduplicated across weekdays
  // (the same block id may appear on several days).
  const manualBlocks = useMemo(() => {
    if (!channel) return [];
    const seen = new Set<string>();
    const result: { id: string; name: string }[] = [];
    for (const day of WEEKDAYS) {
      for (const block of channel.schedule_config.day_blocks[day] ?? []) {
        if (block.content.type === "manual" && !seen.has(block.id)) {
          seen.add(block.id);
          result.push({ id: block.id, name: block.name });
        }
      }
    }
    return result;
  }, [channel]);

  async function handleConfirm() {
    if (!channel || !blockId) return;
    // Rebuild day_blocks immutably, appending the selected item ids to every
    // occurrence of the chosen manual block (it may exist on several days).
    const updatedDayBlocks = { ...channel.schedule_config.day_blocks };
    for (const day of WEEKDAYS) {
      updatedDayBlocks[day] = (updatedDayBlocks[day] ?? []).map(block => {
        if (block.id !== blockId || block.content.type !== "manual") return block;
        return {
          ...block,
          content: {
            ...block.content,
            items: [...block.content.items, ...selectedItems.map(i => i.id)],
          },
        };
      });
    }
    const scheduleConfig: ScheduleConfig = { day_blocks: updatedDayBlocks };
    try {
      await updateChannel.mutateAsync({
        id: channelId,
        data: { schedule_config: scheduleConfig },
      });
      setOpen(false);
    } catch {
      // Fix: a failed update previously surfaced as an unhandled promise
      // rejection. Keep the dialog open so the user can retry; the failure
      // itself is exposed via updateChannel.isError / .error.
    }
  }

  return (
    <>
      <Button size="sm" variant="outline" onClick={() => setOpen(true)}>Add to block</Button>
      <Dialog open={open} onOpenChange={setOpen}>
        <DialogContent className="max-w-sm">
          <DialogHeader><DialogTitle>Add to existing block</DialogTitle></DialogHeader>
          <div className="flex flex-col gap-4">
            <div>
              <p className="mb-1.5 text-xs text-zinc-400">Channel</p>
              <Select value={channelId} onValueChange={v => { setChannelId(v); setBlockId(""); }}>
                <SelectTrigger><SelectValue placeholder="Select channel…" /></SelectTrigger>
                <SelectContent>
                  {channels?.map(c => (
                    <SelectItem key={c.id} value={c.id}>{c.name}</SelectItem>
                  ))}
                </SelectContent>
              </Select>
            </div>
            {channelId && (
              <div>
                <p className="mb-1.5 text-xs text-zinc-400">Manual block</p>
                {manualBlocks.length === 0 ? (
                  <p className="text-xs text-zinc-500">No manual blocks in this channel.</p>
                ) : (
                  <Select value={blockId} onValueChange={setBlockId}>
                    <SelectTrigger><SelectValue placeholder="Select block…" /></SelectTrigger>
                    <SelectContent>
                      {manualBlocks.map(b => (
                        <SelectItem key={b.id} value={b.id}>{b.name}</SelectItem>
                      ))}
                    </SelectContent>
                  </Select>
                )}
              </div>
            )}
            <p className="text-xs text-zinc-500">Adding {selectedItems.length} item(s) to selected block.</p>
          </div>
          <DialogFooter>
            <Button variant="outline" onClick={() => setOpen(false)}>Cancel</Button>
            <Button disabled={!blockId || updateChannel.isPending} onClick={handleConfirm}>
              {updateChannel.isPending ? "Saving…" : "Add items"}
            </Button>
          </DialogFooter>
        </DialogContent>
      </Dialog>
    </>
  );
}

View File

@@ -0,0 +1,41 @@
interface Props {
series?: string;
season?: number;
onNavigate: (target: "root" | "series") => void;
}
export function BreadcrumbNav({ series, season, onNavigate }: Props) {
return (
<nav className="flex items-center gap-1 px-1 py-2 text-sm">
<button
type="button"
className="text-zinc-400 hover:text-zinc-100 transition-colors"
onClick={() => onNavigate("root")}
>
Library
</button>
{series && (
<>
<span className="text-zinc-600"></span>
{season != null ? (
<button
type="button"
className="text-zinc-400 hover:text-zinc-100 transition-colors"
onClick={() => onNavigate("series")}
>
{series}
</button>
) : (
<span className="text-zinc-100">{series}</span>
)}
</>
)}
{season != null && (
<>
<span className="text-zinc-600"></span>
<span className="text-zinc-100">Season {season}</span>
</>
)}
</nav>
);
}

View File

@@ -0,0 +1,183 @@
"use client";
import { useLibraryShows } from "@/hooks/use-library-shows";
import { useLibrarySeasons } from "@/hooks/use-library-seasons";
import { LibraryItemCard } from "./library-item-card";
import { ShowTile } from "./show-tile";
import { SeasonTile } from "./season-tile";
import { BreadcrumbNav } from "./breadcrumb-nav";
import { ScheduleFromLibraryDialog } from "./schedule-from-library-dialog";
import { AddToBlockDialog } from "./add-to-block-dialog";
import { Button } from "@/components/ui/button";
import type { LibraryItemFull, ShowSummary } from "@/lib/types";
import type { LibrarySearchParams } from "@/hooks/use-library-search";
// Drilldown position within grouped view: null = top level,
// { series } = that show's season list, { series, season } = that season's episodes.
type Drilldown = null | { series: string } | { series: string; season: number };
interface Props {
// Current page of search results (items, not shows).
items: LibraryItemFull[];
// Total result count, used for pagination.
total: number;
page: number;
pageSize: number;
isLoading: boolean;
// Ids of individually selected items.
selected: Set<string>;
onToggleSelect: (id: string) => void;
onPageChange: (page: number) => void;
// Selected items resolved to full records, passed to the action dialogs.
selectedItems: LibraryItemFull[];
viewMode: "grouped" | "flat";
drilldown: Drilldown;
onDrilldown: (next: Drilldown) => void;
// Active search filter — also reused to scope the shows query in grouped mode.
filter: LibrarySearchParams;
// Whole-show selection (grouped top level only).
selectedShows: ShowSummary[];
selectedShowNames: Set<string>;
onToggleSelectShow: (show: ShowSummary) => void;
}
/**
 * Main library grid. Renders one of three levels:
 *  - grouped top level: show tiles plus non-episode items
 *  - season level: season tiles for the drilled-into show
 *  - episode level / flat mode: plain item cards with pagination
 * A floating action bar appears at the bottom while anything is selected.
 */
export function LibraryGrid({
items, total, page, pageSize, isLoading,
selected, onToggleSelect, onPageChange, selectedItems,
viewMode, drilldown, onDrilldown, filter,
selectedShows, selectedShowNames, onToggleSelectShow,
}: Props) {
const totalPages = Math.ceil(total / pageSize);
// Hooks for grouped mode (called unconditionally per React rules)
const showsFilter = {
q: filter.q,
genres: filter.genres,
provider: filter.provider,
};
const { data: showsData, isLoading: showsLoading } = useLibraryShows(showsFilter);
// Series whose seasons to fetch — non-null only at the season level of a grouped drilldown.
const seasonsSeries = (viewMode === "grouped" && drilldown !== null && !("season" in drilldown))
? drilldown.series
: null;
const { data: seasonsData, isLoading: seasonsLoading } = useLibrarySeasons(
seasonsSeries,
filter.provider,
);
// Exactly one of these three level flags is true at a time.
const isGroupedTopLevel = viewMode === "grouped" && drilldown === null;
const isSeasonLevel = viewMode === "grouped" && drilldown !== null && !("season" in drilldown);
const isEpisodeLevel = viewMode === "flat" || (viewMode === "grouped" && drilldown !== null && "season" in drilldown);
// Picks the grid body for the active level.
function renderContent() {
if (isGroupedTopLevel) {
// Only show TV show tiles when no type filter is active — "Movies"/"Shorts" should not include shows
const shows = !filter.type ? (showsData ?? []) : [];
const nonEpisodes = items.filter(i => i.content_type !== "episode");
const loading = showsLoading;
if (loading && shows.length === 0 && nonEpisodes.length === 0) {
return <p className="text-sm text-zinc-500">Loading</p>;
}
if (shows.length === 0 && nonEpisodes.length === 0) {
return <p className="text-sm text-zinc-500">No items found. Run a library sync to populate the library.</p>;
}
return (
<div className="grid grid-cols-2 gap-3 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6">
{shows.map(show => (
<ShowTile
key={show.series_name}
show={show}
selected={selectedShowNames.has(show.series_name)}
onToggle={() => onToggleSelectShow(show)}
onClick={() => onDrilldown({ series: show.series_name })}
/>
))}
{nonEpisodes.map(item => (
<LibraryItemCard
key={item.id}
item={item}
selected={selected.has(item.id)}
onToggle={() => onToggleSelect(item.id)}
/>
))}
</div>
);
}
if (isSeasonLevel && drilldown) {
const seasons = seasonsData ?? [];
if (seasonsLoading && seasons.length === 0) {
return <p className="text-sm text-zinc-500">Loading</p>;
}
if (seasons.length === 0) {
return <p className="text-sm text-zinc-500">No seasons found.</p>;
}
return (
<div className="grid grid-cols-2 gap-3 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6">
{/* Season tiles are navigation-only here: selection is pinned false with a no-op toggle. */}
{seasons.map(season => (
<SeasonTile
key={season.season_number}
season={season}
selected={false}
onToggle={() => {}}
onClick={() => onDrilldown({ series: drilldown.series, season: season.season_number })}
/>
))}
</div>
);
}
// Flat mode or episode-level drilldown
if (isLoading) {
return <p className="text-sm text-zinc-500">Loading</p>;
}
if (items.length === 0) {
return <p className="text-sm text-zinc-500">No items found. Run a library sync to populate the library.</p>;
}
return (
<div className="grid grid-cols-2 gap-3 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6">
{items.map(item => (
<LibraryItemCard
key={item.id}
item={item}
selected={selected.has(item.id)}
onToggle={() => onToggleSelect(item.id)}
/>
))}
</div>
);
}
// Pagination only makes sense for the flat item list (shows/seasons are not paged here).
const showPagination = isEpisodeLevel && totalPages > 1;
const totalSelected = selected.size + selectedShows.length;
return (
<div className="flex flex-1 flex-col min-h-0">
<div className="flex-1 overflow-y-auto p-6">
{drilldown && (
<BreadcrumbNav
series={"series" in drilldown ? drilldown.series : undefined}
season={"season" in drilldown ? drilldown.season : undefined}
onNavigate={target => {
if (target === "root") onDrilldown(null);
else if (target === "series" && drilldown && "series" in drilldown)
onDrilldown({ series: drilldown.series });
}}
/>
)}
{renderContent()}
</div>
{showPagination && (
<div className="flex items-center justify-between border-t border-zinc-800 px-6 py-3">
<p className="text-xs text-zinc-500">{total.toLocaleString()} items total</p>
<div className="flex gap-2">
<Button size="sm" variant="outline" disabled={page === 0} onClick={() => onPageChange(page - 1)}>Prev</Button>
<span className="flex items-center text-xs text-zinc-400">{page + 1} / {totalPages}</span>
<Button size="sm" variant="outline" disabled={page >= totalPages - 1} onClick={() => onPageChange(page + 1)}>Next</Button>
</div>
</div>
)}
{totalSelected > 0 && (
<div className="fixed bottom-6 left-1/2 -translate-x-1/2 flex items-center gap-3 rounded-full border border-zinc-700 bg-zinc-900 px-6 py-3 shadow-2xl">
<span className="text-sm text-zinc-300">{totalSelected} selected</span>
<ScheduleFromLibraryDialog selectedItems={selectedItems} selectedShows={selectedShows} />
{/* Add-to-block only applies to individual items, not whole-show selections. */}
{selected.size > 0 && <AddToBlockDialog selectedItems={selectedItems} />}
</div>
)}
</div>
);
}

View File

@@ -0,0 +1,52 @@
"use client";
import { useState } from "react";
import { Checkbox } from "@/components/ui/checkbox";
import type { LibraryItemFull } from "@/lib/types";
interface Props {
  item: LibraryItemFull;
  selected: boolean;
  onToggle: () => void;
}

/**
 * Grid card for a single library item: thumbnail, selection checkbox,
 * title, and a meta line (type / episode code plus duration).
 * Clicking anywhere on the card toggles selection.
 */
export function LibraryItemCard({ item, selected, onToggle }: Props) {
  const [thumbBroken, setThumbBroken] = useState(false);

  // Pre-compute the meta-line pieces.
  const totalMins = Math.ceil(item.duration_secs / 60);
  const durationLabel =
    totalMins >= 60 ? `${Math.floor(totalMins / 60)}h ${totalMins % 60}m` : `${totalMins}m`;
  const kindLabel =
    item.content_type === "episode" && item.series_name
      ? `${item.series_name} S${item.season_number ?? "?"}E${item.episode_number ?? "?"}`
      : item.content_type;
  const stateClasses = selected
    ? "border-violet-500 bg-violet-950/30"
    : "border-zinc-800 bg-zinc-900 hover:border-zinc-600";

  return (
    <div
      className={`group relative cursor-pointer rounded-lg border transition-colors ${stateClasses}`}
      onClick={onToggle}
    >
      <div className="aspect-video w-full overflow-hidden rounded-t-lg bg-zinc-800">
        {item.thumbnail_url && !thumbBroken ? (
          <img
            src={item.thumbnail_url}
            alt={item.title}
            className="h-full w-full object-cover"
            onError={() => setThumbBroken(true)}
          />
        ) : (
          <div className="flex h-full items-center justify-center text-zinc-600 text-xs">No image</div>
        )}
      </div>
      {/* Checkbox wrapper stops propagation so the card's own onClick doesn't double-toggle. */}
      <div className="absolute left-2 top-2" onClick={e => { e.stopPropagation(); onToggle(); }}>
        <Checkbox checked={selected} className="border-white/50 bg-black/40" />
      </div>
      <div className="p-2">
        <p className="truncate text-xs font-medium text-zinc-100">{item.title}</p>
        <p className="mt-0.5 text-xs text-zinc-500">
          {kindLabel}
          {" · "}{durationLabel}
        </p>
      </div>
    </div>
  );
}

View File

@@ -0,0 +1,129 @@
"use client";
import { useCollections, useGenres } from "@/hooks/use-library";
import type { LibrarySearchParams } from "@/hooks/use-library-search";
import { Input } from "@/components/ui/input";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { Badge } from "@/components/ui/badge";
import { ArrowLeft } from "lucide-react";
interface Props {
  filter: LibrarySearchParams;
  onFilterChange: (next: Partial<LibrarySearchParams>) => void;
  viewMode: "grouped" | "flat";
  drilldown: null | { series: string } | { series: string; season: number };
  onBack: () => void;
}

// Sentinel meaning "no filter". Radix/shadcn Select items must not use an
// empty-string value, so every "All" option maps to this sentinel instead.
const ALL = "__all__";
const CONTENT_TYPES_ALL = [
  { value: ALL, label: "All types" },
  { value: "movie", label: "Movies" },
  { value: "episode", label: "Episodes" },
  { value: "short", label: "Shorts" },
];
const CONTENT_TYPES_GROUPED = [
  { value: ALL, label: "All types" },
  { value: "movie", label: "Movies" },
  { value: "short", label: "Shorts" },
];

/**
 * Filter sidebar for the library page.
 *
 * Inside a drilldown only a back button and the free-text search are offered;
 * at the top level it also exposes type, collection, and genre filters.
 * In grouped view the "Episodes" type option is omitted — episodes are
 * reached via show drilldown instead.
 */
export function LibrarySidebar({ filter, onFilterChange, viewMode, drilldown, onBack }: Props) {
  const { data: collections } = useCollections(filter.provider);
  const { data: genres } = useGenres(filter.type, { provider: filter.provider });
  if (drilldown !== null) {
    return (
      <aside className="w-56 shrink-0 border-r border-zinc-800 bg-zinc-950 p-4 flex flex-col gap-4">
        <button
          type="button"
          onClick={onBack}
          className="flex items-center gap-1.5 text-xs text-zinc-400 hover:text-zinc-100 transition-colors"
        >
          <ArrowLeft className="h-3.5 w-3.5" />
          Back
        </button>
        <div>
          <p className="mb-1.5 text-xs font-medium uppercase tracking-wider text-zinc-500">Search</p>
          <Input
            placeholder="Search…"
            value={filter.q ?? ""}
            onChange={e => onFilterChange({ q: e.target.value || undefined })}
            className="h-8 text-xs"
          />
        </div>
      </aside>
    );
  }
  const contentTypes = viewMode === "grouped" ? CONTENT_TYPES_GROUPED : CONTENT_TYPES_ALL;
  return (
    <aside className="w-56 shrink-0 border-r border-zinc-800 bg-zinc-950 p-4 flex flex-col gap-4">
      <div>
        <p className="mb-1.5 text-xs font-medium uppercase tracking-wider text-zinc-500">Search</p>
        <Input
          placeholder="Search…"
          value={filter.q ?? ""}
          onChange={e => onFilterChange({ q: e.target.value || undefined })}
          className="h-8 text-xs"
        />
      </div>
      <div>
        <p className="mb-1.5 text-xs font-medium uppercase tracking-wider text-zinc-500">Type</p>
        <Select value={filter.type ?? ALL} onValueChange={v => onFilterChange({ type: v === ALL ? undefined : v })}>
          <SelectTrigger className="h-8 text-xs"><SelectValue /></SelectTrigger>
          <SelectContent>
            {contentTypes.map(ct => (
              <SelectItem key={ct.value} value={ct.value}>{ct.label}</SelectItem>
            ))}
          </SelectContent>
        </Select>
      </div>
      {collections && collections.length > 0 && (
        <div>
          <p className="mb-1.5 text-xs font-medium uppercase tracking-wider text-zinc-500">Collection</p>
          {/* FIX: previously used SelectItem value="" which Radix Select rejects at
              runtime; use the ALL sentinel, consistent with the Type select above. */}
          <Select
            value={filter.collection ?? ALL}
            onValueChange={v => onFilterChange({ collection: v === ALL ? undefined : v })}
          >
            <SelectTrigger className="h-8 text-xs"><SelectValue placeholder="All" /></SelectTrigger>
            <SelectContent>
              <SelectItem value={ALL}>All</SelectItem>
              {collections.map(c => (
                <SelectItem key={c.id} value={c.id}>{c.name}</SelectItem>
              ))}
            </SelectContent>
          </Select>
        </div>
      )}
      {genres && genres.length > 0 && (
        <div>
          <p className="mb-1.5 text-xs font-medium uppercase tracking-wider text-zinc-500">Genre</p>
          <div className="flex flex-wrap gap-1">
            {genres.map(g => {
              const active = filter.genres?.includes(g) ?? false;
              return (
                <Badge
                  key={g}
                  variant={active ? "default" : "outline"}
                  className="cursor-pointer text-xs"
                  onClick={() => {
                    // Toggle genre membership in the multi-select filter.
                    const current = filter.genres ?? [];
                    onFilterChange({
                      genres: active ? current.filter(x => x !== g) : [...current, g],
                    });
                  }}
                >
                  {g}
                </Badge>
              );
            })}
          </div>
        </div>
      )}
    </aside>
  );
}

View File

@@ -0,0 +1,211 @@
"use client";
import { useState } from "react";
import { Button } from "@/components/ui/button";
import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogFooter } from "@/components/ui/dialog";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { Input } from "@/components/ui/input";
import { Checkbox } from "@/components/ui/checkbox";
import { useChannels, useUpdateChannel } from "@/hooks/use-channels";
import type { LibraryItemFull, ShowSummary, Weekday, ProgrammingBlock, ScheduleConfig } from "@/lib/types";
import { WEEKDAYS, WEEKDAY_LABELS } from "@/lib/types";
interface Props {
  selectedItems: LibraryItemFull[];
  selectedShows?: ShowSummary[];
}

/**
 * Dialog that schedules the current library selection onto a channel.
 *
 * On confirm, one new programming block is appended to each chosen weekday of
 * the selected channel's schedule. The block content is derived from the
 * selection:
 *  - whole shows selected          → algorithmic block filtered by series names
 *  - episodes all from one series  → algorithmic block filtered by that series
 *  - anything else                 → manual block listing the exact item ids
 */
export function ScheduleFromLibraryDialog({ selectedItems, selectedShows }: Props) {
  const [open, setOpen] = useState(false);
  const [channelId, setChannelId] = useState("");
  const [selectedDays, setSelectedDays] = useState<Set<Weekday>>(new Set());
  const [startTime, setStartTime] = useState("20:00");
  // Default duration: a single item's runtime, otherwise one hour.
  const [durationMins, setDurationMins] = useState(() => {
    if (selectedItems.length === 1) return Math.ceil(selectedItems[0].duration_secs / 60);
    return 60;
  });
  const [strategy, setStrategy] = useState<"sequential" | "random" | "best_fit">("sequential");
  const { data: channels } = useChannels();
  const updateChannel = useUpdateChannel();
  const selectedChannel = channels?.find(c => c.id === channelId);
  // allSameSeries: every selected item is an episode and they share one series.
  const isEpisodic = selectedItems.every(i => i.content_type === "episode");
  const allSameSeries =
    isEpisodic &&
    selectedItems.length > 0 &&
    new Set(selectedItems.map(i => i.series_name)).size === 1;
  function toggleDay(day: Weekday) {
    setSelectedDays(prev => {
      const next = new Set(prev);
      if (next.has(day)) next.delete(day);
      else next.add(day);
      return next;
    });
  }
  async function handleConfirm() {
    if (!selectedChannel || selectedDays.size === 0) return;
    // The <input type="time"> yields HH:MM; pad to HH:MM:SS for the schedule.
    const startTimeFull = startTime.length === 5 ? `${startTime}:00` : startTime;
    const hasShows = selectedShows && selectedShows.length > 0;
    const newBlock: ProgrammingBlock = hasShows
      ? {
          id: globalThis.crypto.randomUUID(),
          // FIX: restore the " — " separator before the time (it was missing in
          // the single-show name, unlike the multi-show/items names below).
          name: selectedShows!.length === 1
            ? `${selectedShows![0].series_name} — ${startTime}`
            : `${selectedShows!.length} shows — ${startTime}`,
          start_time: startTimeFull,
          duration_mins: durationMins,
          content: {
            type: "algorithmic",
            filter: {
              content_type: "episode",
              series_names: selectedShows!.map(s => s.series_name),
              genres: [],
              tags: [],
              collections: [],
            },
            strategy,
          },
        }
      : allSameSeries
      ? {
          id: globalThis.crypto.randomUUID(),
          // FIX: same missing " — " separator as above.
          name: `${selectedItems[0].series_name ?? "Series"} — ${startTime}`,
          start_time: startTimeFull,
          duration_mins: durationMins,
          content: {
            type: "algorithmic",
            filter: {
              content_type: "episode",
              series_names: [selectedItems[0].series_name!],
              genres: [],
              tags: [],
              collections: [],
            },
            strategy,
            // Item ids appear to be "<provider_id>::…" — first segment pins the
            // provider. TODO confirm id format against the backend.
            provider_id: selectedItems[0].id.split("::")[0],
          },
        }
      : {
          id: globalThis.crypto.randomUUID(),
          name: `${selectedItems.length} items — ${startTime}`,
          start_time: startTimeFull,
          duration_mins: durationMins,
          content: { type: "manual", items: selectedItems.map(i => i.id) },
        };
    // Append the new block to every selected weekday, preserving existing blocks.
    const updatedDayBlocks = { ...selectedChannel.schedule_config.day_blocks };
    for (const day of selectedDays) {
      updatedDayBlocks[day] = [...(updatedDayBlocks[day] ?? []), newBlock];
    }
    const scheduleConfig: ScheduleConfig = { day_blocks: updatedDayBlocks };
    await updateChannel.mutateAsync({
      id: channelId,
      data: { schedule_config: scheduleConfig },
    });
    setOpen(false);
  }
  const canConfirm = !!channelId && selectedDays.size > 0;
  const daysLabel = [...selectedDays].map(d => WEEKDAY_LABELS[d]).join(", ");
  const hasShows = selectedShows && selectedShows.length > 0;
  const contentLabel = hasShows
    ? (selectedShows!.length === 1 ? selectedShows![0].series_name : `${selectedShows!.length} shows`)
    : `${selectedItems.length} item(s)`;
  // FIX: the preview ran channel name and day list together; separate with " — ".
  const preview = canConfirm
    ? `${selectedDays.size} block(s) of ${contentLabel} will be created on ${selectedChannel?.name} — ${daysLabel} at ${startTime}, ${strategy}`
    : null;
  return (
    <>
      <Button size="sm" onClick={() => setOpen(true)}>Schedule on channel</Button>
      <Dialog open={open} onOpenChange={setOpen}>
        <DialogContent className="max-w-md">
          <DialogHeader><DialogTitle>Schedule on channel</DialogTitle></DialogHeader>
          <div className="flex flex-col gap-4">
            <div>
              <p className="mb-1.5 text-xs text-zinc-400">Channel</p>
              <Select value={channelId} onValueChange={setChannelId}>
                <SelectTrigger><SelectValue placeholder="Select channel…" /></SelectTrigger>
                <SelectContent>
                  {channels?.map(c => (
                    <SelectItem key={c.id} value={c.id}>{c.name}</SelectItem>
                  ))}
                </SelectContent>
              </Select>
            </div>
            <div>
              <p className="mb-1.5 text-xs text-zinc-400">Days</p>
              <div className="flex flex-wrap gap-2">
                {WEEKDAYS.map(day => (
                  <label key={day} className="flex items-center gap-1.5 cursor-pointer">
                    <Checkbox checked={selectedDays.has(day)} onCheckedChange={() => toggleDay(day)} />
                    <span className="text-xs">{WEEKDAY_LABELS[day]}</span>
                  </label>
                ))}
              </div>
            </div>
            <div className="flex gap-4">
              <div className="flex-1">
                <p className="mb-1.5 text-xs text-zinc-400">
                  Start time{selectedChannel?.timezone ? ` (${selectedChannel.timezone})` : ""}
                </p>
                <Input
                  type="time"
                  value={startTime}
                  onChange={e => setStartTime(e.target.value)}
                  disabled={!channelId}
                />
              </div>
              <div className="flex-1">
                <p className="mb-1.5 text-xs text-zinc-400">Duration (mins)</p>
                <Input
                  type="number"
                  min={1}
                  value={durationMins}
                  onChange={e => setDurationMins(Number(e.target.value))}
                  disabled={!channelId}
                />
              </div>
            </div>
            <div>
              <p className="mb-1.5 text-xs text-zinc-400">Fill strategy</p>
              <Select
                value={strategy}
                onValueChange={(v: "sequential" | "random" | "best_fit") => setStrategy(v)}
                disabled={!channelId}
              >
                <SelectTrigger><SelectValue /></SelectTrigger>
                <SelectContent>
                  <SelectItem value="sequential">Sequential</SelectItem>
                  <SelectItem value="random">Random</SelectItem>
                  <SelectItem value="best_fit">Best fit</SelectItem>
                </SelectContent>
              </Select>
            </div>
            {preview && (
              <p className="rounded-md bg-emerald-950/30 border border-emerald-800 px-3 py-2 text-xs text-emerald-300">
                {preview}
              </p>
            )}
          </div>
          <DialogFooter>
            <Button variant="outline" onClick={() => setOpen(false)}>Cancel</Button>
            <Button
              disabled={!canConfirm || updateChannel.isPending}
              onClick={handleConfirm}
            >
              {updateChannel.isPending ? "Saving…" : `Create ${selectedDays.size} block(s)`}
            </Button>
          </DialogFooter>
        </DialogContent>
      </Dialog>
    </>
  );
}

View File

@@ -0,0 +1,63 @@
"use client";
import { Film } from "lucide-react";
import type { SeasonSummary } from "@/lib/types";
interface Props {
season: SeasonSummary;
selected: boolean;
onToggle: () => void;
onClick: () => void;
}
export function SeasonTile({ season, selected, onToggle, onClick }: Props) {
return (
<div className="group relative flex flex-col overflow-hidden rounded-lg border border-zinc-800 bg-zinc-900 transition-colors hover:border-zinc-600">
<button
type="button"
className="relative aspect-video w-full overflow-hidden bg-zinc-800"
onClick={onClick}
>
{season.thumbnail_url ? (
// eslint-disable-next-line @next/next/no-img-element
<img
src={season.thumbnail_url}
alt={`Season ${season.season_number}`}
className="h-full w-full object-cover transition-transform group-hover:scale-105"
/>
) : (
<div className="flex h-full w-full items-center justify-center">
<Film className="h-10 w-10 text-zinc-600" />
</div>
)}
{selected && (
<div className="absolute inset-0 bg-blue-600/30 ring-2 ring-inset ring-blue-500" />
)}
</button>
<div className="flex flex-col gap-1 p-2">
<button
type="button"
className="truncate text-left text-sm font-medium text-zinc-100 hover:text-white"
onClick={onClick}
>
Season {season.season_number}
</button>
<p className="text-xs text-zinc-500">{season.episode_count} episodes</p>
</div>
<button
type="button"
onClick={e => { e.stopPropagation(); onToggle(); }}
className={`absolute left-1.5 top-1.5 flex h-5 w-5 items-center justify-center rounded border text-xs font-bold transition-opacity ${
selected
? "border-blue-500 bg-blue-600 text-white opacity-100"
: "border-zinc-600 bg-zinc-900/80 text-transparent opacity-0 group-hover:opacity-100"
}`}
aria-label={selected ? "Deselect" : "Select"}
>
</button>
</div>
);
}

View File

@@ -0,0 +1,75 @@
"use client";
import { Tv } from "lucide-react";
import type { ShowSummary } from "@/lib/types";
interface Props {
  show: ShowSummary;
  selected: boolean;
  onToggle: () => void;
  onClick: () => void;
}

/**
 * Tile for one TV show at the grouped top level of the library grid.
 * Clicking the thumbnail or title drills into the show's seasons;
 * the corner checkbox toggles whole-show selection without navigating.
 */
export function ShowTile({ show, selected, onToggle, onClick }: Props) {
  return (
    <div className="group relative flex flex-col overflow-hidden rounded-lg border border-zinc-800 bg-zinc-900 transition-colors hover:border-zinc-600">
      {/* Thumbnail area - clickable to drill in */}
      <button
        type="button"
        className="relative aspect-video w-full overflow-hidden bg-zinc-800"
        onClick={onClick}
      >
        {show.thumbnail_url ? (
          // eslint-disable-next-line @next/next/no-img-element
          <img
            src={show.thumbnail_url}
            alt={show.series_name}
            className="h-full w-full object-cover transition-transform group-hover:scale-105"
          />
        ) : (
          <div className="flex h-full w-full items-center justify-center">
            <Tv className="h-10 w-10 text-zinc-600" />
          </div>
        )}
        {/* Selection overlay */}
        {selected && (
          <div className="absolute inset-0 bg-blue-600/30 ring-2 ring-inset ring-blue-500" />
        )}
      </button>
      {/* Info area */}
      <div className="flex flex-col gap-1 p-2">
        <button
          type="button"
          className="truncate text-left text-sm font-medium text-zinc-100 hover:text-white"
          onClick={onClick}
          title={show.series_name}
        >
          {show.series_name}
        </button>
        <p className="text-xs text-zinc-500">
          {show.season_count} {show.season_count === 1 ? "season" : "seasons"} · {show.episode_count} eps
        </p>
        {show.genres.length > 0 && (
          <p className="truncate text-xs text-zinc-600">
            {show.genres.slice(0, 3).join(", ")}
          </p>
        )}
      </div>
      {/* Select checkbox */}
      <button
        type="button"
        onClick={e => { e.stopPropagation(); onToggle(); }}
        className={`absolute left-1.5 top-1.5 flex h-5 w-5 items-center justify-center rounded border text-xs font-bold transition-opacity ${
          selected
            ? "border-blue-500 bg-blue-600 text-white opacity-100"
            : "border-zinc-600 bg-zinc-900/80 text-transparent opacity-0 group-hover:opacity-100"
        }`}
        aria-label={selected ? "Deselect" : "Select"}
      >
        {/* FIX: the button body was empty — the font-bold/text-transparent styling
            expects a check glyph that shows on hover and when selected. */}
        ✓
      </button>
    </div>
  );
}

View File

@@ -0,0 +1,30 @@
"use client";
import { useLibrarySyncStatus } from "@/hooks/use-library-sync";
/**
 * Slim banner summarising the most recent library sync per provider.
 * Hidden entirely while no sync status rows exist.
 */
export function SyncStatusBar() {
  const { data: statuses } = useLibrarySyncStatus();
  if (!statuses?.length) return null;

  return (
    <div className="border-b border-zinc-800 bg-zinc-900 px-6 py-1.5">
      <div className="flex flex-wrap gap-4">
        {statuses.map(s => {
          // Pick the coloured detail fragment for this provider's state.
          let detail;
          if (s.status === "running") {
            detail = <span className="text-yellow-400">syncing</span>;
          } else if (s.status === "error") {
            detail = <span className="text-red-400">error</span>;
          } else {
            const syncedAt = s.finished_at
              ? ` · synced ${new Date(s.finished_at).toLocaleTimeString()}`
              : "";
            detail = (
              <span className="text-zinc-400">
                {s.items_found.toLocaleString()} items
                {syncedAt}
              </span>
            );
          }
          return (
            <span key={s.id} className="text-xs text-zinc-500">
              {s.provider_id}:{" "}
              {detail}
            </span>
          );
        })}
      </div>
    </div>
  );
}

View File

@@ -0,0 +1,134 @@
"use client";
import { useState } from "react";
import { LayoutGrid, FolderOpen } from "lucide-react";
import { useLibrarySearch, type LibrarySearchParams } from "@/hooks/use-library-search";
import { LibrarySidebar } from "./components/library-sidebar";
import { LibraryGrid } from "./components/library-grid";
import { SyncStatusBar } from "./components/sync-status-bar";
import { Button } from "@/components/ui/button";
import type { LibraryItemFull, ShowSummary } from "@/lib/types";
const PAGE_SIZE = 50;
// Drilldown position: null = top-level grid, { series } = that show's seasons,
// { series, season } = that season's episodes.
type Drilldown = null | { series: string } | { series: string; season: number };
/**
 * Library browser page. Owns all browsing state — search filter, page index,
 * grouped/flat view mode, drilldown position, and the two selection sets
 * (individual items by id, whole shows keyed by series name) — and wires it
 * into the sidebar and grid.
 */
export default function LibraryPage() {
// filter.offset stays 0 here; the effective offset is derived from `page` below.
const [filter, setFilter] = useState<LibrarySearchParams>({ limit: PAGE_SIZE, offset: 0 });
const [selected, setSelected] = useState<Set<string>>(new Set());
const [page, setPage] = useState(0);
const [viewMode, setViewMode] = useState<"grouped" | "flat">("grouped");
const [drilldown, setDrilldown] = useState<Drilldown>(null);
// Map keeps the full ShowSummary so the schedule dialog can use it later.
const [selectedShowMap, setSelectedShowMap] = useState<Map<string, ShowSummary>>(new Map());
const { data, isLoading } = useLibrarySearch({ ...filter, offset: page * PAGE_SIZE });
// A filter change restarts paging and drops the per-item selection.
// NOTE(review): selectedShowMap is NOT cleared here, unlike the other reset
// paths below — confirm whether show selections should survive a filter change.
function handleFilterChange(next: Partial<LibrarySearchParams>) {
setFilter(f => ({ ...f, ...next, offset: 0 }));
setPage(0);
setSelected(new Set());
}
// Toggle one item id in/out of the selection set.
function toggleSelect(id: string) {
setSelected(prev => {
const next = new Set(prev);
if (next.has(id)) next.delete(id);
else next.add(id);
return next;
});
}
// Toggle one whole show in/out of the show-selection map.
function toggleSelectShow(show: ShowSummary) {
setSelectedShowMap(prev => {
const next = new Map(prev);
if (next.has(show.series_name)) next.delete(show.series_name);
else next.set(show.series_name, show);
return next;
});
}
// Jumping to any drilldown level clears both selections and paging.
function handleDrilldown(next: Drilldown) {
setDrilldown(next);
setSelected(new Set());
setSelectedShowMap(new Map());
setPage(0);
}
// Step one level up the drilldown hierarchy.
function handleBack() {
if (!drilldown) return;
if ("season" in drilldown) {
// From episode level → go back to season level
setDrilldown({ series: drilldown.series });
} else {
// From season level → go back to root
setDrilldown(null);
}
setSelected(new Set());
setPage(0);
}
// Flip grouped ↔ flat; flat view has no drilldown, so it is reset too.
function handleViewModeToggle() {
const next = viewMode === "grouped" ? "flat" : "grouped";
setViewMode(next);
if (next === "flat") setDrilldown(null);
setSelected(new Set());
setSelectedShowMap(new Map());
}
// Derive the concrete selections from the id sets for the grid / dialogs.
const selectedItems = data?.items.filter(i => selected.has(i.id)) ?? [];
const selectedShows = Array.from(selectedShowMap.values());
const selectedShowNames = new Set(selectedShowMap.keys());
return (
<div className="flex flex-1 flex-col">
<SyncStatusBar />
<div className="flex items-center justify-end gap-2 border-b border-zinc-800 bg-zinc-950 px-4 py-2">
<Button
size="sm"
variant="ghost"
className="gap-1.5 text-xs"
onClick={handleViewModeToggle}
>
{viewMode === "grouped" ? (
<>
<LayoutGrid className="h-3.5 w-3.5" />
Flat view
</>
) : (
<>
<FolderOpen className="h-3.5 w-3.5" />
Grouped view
</>
)}
</Button>
</div>
<div className="flex flex-1">
<LibrarySidebar
filter={filter}
onFilterChange={handleFilterChange}
viewMode={viewMode}
drilldown={drilldown}
onBack={handleBack}
/>
<LibraryGrid
items={data?.items ?? []}
total={data?.total ?? 0}
page={page}
pageSize={PAGE_SIZE}
isLoading={isLoading}
selected={selected}
onToggleSelect={toggleSelect}
onPageChange={setPage}
selectedItems={selectedItems}
viewMode={viewMode}
drilldown={drilldown}
onDrilldown={handleDrilldown}
filter={filter}
selectedShows={selectedShows}
selectedShowNames={selectedShowNames}
onToggleSelectShow={toggleSelectShow}
/>
</div>
</div>
);
}

View File

@@ -15,7 +15,7 @@ import { Toaster } from "@/components/ui/sonner";
import { ApiRequestError } from "@/lib/api"; import { ApiRequestError } from "@/lib/api";
function QueryProvider({ children }: { children: React.ReactNode }) { function QueryProvider({ children }: { children: React.ReactNode }) {
const { token, setToken } = useAuthContext(); const { token, setTokens } = useAuthContext();
const router = useRouter(); const router = useRouter();
const tokenRef = useRef(token); const tokenRef = useRef(token);
useEffect(() => { tokenRef.current = token; }, [token]); useEffect(() => { tokenRef.current = token; }, [token]);
@@ -29,7 +29,7 @@ function QueryProvider({ children }: { children: React.ReactNode }) {
// Guests hitting 401 on restricted content should not be redirected. // Guests hitting 401 on restricted content should not be redirected.
if (error instanceof ApiRequestError && error.status === 401 && tokenRef.current) { if (error instanceof ApiRequestError && error.status === 401 && tokenRef.current) {
toast.warning("Session expired, please log in again."); toast.warning("Session expired, please log in again.");
setToken(null); setTokens(null, null, false);
router.push("/login"); router.push("/login");
} }
}, },
@@ -39,7 +39,7 @@ function QueryProvider({ children }: { children: React.ReactNode }) {
// Mutations always require auth — redirect on 401 regardless. // Mutations always require auth — redirect on 401 regardless.
if (error instanceof ApiRequestError && error.status === 401) { if (error instanceof ApiRequestError && error.status === 401) {
toast.warning("Session expired, please log in again."); toast.warning("Session expired, please log in again.");
setToken(null); setTokens(null, null, false);
router.push("/login"); router.push("/login");
} }
}, },

View File

@@ -4,42 +4,94 @@ import {
createContext, createContext,
useContext, useContext,
useState, useState,
useEffect,
type ReactNode, type ReactNode,
} from "react"; } from "react";
import { useRouter } from "next/navigation";
import { api, setRefreshCallback } from "@/lib/api";
const TOKEN_KEY = "k-tv-token"; const ACCESS_KEY_LOCAL = "k-tv-token";
const ACCESS_KEY_SESSION = "k-tv-token-session";
const REFRESH_KEY = "k-tv-refresh-token";
interface AuthContextValue { interface AuthContextValue {
token: string | null; token: string | null;
/** True once the initial localStorage read has completed */ refreshToken: string | null;
/** Always true (lazy init reads storage synchronously) */
isLoaded: boolean; isLoaded: boolean;
setToken: (token: string | null) => void; setTokens: (access: string | null, refresh: string | null, remember: boolean) => void;
} }
const AuthContext = createContext<AuthContextValue | null>(null); const AuthContext = createContext<AuthContextValue | null>(null);
export function AuthProvider({ children }: { children: ReactNode }) { export function AuthProvider({ children }: { children: ReactNode }) {
const router = useRouter();
const [token, setTokenState] = useState<string | null>(() => { const [token, setTokenState] = useState<string | null>(() => {
try { try {
return localStorage.getItem(TOKEN_KEY); return sessionStorage.getItem(ACCESS_KEY_SESSION) ?? localStorage.getItem(ACCESS_KEY_LOCAL);
} catch { } catch {
return null; return null;
} }
}); });
// isLoaded is always true: lazy init above reads localStorage synchronously
const [isLoaded] = useState(true);
const setToken = (t: string | null) => { const [refreshToken, setRefreshTokenState] = useState<string | null>(() => {
setTokenState(t); try {
if (t) { return localStorage.getItem(REFRESH_KEY);
localStorage.setItem(TOKEN_KEY, t); } catch {
} else { return null;
localStorage.removeItem(TOKEN_KEY);
} }
});
const setTokens = (access: string | null, refresh: string | null, remember: boolean) => {
try {
if (access === null) {
sessionStorage.removeItem(ACCESS_KEY_SESSION);
localStorage.removeItem(ACCESS_KEY_LOCAL);
localStorage.removeItem(REFRESH_KEY);
} else if (remember) {
localStorage.setItem(ACCESS_KEY_LOCAL, access);
sessionStorage.removeItem(ACCESS_KEY_SESSION);
if (refresh) {
localStorage.setItem(REFRESH_KEY, refresh);
} else {
localStorage.removeItem(REFRESH_KEY);
}
} else {
sessionStorage.setItem(ACCESS_KEY_SESSION, access);
localStorage.removeItem(ACCESS_KEY_LOCAL);
localStorage.removeItem(REFRESH_KEY);
}
} catch {
// storage unavailable — state-only fallback
}
setTokenState(access);
setRefreshTokenState(refresh);
}; };
// Wire up the transparent refresh callback in api.ts
useEffect(() => {
if (refreshToken) {
setRefreshCallback(async () => {
try {
const data = await api.auth.refresh(refreshToken);
const newRefresh = data.refresh_token ?? null;
setTokens(data.access_token, newRefresh, true);
return data.access_token;
} catch {
setTokens(null, null, false);
router.push("/login");
return null;
}
});
} else {
setRefreshCallback(null);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [refreshToken]);
return ( return (
<AuthContext.Provider value={{ token, isLoaded, setToken }}> <AuthContext.Provider value={{ token, refreshToken, isLoaded: true, setTokens }}>
{children} {children}
</AuthContext.Provider> </AuthContext.Provider>
); );

View File

@@ -13,17 +13,31 @@ export function useProviderConfigs() {
}); });
} }
export function useCreateProvider() {
const { token } = useAuthContext();
const qc = useQueryClient();
return useMutation({
mutationFn: (payload: {
id: string;
provider_type: string;
config_json: Record<string, string>;
enabled: boolean;
}) => api.admin.providers.createProvider(token!, payload),
onSuccess: () => qc.invalidateQueries({ queryKey: ["admin", "providers"] }),
});
}
export function useUpdateProvider() { export function useUpdateProvider() {
const { token } = useAuthContext(); const { token } = useAuthContext();
const qc = useQueryClient(); const qc = useQueryClient();
return useMutation({ return useMutation({
mutationFn: ({ mutationFn: ({
type, id,
payload, payload,
}: { }: {
type: string; id: string;
payload: { config_json: Record<string, string>; enabled: boolean }; payload: { config_json: Record<string, string>; enabled: boolean };
}) => api.admin.providers.updateProvider(token!, type, payload), }) => api.admin.providers.updateProvider(token!, id, payload),
onSuccess: () => qc.invalidateQueries({ queryKey: ["admin", "providers"] }), onSuccess: () => qc.invalidateQueries({ queryKey: ["admin", "providers"] }),
}); });
} }
@@ -32,8 +46,8 @@ export function useDeleteProvider() {
const { token } = useAuthContext(); const { token } = useAuthContext();
const qc = useQueryClient(); const qc = useQueryClient();
return useMutation({ return useMutation({
mutationFn: (type: string) => mutationFn: (id: string) =>
api.admin.providers.deleteProvider(token!, type), api.admin.providers.deleteProvider(token!, id),
onSuccess: () => qc.invalidateQueries({ queryKey: ["admin", "providers"] }), onSuccess: () => qc.invalidateQueries({ queryKey: ["admin", "providers"] }),
}); });
} }
@@ -41,12 +55,9 @@ export function useDeleteProvider() {
export function useTestProvider() { export function useTestProvider() {
const { token } = useAuthContext(); const { token } = useAuthContext();
return useMutation({ return useMutation({
mutationFn: ({ mutationFn: (payload: {
type, provider_type: string;
payload, config_json: Record<string, string>;
}: { }) => api.admin.providers.testProvider(token!, payload),
type: string;
payload: { config_json: Record<string, string>; enabled: boolean };
}) => api.admin.providers.testProvider(token!, type, payload),
}); });
} }

View File

@@ -0,0 +1,28 @@
"use client";
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import { api } from "@/lib/api";
import { useAuthContext } from "@/context/auth-context";
import type { AdminSettings } from "@/lib/types";
/**
 * Fetches the global admin settings.
 * Idle until an auth token exists; results stay fresh for five minutes.
 */
export function useAdminSettings() {
  const { token } = useAuthContext();
  const FIVE_MINUTES_MS = 5 * 60 * 1000;
  return useQuery({
    queryKey: ["admin", "settings"],
    enabled: Boolean(token),
    staleTime: FIVE_MINUTES_MS,
    queryFn: () => api.admin.getSettings(token!),
  });
}
export function useUpdateAdminSettings() {
const { token } = useAuthContext();
const queryClient = useQueryClient();
return useMutation({
mutationFn: (patch: Partial<AdminSettings>) =>
api.admin.updateSettings(token!, patch),
onSuccess: (data: AdminSettings) => {
queryClient.setQueryData(["admin", "settings"], data);
},
});
}

View File

@@ -16,14 +16,21 @@ export function useCurrentUser() {
} }
export function useLogin() { export function useLogin() {
const { setToken } = useAuthContext(); const { setTokens } = useAuthContext();
const router = useRouter(); const router = useRouter();
const queryClient = useQueryClient(); const queryClient = useQueryClient();
return useMutation({ return useMutation({
mutationFn: ({ email, password }: { email: string; password: string }) => mutationFn: ({
api.auth.login(email, password), email,
onSuccess: (data) => { password,
setToken(data.access_token); rememberMe,
}: {
email: string;
password: string;
rememberMe: boolean;
}) => api.auth.login(email, password, rememberMe),
onSuccess: (data, { rememberMe }) => {
setTokens(data.access_token, data.refresh_token ?? null, rememberMe);
queryClient.invalidateQueries({ queryKey: ["me"] }); queryClient.invalidateQueries({ queryKey: ["me"] });
router.push("/dashboard"); router.push("/dashboard");
}, },
@@ -31,14 +38,14 @@ export function useLogin() {
} }
export function useRegister() { export function useRegister() {
const { setToken } = useAuthContext(); const { setTokens } = useAuthContext();
const router = useRouter(); const router = useRouter();
const queryClient = useQueryClient(); const queryClient = useQueryClient();
return useMutation({ return useMutation({
mutationFn: ({ email, password }: { email: string; password: string }) => mutationFn: ({ email, password }: { email: string; password: string }) =>
api.auth.register(email, password), api.auth.register(email, password),
onSuccess: (data) => { onSuccess: (data) => {
setToken(data.access_token); setTokens(data.access_token, null, false);
queryClient.invalidateQueries({ queryKey: ["me"] }); queryClient.invalidateQueries({ queryKey: ["me"] });
router.push("/dashboard"); router.push("/dashboard");
}, },
@@ -46,13 +53,13 @@ export function useRegister() {
} }
export function useLogout() { export function useLogout() {
const { token, setToken } = useAuthContext(); const { token, setTokens } = useAuthContext();
const router = useRouter(); const router = useRouter();
const queryClient = useQueryClient(); const queryClient = useQueryClient();
return useMutation({ return useMutation({
mutationFn: () => (token ? api.auth.logout(token) : Promise.resolve()), mutationFn: () => (token ? api.auth.logout(token) : Promise.resolve()),
onSettled: () => { onSettled: () => {
setToken(null); setTokens(null, null, false);
queryClient.clear(); queryClient.clear();
router.push("/login"); router.push("/login");
}, },

View File

@@ -9,7 +9,9 @@ import type {
ProgrammingBlock, ProgrammingBlock,
MediaFilter, MediaFilter,
RecyclePolicy, RecyclePolicy,
Weekday,
} from "@/lib/types"; } from "@/lib/types";
import { WEEKDAYS } from "@/lib/types";
export const WEBHOOK_PRESETS = { export const WEBHOOK_PRESETS = {
discord: `{ discord: `{
@@ -54,11 +56,17 @@ export function defaultBlock(startMins = 20 * 60, durationMins = 60): Programmin
}; };
} }
function emptyDayBlocks(): Record<Weekday, ProgrammingBlock[]> {
const result = {} as Record<Weekday, ProgrammingBlock[]>;
for (const d of WEEKDAYS) result[d] = [];
return result;
}
export function useChannelForm(channel: ChannelResponse | null) { export function useChannelForm(channel: ChannelResponse | null) {
const [name, setName] = useState(""); const [name, setName] = useState("");
const [description, setDescription] = useState(""); const [description, setDescription] = useState("");
const [timezone, setTimezone] = useState("UTC"); const [timezone, setTimezone] = useState("UTC");
const [blocks, setBlocks] = useState<ProgrammingBlock[]>([]); const [dayBlocks, setDayBlocks] = useState<Record<Weekday, ProgrammingBlock[]>>(emptyDayBlocks);
const [recyclePolicy, setRecyclePolicy] = useState<RecyclePolicy>({ const [recyclePolicy, setRecyclePolicy] = useState<RecyclePolicy>({
cooldown_days: null, cooldown_days: null,
cooldown_generations: null, cooldown_generations: null,
@@ -84,7 +92,10 @@ export function useChannelForm(channel: ChannelResponse | null) {
setName(channel.name); setName(channel.name);
setDescription(channel.description ?? ""); setDescription(channel.description ?? "");
setTimezone(channel.timezone); setTimezone(channel.timezone);
setBlocks(channel.schedule_config.blocks); setDayBlocks({
...emptyDayBlocks(),
...channel.schedule_config.day_blocks,
});
setRecyclePolicy(channel.recycle_policy); setRecyclePolicy(channel.recycle_policy);
setAutoSchedule(channel.auto_schedule); setAutoSchedule(channel.auto_schedule);
setAccessMode(channel.access_mode ?? "public"); setAccessMode(channel.access_mode ?? "public");
@@ -110,20 +121,23 @@ export function useChannelForm(channel: ChannelResponse | null) {
} }
}, [channel]); }, [channel]);
const addBlock = (startMins = 20 * 60, durationMins = 60) => { const addBlock = (day: Weekday, startMins = 20 * 60, durationMins = 60) => {
const block = defaultBlock(startMins, durationMins); const block = defaultBlock(startMins, durationMins);
setBlocks((prev) => [...prev, block]); setDayBlocks((prev) => ({ ...prev, [day]: [...(prev[day] ?? []), block] }));
setSelectedBlockId(block.id); setSelectedBlockId(block.id);
}; };
const updateBlock = (idx: number, block: ProgrammingBlock) => const updateBlock = (day: Weekday, idx: number, block: ProgrammingBlock) =>
setBlocks((prev) => prev.map((b, i) => (i === idx ? block : b))); setDayBlocks((prev) => ({
...prev,
[day]: (prev[day] ?? []).map((b, i) => (i === idx ? block : b)),
}));
const removeBlock = (idx: number) => { const removeBlock = (day: Weekday, idx: number) => {
setBlocks((prev) => { setDayBlocks((prev) => {
const next = prev.filter((_, i) => i !== idx); const dayArr = prev[day] ?? [];
if (selectedBlockId === prev[idx].id) setSelectedBlockId(null); if (selectedBlockId === dayArr[idx]?.id) setSelectedBlockId(null);
return next; return { ...prev, [day]: dayArr.filter((_, i) => i !== idx) };
}); });
}; };
@@ -147,8 +161,8 @@ export function useChannelForm(channel: ChannelResponse | null) {
webhookFormat, setWebhookFormat, webhookFormat, setWebhookFormat,
webhookBodyTemplate, setWebhookBodyTemplate, webhookBodyTemplate, setWebhookBodyTemplate,
webhookHeaders, setWebhookHeaders, webhookHeaders, setWebhookHeaders,
// Blocks // Blocks (day-keyed)
blocks, setBlocks, dayBlocks, setDayBlocks,
selectedBlockId, setSelectedBlockId, selectedBlockId, setSelectedBlockId,
recyclePolicy, setRecyclePolicy, recyclePolicy, setRecyclePolicy,
addBlock, addBlock,

View File

@@ -117,3 +117,69 @@ export function useEpg(channelId: string, from?: string, until?: string, channel
enabled: !!channelId, enabled: !!channelId,
}); });
} }
/** Lists the config snapshots of a channel; idle without a token or channel id. */
export function useConfigHistory(channelId: string) {
  const { token } = useAuthContext();
  return useQuery({
    queryKey: ["config-history", channelId],
    enabled: Boolean(token) && Boolean(channelId),
    queryFn: () => api.channels.listConfigHistory(channelId, token!),
  });
}
export function usePinSnapshot() {
const { token } = useAuthContext();
const qc = useQueryClient();
return useMutation({
mutationFn: ({ channelId, snapId, label }: { channelId: string; snapId: string; label: string | null }) =>
api.channels.patchConfigSnapshot(channelId, snapId, label, token!),
onSuccess: (_, { channelId }) => qc.invalidateQueries({ queryKey: ["config-history", channelId] }),
onError: (e: Error) => toast.error(e.message),
});
}
export function useRestoreConfig() {
const { token } = useAuthContext();
const qc = useQueryClient();
return useMutation({
mutationFn: ({ channelId, snapId }: { channelId: string; snapId: string }) =>
api.channels.restoreConfigSnapshot(channelId, snapId, token!),
onSuccess: (_, { channelId }) => {
qc.invalidateQueries({ queryKey: ["channels"] });
qc.invalidateQueries({ queryKey: ["config-history", channelId] });
},
onError: (e: Error) => toast.error(e.message),
});
}
/** Lists past schedule generations for a channel; idle without token or id. */
export function useScheduleHistory(channelId: string) {
  const { token } = useAuthContext();
  return useQuery({
    queryKey: ["schedule-history", channelId],
    enabled: Boolean(token) && Boolean(channelId),
    queryFn: () => api.channels.listScheduleHistory(channelId, token!),
  });
}
/**
 * Fetches a single historical schedule generation.
 * Remains idle until a token, a channel id, and a non-null generation id exist.
 */
export function useScheduleGeneration(channelId: string, genId: string | null) {
  const { token } = useAuthContext();
  const ready = Boolean(token) && Boolean(channelId) && genId !== null;
  return useQuery({
    queryKey: ["schedule-generation", channelId, genId],
    enabled: ready,
    queryFn: () => api.channels.getScheduleGeneration(channelId, genId!, token!),
  });
}
export function useRollbackSchedule() {
const { token } = useAuthContext();
const qc = useQueryClient();
return useMutation({
mutationFn: ({ channelId, genId }: { channelId: string; genId: string }) =>
api.channels.rollbackSchedule(channelId, genId, token!),
onSuccess: (_, { channelId }) => {
qc.invalidateQueries({ queryKey: ["schedule-history", channelId] });
qc.invalidateQueries({ queryKey: ["schedule", channelId] });
},
onError: (e: Error) => toast.error(e.message),
});
}

View File

@@ -4,6 +4,8 @@ import { useState } from "react";
import { useQueryClient } from "@tanstack/react-query"; import { useQueryClient } from "@tanstack/react-query";
import { api } from "@/lib/api"; import { api } from "@/lib/api";
import type { ChannelImportData } from "@/app/(main)/dashboard/components/import-channel-dialog"; import type { ChannelImportData } from "@/app/(main)/dashboard/components/import-channel-dialog";
import { WEEKDAYS } from "@/lib/types";
import type { Weekday } from "@/lib/types";
export function useImportChannel(token: string | null) { export function useImportChannel(token: string | null) {
const queryClient = useQueryClient(); const queryClient = useQueryClient();
@@ -26,7 +28,11 @@ export function useImportChannel(token: string | null) {
await api.channels.update( await api.channels.update(
created.id, created.id,
{ {
schedule_config: { blocks: data.blocks }, schedule_config: {
day_blocks: Object.fromEntries(
WEEKDAYS.map(d => [d, d === 'monday' ? data.blocks : []])
) as Record<Weekday, typeof data.blocks>,
},
recycle_policy: data.recycle_policy, recycle_policy: data.recycle_policy,
}, },
token, token,

View File

@@ -0,0 +1,34 @@
"use client";
import { useQuery } from "@tanstack/react-query";
import { api } from "@/lib/api";
import { useAuthContext } from "@/context/auth-context";
/** Query parameters accepted by the paginated, DB-backed library search endpoint. */
export interface LibrarySearchParams {
q?: string; // free-text search query
type?: string; // content type filter
series?: string[]; // restrict results to these series names
collection?: string;
provider?: string;
decade?: number;
min_duration?: number;
max_duration?: number;
genres?: string[];
offset?: number; // pagination offset
limit?: number; // page size
}
/**
 * Paginated library search — always enabled once authenticated, DB-backed (fast).
 * Separate from useLibraryItems in use-library.ts, which is snapshot-based and
 * used only for the block editor filter preview.
 */
export function useLibrarySearch(params: LibrarySearchParams) {
  const { token } = useAuthContext();
  const TWO_MINUTES_MS = 2 * 60 * 1000;
  return useQuery({
    // The params object is part of the key, so any filter change refetches.
    queryKey: ["library", "search", params],
    enabled: Boolean(token),
    staleTime: TWO_MINUTES_MS,
    queryFn: () => api.library.itemsPage(token!, params),
  });
}

View File

@@ -0,0 +1,14 @@
import { useQuery } from "@tanstack/react-query";
import { api } from "@/lib/api";
import { useAuthContext } from "@/context/auth-context";
import type { SeasonSummary } from "@/lib/types";
/**
 * Fetches season summaries for a single show.
 * Idle until both an auth token and a series name are available.
 */
export function useLibrarySeasons(seriesName: string | null, provider?: string) {
  const { token } = useAuthContext();
  const ready = Boolean(token) && Boolean(seriesName);
  return useQuery<SeasonSummary[]>({
    queryKey: ["library", "seasons", seriesName, provider],
    enabled: ready,
    staleTime: 30_000,
    queryFn: () => api.library.seasons(token!, seriesName!, provider),
  });
}

View File

@@ -0,0 +1,20 @@
import { useQuery } from "@tanstack/react-query";
import { api } from "@/lib/api";
import { useAuthContext } from "@/context/auth-context";
import type { ShowSummary } from "@/lib/types";
/** Optional filters for the grouped shows listing. */
export interface ShowsFilter {
q?: string; // free-text search query
provider?: string; // restrict to one media provider
genres?: string[];
}
/**
 * Fetches show (series) summaries for the grouped library view.
 * The whole filter object participates in the query key, so any change refetches.
 */
export function useLibraryShows(filter: ShowsFilter = {}) {
  const { token } = useAuthContext();
  return useQuery<ShowSummary[]>({
    queryKey: ["library", "shows", filter],
    enabled: Boolean(token),
    staleTime: 30_000,
    queryFn: () => api.library.shows(token!, filter),
  });
}

View File

@@ -0,0 +1,28 @@
"use client";
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import { api } from "@/lib/api";
import { useAuthContext } from "@/context/auth-context";
/**
 * Polls the library sync log while mounted.
 * Refetches every 10 seconds; cached data counts as fresh for 30 seconds.
 */
export function useLibrarySyncStatus() {
  const { token } = useAuthContext();
  return useQuery({
    queryKey: ["library", "sync"],
    enabled: Boolean(token),
    staleTime: 30 * 1000,
    refetchInterval: 10 * 1000,
    queryFn: () => api.library.syncStatus(token!),
  });
}
/**
 * Kicks off a manual library sync on the server.
 *
 * On success, invalidates every query under the ["library"] prefix so search
 * results, sync status, and the shows/seasons drill-down caches all refetch.
 * Previously only ["library","search"] and ["library","sync"] were invalidated,
 * which left ["library","shows"] and ["library","seasons"] serving stale data
 * until their 30s staleTime elapsed.
 */
export function useTriggerSync() {
  const { token } = useAuthContext();
  const queryClient = useQueryClient();
  return useMutation({
    mutationFn: () => api.library.triggerSync(token!),
    onSuccess: () => {
      // Prefix match covers ["library","search"], ["library","sync"],
      // ["library","shows"], ["library","seasons"], ...
      queryClient.invalidateQueries({ queryKey: ["library"] });
    },
  });
}

View File

@@ -17,6 +17,13 @@ import type {
ActivityEvent, ActivityEvent,
ProviderConfig, ProviderConfig,
ProviderTestResult, ProviderTestResult,
ConfigSnapshot,
ScheduleHistoryEntry,
LibrarySyncLogEntry,
PagedLibraryResponse,
AdminSettings,
ShowSummary,
SeasonSummary,
} from "@/lib/types"; } from "@/lib/types";
const API_BASE = const API_BASE =
@@ -32,6 +39,23 @@ export class ApiRequestError extends Error {
} }
} }
// Called by AuthProvider when refreshToken changes — enables transparent 401 recovery
// Module-level singleton: api.ts has no access to React context, so the
// provider injects the refresh routine through setRefreshCallback.
let refreshCallback: (() => Promise<string | null>) | null = null;
// Deduplicates concurrent refresh attempts: all simultaneous 401s await the
// same in-flight promise instead of each firing their own refresh request.
let refreshInFlight: Promise<string | null> | null = null;
export function setRefreshCallback(cb: (() => Promise<string | null>) | null) {
refreshCallback = cb;
}
// Returns a fresh access token, or null when no callback is installed or the
// refresh itself failed (the installed callback resolves null in that case).
async function attemptRefresh(): Promise<string | null> {
if (!refreshCallback) return null;
if (refreshInFlight) return refreshInFlight;
refreshInFlight = refreshCallback().finally(() => {
// Clear the gate once settled so a later 401 can trigger a new refresh.
refreshInFlight = null;
});
return refreshInFlight;
}
async function request<T>( async function request<T>(
path: string, path: string,
options: RequestInit & { token?: string } = {}, options: RequestInit & { token?: string } = {},
@@ -48,6 +72,35 @@ async function request<T>(
const res = await fetch(`${API_BASE}${path}`, { ...init, headers }); const res = await fetch(`${API_BASE}${path}`, { ...init, headers });
// Transparent refresh: on 401, try to get a new access token and retry once.
// Skip for the refresh endpoint itself to avoid infinite loops.
if (res.status === 401 && path !== "/auth/refresh") {
const newToken = await attemptRefresh();
if (newToken) {
const retryHeaders = new Headers(init.headers);
retryHeaders.set("Authorization", `Bearer ${newToken}`);
if (init.body && !retryHeaders.has("Content-Type")) {
retryHeaders.set("Content-Type", "application/json");
}
const retryRes = await fetch(`${API_BASE}${path}`, {
...init,
headers: retryHeaders,
});
if (!retryRes.ok) {
let message = retryRes.statusText;
try {
const body = await retryRes.json();
message = body.message ?? body.error ?? message;
} catch {
// ignore parse error
}
throw new ApiRequestError(retryRes.status, message);
}
if (retryRes.status === 204) return null as T;
return retryRes.json() as Promise<T>;
}
}
if (!res.ok) { if (!res.ok) {
let message = res.statusText; let message = res.statusText;
try { try {
@@ -75,10 +128,16 @@ export const api = {
body: JSON.stringify({ email, password }), body: JSON.stringify({ email, password }),
}), }),
login: (email: string, password: string) => login: (email: string, password: string, rememberMe = false) =>
request<TokenResponse>("/auth/login", { request<TokenResponse>("/auth/login", {
method: "POST", method: "POST",
body: JSON.stringify({ email, password }), body: JSON.stringify({ email, password, remember_me: rememberMe }),
}),
refresh: (refreshToken: string) =>
request<TokenResponse>("/auth/refresh", {
method: "POST",
body: JSON.stringify({ refresh_token: refreshToken }),
}), }),
logout: (token: string) => logout: (token: string) =>
@@ -110,6 +169,34 @@ export const api = {
delete: (id: string, token: string) => delete: (id: string, token: string) =>
request<void>(`/channels/${id}`, { method: "DELETE", token }), request<void>(`/channels/${id}`, { method: "DELETE", token }),
listConfigHistory: (channelId: string, token: string) =>
request<ConfigSnapshot[]>(`/channels/${channelId}/config/history`, { token }),
patchConfigSnapshot: (channelId: string, snapId: string, label: string | null, token: string) =>
request<ConfigSnapshot>(`/channels/${channelId}/config/history/${snapId}`, {
method: "PATCH",
body: JSON.stringify({ label }),
token,
}),
restoreConfigSnapshot: (channelId: string, snapId: string, token: string) =>
request<ChannelResponse>(`/channels/${channelId}/config/history/${snapId}/restore`, {
method: "POST",
token,
}),
listScheduleHistory: (channelId: string, token: string) =>
request<ScheduleHistoryEntry[]>(`/channels/${channelId}/schedule/history`, { token }),
getScheduleGeneration: (channelId: string, genId: string, token: string) =>
request<ScheduleResponse>(`/channels/${channelId}/schedule/history/${genId}`, { token }),
rollbackSchedule: (channelId: string, genId: string, token: string) =>
request<ScheduleResponse>(`/channels/${channelId}/schedule/history/${genId}/rollback`, {
method: "POST",
token,
}),
}, },
library: { library: {
@@ -153,11 +240,58 @@ export const api = {
if (provider) params.set("provider", provider); if (provider) params.set("provider", provider);
return request<LibraryItemResponse[]>(`/library/items?${params}`, { token }); return request<LibraryItemResponse[]>(`/library/items?${params}`, { token });
}, },
syncStatus: (token: string): Promise<LibrarySyncLogEntry[]> =>
request('/library/sync/status', { token }),
triggerSync: (token: string): Promise<void> =>
request('/library/sync', { method: 'POST', token }),
itemsPage: (
token: string,
filter: Partial<{
q: string; type: string; series: string[]; genres: string[]; collection: string;
provider: string; decade: number; min_duration: number; max_duration: number;
offset: number; limit: number;
}>
): Promise<PagedLibraryResponse> => {
const params = new URLSearchParams();
if (filter.q) params.set('q', filter.q);
if (filter.type) params.set('type', filter.type);
if (filter.series) filter.series.forEach(s => params.append('series[]', s));
if (filter.genres) filter.genres.forEach(g => params.append('genres[]', g));
if (filter.collection) params.set('collection', filter.collection);
if (filter.provider) params.set('provider', filter.provider);
if (filter.decade != null) params.set('decade', String(filter.decade));
if (filter.min_duration != null) params.set('min_duration', String(filter.min_duration));
if (filter.max_duration != null) params.set('max_duration', String(filter.max_duration));
params.set('offset', String(filter.offset ?? 0));
params.set('limit', String(filter.limit ?? 50));
return request(`/library/items?${params}`, { token });
},
shows: (token: string, filter?: { q?: string; provider?: string; genres?: string[] }): Promise<ShowSummary[]> => {
const params = new URLSearchParams();
if (filter?.q) params.set('q', filter.q);
if (filter?.provider) params.set('provider', filter.provider);
filter?.genres?.forEach(g => params.append('genres[]', g));
const qs = params.toString();
return request(`/library/shows${qs ? `?${qs}` : ''}`, { token });
},
seasons: (token: string, seriesName: string, provider?: string): Promise<SeasonSummary[]> => {
const params = new URLSearchParams();
if (provider) params.set('provider', provider);
const qs = params.toString();
return request(`/library/shows/${encodeURIComponent(seriesName)}/seasons${qs ? `?${qs}` : ''}`, { token });
},
}, },
files: { files: {
rescan: (token: string) => rescan: (token: string, provider?: string) => {
request<{ items_found: number }>("/files/rescan", { method: "POST", token }), const qs = provider ? `?provider=${encodeURIComponent(provider)}` : "";
return request<{ items_found: number }>(`/files/rescan${qs}`, { method: "POST", token });
},
}, },
transcode: { transcode: {
@@ -182,30 +316,50 @@ export const api = {
activity: (token: string) => activity: (token: string) =>
request<ActivityEvent[]>("/admin/activity", { token }), request<ActivityEvent[]>("/admin/activity", { token }),
getSettings: (token: string): Promise<AdminSettings> =>
request('/admin/settings', { token }),
updateSettings: (token: string, patch: Partial<AdminSettings>): Promise<AdminSettings> =>
request('/admin/settings', {
method: 'PUT',
token,
body: JSON.stringify(patch),
headers: { 'Content-Type': 'application/json' },
}),
providers: { providers: {
getProviders: (token: string) => getProviders: (token: string) =>
request<ProviderConfig[]>("/admin/providers", { token }), request<ProviderConfig[]>("/admin/providers", { token }),
createProvider: (
token: string,
payload: { id: string; provider_type: string; config_json: Record<string, string>; enabled: boolean },
) =>
request<ProviderConfig>("/admin/providers", {
method: "POST",
body: JSON.stringify(payload),
token,
}),
updateProvider: ( updateProvider: (
token: string, token: string,
type: string, id: string,
payload: { config_json: Record<string, string>; enabled: boolean }, payload: { config_json: Record<string, string>; enabled: boolean },
) => ) =>
request<ProviderConfig>(`/admin/providers/${type}`, { request<ProviderConfig>(`/admin/providers/${id}`, {
method: "PUT", method: "PUT",
body: JSON.stringify(payload), body: JSON.stringify(payload),
token, token,
}), }),
deleteProvider: (token: string, type: string) => deleteProvider: (token: string, id: string) =>
request<void>(`/admin/providers/${type}`, { method: "DELETE", token }), request<void>(`/admin/providers/${id}`, { method: "DELETE", token }),
testProvider: ( testProvider: (
token: string, token: string,
type: string, payload: { provider_type: string; config_json: Record<string, string> },
payload: { config_json: Record<string, string>; enabled: boolean },
) => ) =>
request<ProviderTestResult>(`/admin/providers/${type}/test`, { request<ProviderTestResult>("/admin/providers/test", {
method: "POST", method: "POST",
body: JSON.stringify(payload), body: JSON.stringify(payload),
token, token,

View File

@@ -5,7 +5,7 @@ export function exportChannel(channel: ChannelResponse): void {
name: channel.name, name: channel.name,
description: channel.description ?? undefined, description: channel.description ?? undefined,
timezone: channel.timezone, timezone: channel.timezone,
blocks: channel.schedule_config.blocks, day_blocks: channel.schedule_config.day_blocks,
recycle_policy: channel.recycle_policy, recycle_policy: channel.recycle_policy,
}; };
const blob = new Blob([JSON.stringify(payload, null, 2)], { const blob = new Blob([JSON.stringify(payload, null, 2)], {

View File

@@ -1,4 +1,10 @@
import { z } from "zod"; import { z } from "zod";
import { WEEKDAYS } from "@/lib/types";
import type { Weekday } from "@/lib/types";
const weekdaySchema = z.enum([
'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday',
]);
export const mediaFilterSchema = z.object({ export const mediaFilterSchema = z.object({
content_type: z.enum(["movie", "episode", "short"]).nullable().optional(), content_type: z.enum(["movie", "episode", "short"]).nullable().optional(),
@@ -53,7 +59,10 @@ export const channelFormSchema = z.object({
name: z.string().min(1, "Name is required"), name: z.string().min(1, "Name is required"),
timezone: z.string().min(1, "Timezone is required"), timezone: z.string().min(1, "Timezone is required"),
description: z.string().optional(), description: z.string().optional(),
blocks: z.array(blockSchema), day_blocks: z.record(weekdaySchema, z.array(blockSchema))
.default(() =>
Object.fromEntries(WEEKDAYS.map(d => [d, []])) as unknown as Record<Weekday, z.infer<typeof blockSchema>[]>
),
recycle_policy: z.object({ recycle_policy: z.object({
cooldown_days: z.number().int().min(0).nullable().optional(), cooldown_days: z.number().int().min(0).nullable().optional(),
cooldown_generations: z.number().int().min(0).nullable().optional(), cooldown_generations: z.number().int().min(0).nullable().optional(),

View File

@@ -91,8 +91,35 @@ export interface ProgrammingBlock {
access_password?: string; access_password?: string;
} }
// Canonical weekday identifiers used as keys in day-keyed schedule configs.
export type Weekday =
| 'monday' | 'tuesday' | 'wednesday' | 'thursday'
| 'friday' | 'saturday' | 'sunday'
// Monday-first ordering; used to build complete day_blocks records.
export const WEEKDAYS: Weekday[] = [
'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday',
]
// Compact three-letter display labels for each weekday.
export const WEEKDAY_LABELS: Record<Weekday, string> = {
monday: 'Mon', tuesday: 'Tue', wednesday: 'Wed', thursday: 'Thu',
friday: 'Fri', saturday: 'Sat', sunday: 'Sun',
}
export interface ScheduleConfig { export interface ScheduleConfig {
blocks: ProgrammingBlock[]; day_blocks: Record<Weekday, ProgrammingBlock[]>
}
// One saved channel-configuration snapshot, as returned by the config history API.
export interface ConfigSnapshot {
id: string
version_num: number // monotonically increasing version — TODO confirm against backend
label: string | null // user-assigned "pin" label, null when unpinned
created_at: string // ISO timestamp — assumed; verify serialization format
}
export interface ScheduleHistoryEntry {
id: string
generation: number
valid_from: string
valid_until: string
} }
// Config // Config
@@ -135,6 +162,7 @@ export interface ConfigResponse {
} }
export interface ProviderConfig { export interface ProviderConfig {
id: string;
provider_type: string; provider_type: string;
config_json: Record<string, string>; config_json: Record<string, string>;
enabled: boolean; enabled: boolean;
@@ -151,6 +179,7 @@ export interface TokenResponse {
access_token: string; access_token: string;
token_type: string; token_type: string;
expires_in: number; expires_in: number;
refresh_token?: string;
} }
export interface UserResponse { export interface UserResponse {
@@ -263,3 +292,47 @@ export interface CurrentBroadcastResponse {
offset_secs: number; offset_secs: number;
block_access_mode: AccessMode; block_access_mode: AccessMode;
} }
// Library management
// Note: LibraryItemResponse is already defined in this file (search for it above).
// LibraryItemFull extends it with the extra fields returned by the DB-backed endpoint.
export interface LibraryItemFull extends LibraryItemResponse {
thumbnail_url?: string | null;
collection_id?: string | null;
collection_name?: string | null;
}
// Aggregate summary of one show (series) for the grouped library view.
export interface ShowSummary {
series_name: string;
episode_count: number;
season_count: number;
thumbnail_url?: string | null;
genres: string[];
}
// Aggregate summary of one season within a show, for the drill-down view.
export interface SeasonSummary {
season_number: number;
episode_count: number;
thumbnail_url?: string | null;
}
// One page of library search results plus the total match count for paging UI.
export interface PagedLibraryResponse {
items: LibraryItemFull[];
total: number; // total matches across all pages, not just this page
}
// One entry of the library sync log, as returned by /library/sync/status.
export interface LibrarySyncLogEntry {
id: number;
provider_id: string;
started_at: string;
finished_at?: string | null; // null/absent while the sync is still running
items_found: number;
status: 'running' | 'done' | 'error';
error_msg?: string | null; // populated only when status is 'error' — TODO confirm
}
// Global admin settings document. The index signature allows forward-compatible
// extra keys from the backend without a frontend type change.
export interface AdminSettings {
library_sync_interval_hours: number;
[key: string]: unknown;
}