feat: add local files provider with indexing and rescan functionality

- Implemented LocalFilesProvider to manage local video files.
- Added LocalIndex for in-memory and SQLite-backed indexing of video files.
- Introduced scanning functionality to detect video files and extract metadata.
- Added API endpoints for listing collections, genres, and series based on provider capabilities.
- Enhanced existing routes to check for provider capabilities before processing requests.
- Updated frontend to utilize provider capabilities for conditional rendering of UI elements.
- Implemented rescan functionality to refresh the local files index.
- Added database migration for local files index schema.
This commit is contained in:
2026-03-14 03:44:32 +01:00
parent 9b6bcfc566
commit 8f42164bce
30 changed files with 1033 additions and 59 deletions

View File

@@ -87,6 +87,7 @@ dependencies = [
"thiserror 2.0.17", "thiserror 2.0.17",
"time", "time",
"tokio", "tokio",
"tokio-util",
"tower", "tower",
"tower-http", "tower-http",
"tracing", "tracing",
@@ -1372,6 +1373,7 @@ dependencies = [
"async-nats", "async-nats",
"async-trait", "async-trait",
"axum-extra", "axum-extra",
"base64 0.22.1",
"chrono", "chrono",
"domain", "domain",
"futures-core", "futures-core",
@@ -1389,6 +1391,7 @@ dependencies = [
"tracing", "tracing",
"url", "url",
"uuid", "uuid",
"walkdir",
] ]
[[package]] [[package]]
@@ -2468,6 +2471,15 @@ version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]] [[package]]
name = "schannel" name = "schannel"
version = "0.1.28" version = "0.1.28"
@@ -3539,6 +3551,16 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "walkdir"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [
"same-file",
"winapi-util",
]
[[package]] [[package]]
name = "want" name = "want"
version = "0.3.1" version = "0.3.1"
@@ -3675,6 +3697,15 @@ dependencies = [
"wasite", "wasite",
] ]
[[package]]
name = "winapi-util"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
dependencies = [
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "windows-core" name = "windows-core"
version = "0.62.2" version = "0.62.2"

View File

@@ -10,8 +10,8 @@ FROM debian:bookworm-slim
WORKDIR /app WORKDIR /app
# Install OpenSSL (required for many Rust networking crates) and CA certificates # Install OpenSSL, CA certs, and ffmpeg (provides ffprobe for local-files duration scanning)
RUN apt-get update && apt-get install -y libssl3 ca-certificates && rm -rf /var/lib/apt/lists/* RUN apt-get update && apt-get install -y libssl3 ca-certificates ffmpeg && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/api . COPY --from=builder /app/target/release/api .

View File

@@ -11,6 +11,7 @@ postgres = ["infra/postgres"]
auth-oidc = ["infra/auth-oidc"] auth-oidc = ["infra/auth-oidc"]
auth-jwt = ["infra/auth-jwt"] auth-jwt = ["infra/auth-jwt"]
jellyfin = ["infra/jellyfin"] jellyfin = ["infra/jellyfin"]
local-files = ["infra/local-files", "dep:tokio-util"]
[dependencies] [dependencies]
k-core = { git = "https://git.gabrielkaszewski.dev/GKaszewski/k-core", features = [ k-core = { git = "https://git.gabrielkaszewski.dev/GKaszewski/k-core", features = [
@@ -51,3 +52,4 @@ tracing = "0.1"
async-trait = "0.1" async-trait = "0.1"
dotenvy = "0.15.7" dotenvy = "0.15.7"
time = "0.3" time = "0.3"
tokio-util = { version = "0.7", features = ["io"], optional = true }

View File

@@ -3,6 +3,7 @@
//! Loads configuration from environment variables. //! Loads configuration from environment variables.
use std::env; use std::env;
use std::path::PathBuf;
/// Application configuration loaded from environment variables /// Application configuration loaded from environment variables
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -40,6 +41,9 @@ pub struct Config {
pub jellyfin_api_key: Option<String>, pub jellyfin_api_key: Option<String>,
pub jellyfin_user_id: Option<String>, pub jellyfin_user_id: Option<String>,
/// Root directory for the local-files provider. Set `LOCAL_FILES_DIR` to enable.
pub local_files_dir: Option<PathBuf>,
/// Public base URL of this API server (used to build IPTV stream URLs). /// Public base URL of this API server (used to build IPTV stream URLs).
pub base_url: String, pub base_url: String,
} }
@@ -114,6 +118,8 @@ impl Config {
let jellyfin_api_key = env::var("JELLYFIN_API_KEY").ok(); let jellyfin_api_key = env::var("JELLYFIN_API_KEY").ok();
let jellyfin_user_id = env::var("JELLYFIN_USER_ID").ok(); let jellyfin_user_id = env::var("JELLYFIN_USER_ID").ok();
let local_files_dir = env::var("LOCAL_FILES_DIR").ok().map(PathBuf::from);
let base_url = env::var("BASE_URL") let base_url = env::var("BASE_URL")
.unwrap_or_else(|_| format!("http://localhost:{}", port)); .unwrap_or_else(|_| format!("http://localhost:{}", port));
@@ -140,6 +146,7 @@ impl Config {
jellyfin_base_url, jellyfin_base_url,
jellyfin_api_key, jellyfin_api_key,
jellyfin_user_id, jellyfin_user_id,
local_files_dir,
base_url, base_url,
} }
} }

View File

@@ -46,6 +46,7 @@ pub struct TokenResponse {
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
pub struct ConfigResponse { pub struct ConfigResponse {
pub allow_registration: bool, pub allow_registration: bool,
pub provider_capabilities: domain::ProviderCapabilities,
} }
// ============================================================================ // ============================================================================

View File

@@ -35,6 +35,12 @@ pub enum ApiError {
#[error("auth_required")] #[error("auth_required")]
AuthRequired, AuthRequired,
#[error("Not found: {0}")]
NotFound(String),
#[error("Not implemented: {0}")]
NotImplemented(String),
} }
/// Error response body /// Error response body
@@ -132,6 +138,22 @@ impl IntoResponse for ApiError {
details: None, details: None,
}, },
), ),
ApiError::NotFound(msg) => (
StatusCode::NOT_FOUND,
ErrorResponse {
error: "Not found".to_string(),
details: Some(msg.clone()),
},
),
ApiError::NotImplemented(msg) => (
StatusCode::NOT_IMPLEMENTED,
ErrorResponse {
error: "Not implemented".to_string(),
details: Some(msg.clone()),
},
),
}; };
(status, Json(error_response)).into_response() (status, Json(error_response)).into_response()
@@ -146,6 +168,14 @@ impl ApiError {
pub fn internal(msg: impl Into<String>) -> Self { pub fn internal(msg: impl Into<String>) -> Self {
Self::Internal(msg.into()) Self::Internal(msg.into())
} }
pub fn not_found(msg: impl Into<String>) -> Self {
Self::NotFound(msg.into())
}
pub fn not_implemented(msg: impl Into<String>) -> Self {
Self::NotImplemented(msg.into())
}
} }
/// Result type alias for API handlers /// Result type alias for API handlers

View File

@@ -10,7 +10,7 @@ use axum::http::{HeaderName, HeaderValue};
use std::sync::Arc; use std::sync::Arc;
use tower_http::cors::{AllowHeaders, AllowMethods, AllowOrigin, CorsLayer}; use tower_http::cors::{AllowHeaders, AllowMethods, AllowOrigin, CorsLayer};
use domain::{ChannelService, IMediaProvider, ScheduleEngineService, UserService}; use domain::{ChannelService, IMediaProvider, ProviderCapabilities, ScheduleEngineService, StreamingProtocol, UserService};
use infra::factory::{build_channel_repository, build_schedule_repository, build_user_repository}; use infra::factory::{build_channel_repository, build_schedule_repository, build_user_repository};
use infra::run_migrations; use infra::run_migrations;
use k_core::http::server::{ServerConfig, apply_standard_middleware}; use k_core::http::server::{ServerConfig, apply_standard_middleware};
@@ -72,8 +72,52 @@ async fn main() -> anyhow::Result<()> {
let user_service = UserService::new(user_repo); let user_service = UserService::new(user_repo);
let channel_service = ChannelService::new(channel_repo.clone()); let channel_service = ChannelService::new(channel_repo.clone());
// Build media provider — Jellyfin if configured, no-op fallback otherwise. // Build media provider — Jellyfin → local-files → noop, first match wins.
let media_provider: Arc<dyn IMediaProvider> = build_media_provider(&config); #[cfg(feature = "local-files")]
let mut local_index: Option<Arc<infra::LocalIndex>> = None;
let mut maybe_provider: Option<Arc<dyn IMediaProvider>> = None;
#[cfg(feature = "jellyfin")]
if let (Some(base_url), Some(api_key), Some(user_id)) = (
&config.jellyfin_base_url,
&config.jellyfin_api_key,
&config.jellyfin_user_id,
) {
tracing::info!("Media provider: Jellyfin at {}", base_url);
maybe_provider = Some(Arc::new(infra::JellyfinMediaProvider::new(infra::JellyfinConfig {
base_url: base_url.clone(),
api_key: api_key.clone(),
user_id: user_id.clone(),
})));
}
#[cfg(feature = "local-files")]
if maybe_provider.is_none() {
if let Some(dir) = &config.local_files_dir {
if let k_core::db::DatabasePool::Sqlite(ref sqlite_pool) = db_pool {
tracing::info!("Media provider: local files at {:?}", dir);
let lf_cfg = infra::LocalFilesConfig {
root_dir: dir.clone(),
base_url: config.base_url.clone(),
};
let idx = Arc::new(infra::LocalIndex::new(&lf_cfg, sqlite_pool.clone()).await);
local_index = Some(Arc::clone(&idx));
let scan_idx = Arc::clone(&idx);
tokio::spawn(async move { scan_idx.rescan().await; });
maybe_provider = Some(Arc::new(infra::LocalFilesProvider::new(idx, lf_cfg)));
} else {
tracing::warn!("local-files requires SQLite; ignoring LOCAL_FILES_DIR");
}
}
}
let media_provider: Arc<dyn IMediaProvider> = maybe_provider.unwrap_or_else(|| {
tracing::warn!(
"No media provider configured. Set JELLYFIN_BASE_URL / LOCAL_FILES_DIR."
);
Arc::new(NoopMediaProvider)
});
let bg_channel_repo = channel_repo.clone(); let bg_channel_repo = channel_repo.clone();
let schedule_engine = ScheduleEngineService::new( let schedule_engine = ScheduleEngineService::new(
@@ -82,7 +126,8 @@ async fn main() -> anyhow::Result<()> {
schedule_repo, schedule_repo,
); );
let state = AppState::new( #[allow(unused_mut)]
let mut state = AppState::new(
user_service, user_service,
channel_service, channel_service,
schedule_engine, schedule_engine,
@@ -91,6 +136,11 @@ async fn main() -> anyhow::Result<()> {
) )
.await?; .await?;
#[cfg(feature = "local-files")]
{
state.local_index = local_index;
}
let server_config = ServerConfig { let server_config = ServerConfig {
cors_origins: config.cors_allowed_origins.clone(), cors_origins: config.cors_allowed_origins.clone(),
}; };
@@ -141,31 +191,6 @@ async fn main() -> anyhow::Result<()> {
Ok(()) Ok(())
} }
/// Build the media provider from config.
/// Falls back to a no-op provider that returns an informative error when
/// Jellyfin env vars are not set, so other API features still work in dev.
fn build_media_provider(config: &Config) -> Arc<dyn IMediaProvider> {
#[cfg(feature = "jellyfin")]
if let (Some(base_url), Some(api_key), Some(user_id)) = (
&config.jellyfin_base_url,
&config.jellyfin_api_key,
&config.jellyfin_user_id,
) {
tracing::info!("Media provider: Jellyfin at {}", base_url);
return Arc::new(infra::JellyfinMediaProvider::new(infra::JellyfinConfig {
base_url: base_url.clone(),
api_key: api_key.clone(),
user_id: user_id.clone(),
}));
}
tracing::warn!(
"No media provider configured. Set JELLYFIN_BASE_URL, JELLYFIN_API_KEY, \
and JELLYFIN_USER_ID to enable schedule generation."
);
Arc::new(NoopMediaProvider)
}
/// Stand-in provider used when no real media source is configured. /// Stand-in provider used when no real media source is configured.
/// Returns a descriptive error for every call so schedule endpoints fail /// Returns a descriptive error for every call so schedule endpoints fail
/// gracefully rather than panicking at startup. /// gracefully rather than panicking at startup.
@@ -173,14 +198,25 @@ struct NoopMediaProvider;
#[async_trait::async_trait] #[async_trait::async_trait]
impl IMediaProvider for NoopMediaProvider { impl IMediaProvider for NoopMediaProvider {
fn capabilities(&self) -> ProviderCapabilities {
ProviderCapabilities {
collections: false,
series: false,
genres: false,
tags: false,
decade: false,
search: false,
streaming_protocol: StreamingProtocol::DirectFile,
rescan: false,
}
}
async fn fetch_items( async fn fetch_items(
&self, &self,
_: &domain::MediaFilter, _: &domain::MediaFilter,
) -> domain::DomainResult<Vec<domain::MediaItem>> { ) -> domain::DomainResult<Vec<domain::MediaItem>> {
Err(domain::DomainError::InfrastructureError( Err(domain::DomainError::InfrastructureError(
"No media provider configured. Set JELLYFIN_BASE_URL, JELLYFIN_API_KEY, \ "No media provider configured. Set JELLYFIN_BASE_URL or LOCAL_FILES_DIR.".into(),
and JELLYFIN_USER_ID."
.into(),
)) ))
} }

View File

@@ -1,6 +1,5 @@
use axum::{Json, Router, extract::State, routing::get}; use axum::{Json, Router, extract::State, routing::get};
use std::sync::Arc;
use crate::config::Config;
use crate::dto::ConfigResponse; use crate::dto::ConfigResponse;
use crate::state::AppState; use crate::state::AppState;
@@ -8,8 +7,9 @@ pub fn router() -> Router<AppState> {
Router::new().route("/", get(get_config)) Router::new().route("/", get(get_config))
} }
async fn get_config(State(config): State<Arc<Config>>) -> Json<ConfigResponse> { async fn get_config(State(state): State<AppState>) -> Json<ConfigResponse> {
Json(ConfigResponse { Json(ConfigResponse {
allow_registration: config.allow_registration, allow_registration: state.config.allow_registration,
provider_capabilities: state.media_provider.capabilities(),
}) })
} }

View File

@@ -0,0 +1,153 @@
//! Local-file streaming and rescan routes
//!
//! GET /files/stream/:encoded_id — serve a local video file with Range support
//! POST /files/rescan — trigger an index rebuild (auth required)
use axum::{
Router,
extract::{Path, State},
http::{HeaderMap, StatusCode},
response::Response,
routing::{get, post},
};
use crate::{error::ApiError, extractors::CurrentUser, state::AppState};
/// Build the `/files` sub-router.
///
/// `/stream/{id}` is always mounted; `/rescan` is only added when the
/// `local-files` feature is compiled in.
pub fn router() -> Router<AppState> {
    #[allow(unused_mut)] // `mut` is only needed when local-files is enabled
    let mut routes = Router::new().route("/stream/{id}", get(stream_file));
    #[cfg(feature = "local-files")]
    {
        routes = routes.route("/rescan", post(trigger_rescan));
    }
    routes
}
/// Stream a local video file, honouring `Range` headers for seeking.
///
/// The path segment is a base64url-encoded relative path produced by the
/// `LocalFilesProvider`. No authentication required — the ID is not guessable
/// without knowing the filesystem layout.
async fn stream_file(
    State(state): State<AppState>,
    Path(encoded_id): Path<String>,
    headers: HeaderMap,
) -> Result<Response, ApiError> {
    #[cfg(feature = "local-files")]
    {
        use axum::body::Body;
        use std::io::SeekFrom;
        use tokio::io::{AsyncReadExt as _, AsyncSeekExt as _};
        use tokio_util::io::ReaderStream;

        // Feature compiled in but no root configured → 501, same as the
        // feature-off path below.
        let root_dir = state.config.local_files_dir.as_ref().ok_or_else(|| {
            ApiError::not_implemented("LOCAL_FILES_DIR not configured")
        })?;

        // A decode failure means a malformed URL, not a missing file.
        let rel = infra::local_files::decode_stream_id(&encoded_id)
            .ok_or_else(|| ApiError::validation("invalid stream id"))?;

        // Security: canonicalise and verify the path stays inside root.
        // canonicalize() resolves symlinks and `..` components, so a crafted
        // relative path cannot escape `root_dir`.
        let full_path = root_dir.join(&rel);
        let canonical_root = root_dir
            .canonicalize()
            .map_err(|e| ApiError::internal(e.to_string()))?;
        let canonical = full_path
            .canonicalize()
            // canonicalize fails for nonexistent paths → treat as 404.
            .map_err(|_| ApiError::not_found("file not found"))?;
        if !canonical.starts_with(&canonical_root) {
            return Err(ApiError::Forbidden("path traversal detected".into()));
        }

        let mut file = tokio::fs::File::open(&canonical)
            .await
            .map_err(|_| ApiError::not_found("file not found"))?;
        let file_size = file
            .metadata()
            .await
            .map_err(|e| ApiError::internal(e.to_string()))?
            .len();

        // Content-Type comes from the file extension only; no content sniffing.
        let ext = canonical
            .extension()
            .and_then(|e| e.to_str())
            .unwrap_or("")
            .to_lowercase();
        let content_type = content_type_for_ext(&ext);

        // Parse Range header. An absent, unparseable, or unsatisfiable range
        // falls through to a full-body 200 response.
        // NOTE(review): RFC 9110 prescribes 416 for unsatisfiable ranges —
        // consider returning that instead of the full file.
        let range = headers
            .get(axum::http::header::RANGE)
            .and_then(|v| v.to_str().ok())
            .and_then(|r| parse_range(r, file_size));

        // Clamp `end` to the last byte; saturating_sub keeps an empty file
        // from underflowing.
        let (start, end, status) = if let Some((s, e)) = range {
            (s, e.min(file_size.saturating_sub(1)), StatusCode::PARTIAL_CONTENT)
        } else {
            (0, file_size.saturating_sub(1), StatusCode::OK)
        };
        let length = end - start + 1;

        // Seek to the range start and stream exactly `length` bytes; the body
        // is streamed, never buffered whole in memory.
        file.seek(SeekFrom::Start(start))
            .await
            .map_err(|e| ApiError::internal(e.to_string()))?;
        let stream = ReaderStream::new(file.take(length));
        let body = Body::from_stream(stream);

        let mut builder = Response::builder()
            .status(status)
            .header("Content-Type", content_type)
            .header("Content-Length", length.to_string())
            .header("Accept-Ranges", "bytes");
        if status == StatusCode::PARTIAL_CONTENT {
            builder = builder.header(
                "Content-Range",
                format!("bytes {}-{}/{}", start, end, file_size),
            );
        }
        return builder.body(body).map_err(|e| ApiError::internal(e.to_string()));
    }

    #[cfg(not(feature = "local-files"))]
    Err(ApiError::not_implemented("local-files feature not enabled"))
}
/// Trigger a filesystem rescan and return the number of items found.
///
/// Requires an authenticated user; responds with "not implemented" when the
/// local-files provider is not the active media source.
#[cfg(feature = "local-files")]
async fn trigger_rescan(
    State(state): State<AppState>,
    CurrentUser(_user): CurrentUser,
) -> Result<axum::Json<serde_json::Value>, ApiError> {
    let Some(index) = state.local_index.as_ref() else {
        return Err(ApiError::not_implemented("no local files provider active"));
    };
    let found = index.rescan().await;
    Ok(axum::Json(serde_json::json!({ "items_found": found })))
}
/// Map a lowercase file extension to its video MIME type.
///
/// Unknown extensions fall back to `application/octet-stream`.
fn content_type_for_ext(ext: &str) -> &'static str {
    const TABLE: &[(&str, &str)] = &[
        ("mp4", "video/mp4"),
        ("m4v", "video/mp4"),
        ("mkv", "video/x-matroska"),
        ("avi", "video/x-msvideo"),
        ("mov", "video/quicktime"),
        ("webm", "video/webm"),
    ];
    TABLE
        .iter()
        .find(|(e, _)| *e == ext)
        .map(|(_, ct)| *ct)
        .unwrap_or("application/octet-stream")
}
/// Parse an HTTP `Range` header into an inclusive `(start, end)` byte pair.
///
/// Supports the three RFC 9110 single-range forms:
/// - `bytes=a-b`  → explicit start and end
/// - `bytes=a-`   → from `a` to end of file
/// - `bytes=-n`   → last `n` bytes (suffix range)
///
/// For multi-range requests only the first range is honoured (serving one
/// range is permitted by the spec). Returns `None` for malformed or
/// unsatisfiable ranges, which the caller treats as "serve the whole file".
fn parse_range(range: &str, file_size: u64) -> Option<(u64, u64)> {
    let spec = range.strip_prefix("bytes=")?;
    // Multi-range ("a-b, c-d"): honour the first range only.
    let spec = spec.split(',').next()?.trim();
    let (start_str, end_str) = spec.split_once('-')?;

    if start_str.is_empty() {
        // Suffix form "bytes=-n": the final n bytes of the file.
        let suffix: u64 = end_str.parse().ok()?;
        if suffix == 0 || file_size == 0 {
            return None; // zero-length suffix / empty file is unsatisfiable
        }
        return Some((file_size.saturating_sub(suffix), file_size - 1));
    }

    let start: u64 = start_str.parse().ok()?;
    let end: u64 = if end_str.is_empty() {
        // Open-ended form "bytes=a-": run to the last byte.
        file_size.saturating_sub(1)
    } else {
        end_str.parse().ok()?
    };
    // Reject inverted or out-of-file ranges.
    if start > end || start >= file_size {
        return None;
    }
    Some((start, end))
}

View File

@@ -136,6 +136,11 @@ async fn list_collections(
State(state): State<AppState>, State(state): State<AppState>,
CurrentUser(_user): CurrentUser, CurrentUser(_user): CurrentUser,
) -> Result<Json<Vec<CollectionResponse>>, ApiError> { ) -> Result<Json<Vec<CollectionResponse>>, ApiError> {
if !state.media_provider.capabilities().collections {
return Err(ApiError::not_implemented(
"collections not supported by this provider",
));
}
let collections = state.media_provider.list_collections().await?; let collections = state.media_provider.list_collections().await?;
Ok(Json(collections.into_iter().map(Into::into).collect())) Ok(Json(collections.into_iter().map(Into::into).collect()))
} }
@@ -146,6 +151,11 @@ async fn list_series(
CurrentUser(_user): CurrentUser, CurrentUser(_user): CurrentUser,
Query(params): Query<SeriesQuery>, Query(params): Query<SeriesQuery>,
) -> Result<Json<Vec<SeriesResponse>>, ApiError> { ) -> Result<Json<Vec<SeriesResponse>>, ApiError> {
if !state.media_provider.capabilities().series {
return Err(ApiError::not_implemented(
"series not supported by this provider",
));
}
let series = state let series = state
.media_provider .media_provider
.list_series(params.collection.as_deref()) .list_series(params.collection.as_deref())
@@ -159,6 +169,11 @@ async fn list_genres(
CurrentUser(_user): CurrentUser, CurrentUser(_user): CurrentUser,
Query(params): Query<GenresQuery>, Query(params): Query<GenresQuery>,
) -> Result<Json<Vec<String>>, ApiError> { ) -> Result<Json<Vec<String>>, ApiError> {
if !state.media_provider.capabilities().genres {
return Err(ApiError::not_implemented(
"genres not supported by this provider",
));
}
let ct = parse_content_type(params.content_type.as_deref())?; let ct = parse_content_type(params.content_type.as_deref())?;
let genres = state.media_provider.list_genres(ct.as_ref()).await?; let genres = state.media_provider.list_genres(ct.as_ref()).await?;
Ok(Json(genres)) Ok(Json(genres))

View File

@@ -8,6 +8,7 @@ use axum::Router;
pub mod auth; pub mod auth;
pub mod channels; pub mod channels;
pub mod config; pub mod config;
pub mod files;
pub mod iptv; pub mod iptv;
pub mod library; pub mod library;
@@ -17,6 +18,7 @@ pub fn api_v1_router() -> Router<AppState> {
.nest("/auth", auth::router()) .nest("/auth", auth::router())
.nest("/channels", channels::router()) .nest("/channels", channels::router())
.nest("/config", config::router()) .nest("/config", config::router())
.nest("/files", files::router())
.nest("/iptv", iptv::router()) .nest("/iptv", iptv::router())
.nest("/library", library::router()) .nest("/library", library::router())
} }

View File

@@ -25,6 +25,9 @@ pub struct AppState {
#[cfg(feature = "auth-jwt")] #[cfg(feature = "auth-jwt")]
pub jwt_validator: Option<Arc<JwtValidator>>, pub jwt_validator: Option<Arc<JwtValidator>>,
pub config: Arc<Config>, pub config: Arc<Config>,
/// Index for the local-files provider, used by the rescan route.
#[cfg(feature = "local-files")]
pub local_index: Option<Arc<infra::LocalIndex>>,
} }
impl AppState { impl AppState {
@@ -105,6 +108,8 @@ impl AppState {
#[cfg(feature = "auth-jwt")] #[cfg(feature = "auth-jwt")]
jwt_validator, jwt_validator,
config: Arc::new(config), config: Arc::new(config),
#[cfg(feature = "local-files")]
local_index: None,
}) })
} }
} }

View File

@@ -14,7 +14,7 @@ pub mod value_objects;
// Re-export commonly used types // Re-export commonly used types
pub use entities::*; pub use entities::*;
pub use errors::{DomainError, DomainResult}; pub use errors::{DomainError, DomainResult};
pub use ports::{Collection, IMediaProvider, SeriesSummary}; pub use ports::{Collection, IMediaProvider, ProviderCapabilities, SeriesSummary, StreamingProtocol};
pub use repositories::*; pub use repositories::*;
pub use iptv::{generate_m3u, generate_xmltv}; pub use iptv::{generate_m3u, generate_xmltv};
pub use services::{ChannelService, ScheduleEngineService, UserService}; pub use services::{ChannelService, ScheduleEngineService, UserService};

View File

@@ -12,6 +12,37 @@ use crate::entities::MediaItem;
use crate::errors::{DomainError, DomainResult}; use crate::errors::{DomainError, DomainResult};
use crate::value_objects::{ContentType, MediaFilter, MediaItemId}; use crate::value_objects::{ContentType, MediaFilter, MediaItemId};
// ============================================================================
// Provider capabilities
// ============================================================================
/// How a provider delivers video to the client.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum StreamingProtocol {
/// HLS playlist (`.m3u8`). Requires hls.js on non-Safari browsers.
Hls,
/// Direct file URL with Range-header support. Native `<video>` element.
DirectFile,
}
/// Feature matrix for a media provider.
///
/// The API and frontend use this to gate calls and hide UI controls that
/// the active provider does not support.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderCapabilities {
pub collections: bool,
pub series: bool,
pub genres: bool,
pub tags: bool,
pub decade: bool,
pub search: bool,
pub streaming_protocol: StreamingProtocol,
/// Whether `POST /files/rescan` is available.
pub rescan: bool,
}
// ============================================================================ // ============================================================================
// Library browsing types // Library browsing types
// ============================================================================ // ============================================================================
@@ -58,6 +89,12 @@ pub struct SeriesSummary {
/// `NoopMediaProvider`) inherit the default and return a clear error. /// `NoopMediaProvider`) inherit the default and return a clear error.
#[async_trait] #[async_trait]
pub trait IMediaProvider: Send + Sync { pub trait IMediaProvider: Send + Sync {
/// Declare what features this provider supports.
///
/// Called at request time (not cached) so the response always reflects the
/// active provider. Implementations return a plain struct — no I/O needed.
fn capabilities(&self) -> ProviderCapabilities;
/// Fetch metadata for all items matching `filter` from this provider. /// Fetch metadata for all items matching `filter` from this provider.
/// ///
/// The provider interprets each field of `MediaFilter` in terms of its own /// The provider interprets each field of `MediaFilter` in terms of its own

View File

@@ -11,6 +11,7 @@ broker-nats = ["dep:futures-util", "k-core/broker-nats"]
auth-oidc = ["dep:openidconnect", "dep:url", "dep:axum-extra"] auth-oidc = ["dep:openidconnect", "dep:url", "dep:axum-extra"]
auth-jwt = ["dep:jsonwebtoken"] auth-jwt = ["dep:jsonwebtoken"]
jellyfin = ["dep:reqwest"] jellyfin = ["dep:reqwest"]
local-files = ["dep:walkdir", "dep:base64", "sqlite"]
[dependencies] [dependencies]
k-core = { git = "https://git.gabrielkaszewski.dev/GKaszewski/k-core", features = [ k-core = { git = "https://git.gabrielkaszewski.dev/GKaszewski/k-core", features = [
@@ -46,3 +47,5 @@ jsonwebtoken = { version = "10.2.0", features = [
"rsa", "rsa",
"rust_crypto", "rust_crypto",
], optional = true } ], optional = true }
walkdir = { version = "2", optional = true }
base64 = { version = "0.22", optional = true }

View File

@@ -2,7 +2,7 @@ use async_trait::async_trait;
use domain::{ use domain::{
Collection, ContentType, DomainError, DomainResult, IMediaProvider, MediaFilter, MediaItem, Collection, ContentType, DomainError, DomainResult, IMediaProvider, MediaFilter, MediaItem,
MediaItemId, SeriesSummary, MediaItemId, ProviderCapabilities, SeriesSummary, StreamingProtocol,
}; };
use super::config::JellyfinConfig; use super::config::JellyfinConfig;
@@ -129,6 +129,19 @@ impl JellyfinMediaProvider {
#[async_trait] #[async_trait]
impl IMediaProvider for JellyfinMediaProvider { impl IMediaProvider for JellyfinMediaProvider {
fn capabilities(&self) -> ProviderCapabilities {
ProviderCapabilities {
collections: true,
series: true,
genres: true,
tags: true,
decade: true,
search: true,
streaming_protocol: StreamingProtocol::Hls,
rescan: false,
}
}
/// Fetch items matching `filter` from the Jellyfin library. /// Fetch items matching `filter` from the Jellyfin library.
/// ///
/// When `series_names` has more than one entry the results from each series /// When `series_names` has more than one entry the results from each series

View File

@@ -21,6 +21,9 @@ mod channel_repository;
mod schedule_repository; mod schedule_repository;
mod user_repository; mod user_repository;
#[cfg(feature = "local-files")]
pub mod local_files;
// Re-export for convenience // Re-export for convenience
pub use db::run_migrations; pub use db::run_migrations;
@@ -33,3 +36,6 @@ pub use schedule_repository::SqliteScheduleRepository;
#[cfg(feature = "jellyfin")] #[cfg(feature = "jellyfin")]
pub use jellyfin::{JellyfinConfig, JellyfinMediaProvider}; pub use jellyfin::{JellyfinConfig, JellyfinMediaProvider};
#[cfg(feature = "local-files")]
pub use local_files::{LocalFilesConfig, LocalFilesProvider, LocalIndex, decode_stream_id};

View File

@@ -0,0 +1,9 @@
use std::path::PathBuf;
/// Configuration for the local files media provider.
///
/// Plain config structs should carry the standard derives so callers can
/// clone/log them instead of copying fields by hand.
#[derive(Debug, Clone)]
pub struct LocalFilesConfig {
    /// Root directory containing video files. All files are served relative to this.
    pub root_dir: PathBuf,
    /// Public base URL of this API server, used to build stream URLs.
    pub base_url: String,
}

View File

@@ -0,0 +1,182 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use chrono::Utc;
use tokio::sync::RwLock;
use tracing::{error, info};
use domain::MediaItemId;
use super::config::LocalFilesConfig;
use super::scanner::{scan_dir, LocalFileItem};
/// Encode a rel-path string into a URL-safe, padding-free base64 MediaItemId.
pub fn encode_id(rel_path: &str) -> MediaItemId {
    use base64::Engine as _;
    let encoded =
        base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(rel_path.as_bytes());
    MediaItemId::new(encoded)
}
/// Decode a MediaItemId back to a relative path string.
///
/// Returns `None` when the id is not valid base64url or the decoded bytes
/// are not valid UTF-8.
pub fn decode_id(id: &MediaItemId) -> Option<String> {
    use base64::Engine as _;
    base64::engine::general_purpose::URL_SAFE_NO_PAD
        .decode(id.as_ref())
        .ok()
        .and_then(|bytes| String::from_utf8(bytes).ok())
}
/// In-memory (+ SQLite-backed) index of local video files.
///
/// On startup the index is populated from the SQLite cache so the provider can
/// serve requests immediately. A background task calls `rescan()` to pick up
/// any changes on disk and write them back to the cache.
pub struct LocalIndex {
    // Encoded id → scanned item, behind an async RwLock so reads stay
    // concurrent while a rescan swaps the contents.
    items: Arc<RwLock<HashMap<MediaItemId, LocalFileItem>>>,
    /// Root directory that all relative paths in the index resolve against.
    pub root_dir: PathBuf,
    // SQLite pool used to persist/restore the index across restarts.
    pool: sqlx::SqlitePool,
}
impl LocalIndex {
/// Create the index, immediately loading persisted entries from SQLite.
pub async fn new(config: &LocalFilesConfig, pool: sqlx::SqlitePool) -> Self {
let idx = Self {
items: Arc::new(RwLock::new(HashMap::new())),
root_dir: config.root_dir.clone(),
pool,
};
idx.load_from_db().await;
idx
}
/// Load previously scanned items from SQLite (instant on startup).
async fn load_from_db(&self) {
#[derive(sqlx::FromRow)]
struct Row {
id: String,
rel_path: String,
title: String,
duration_secs: i64,
year: Option<i64>,
tags: String,
top_dir: String,
}
let rows = sqlx::query_as::<_, Row>(
"SELECT id, rel_path, title, duration_secs, year, tags, top_dir FROM local_files_index",
)
.fetch_all(&self.pool)
.await;
match rows {
Ok(rows) => {
let mut map = self.items.write().await;
for row in rows {
let tags: Vec<String> =
serde_json::from_str(&row.tags).unwrap_or_default();
let item = LocalFileItem {
rel_path: row.rel_path,
title: row.title,
duration_secs: row.duration_secs as u32,
year: row.year.map(|y| y as u16),
tags,
top_dir: row.top_dir,
};
map.insert(MediaItemId::new(row.id), item);
}
info!("Local files index: loaded {} items from DB", map.len());
}
Err(e) => {
// Table might not exist yet on first run — that's fine.
tracing::debug!("Could not load local files index from DB: {}", e);
}
}
}
/// Scan the filesystem for video files and rebuild the index.
///
/// Returns the number of items found. Called on startup (background task)
/// and via `POST /files/rescan`.
pub async fn rescan(&self) -> u32 {
info!("Local files: scanning {:?}", self.root_dir);
let new_items = scan_dir(&self.root_dir).await;
let count = new_items.len() as u32;
// Swap in-memory map.
{
let mut map = self.items.write().await;
map.clear();
for item in &new_items {
let id = encode_id(&item.rel_path);
map.insert(id, item.clone());
}
}
// Persist to SQLite.
if let Err(e) = self.save_to_db(&new_items).await {
error!("Failed to persist local files index: {}", e);
}
info!("Local files: indexed {} items", count);
count
}
/// Replace the persisted index with `items`.
///
/// Deletes every existing row and re-inserts all items inside a single
/// transaction, so a crash mid-write never leaves a half-rebuilt table.
async fn save_to_db(&self, items: &[LocalFileItem]) -> Result<(), sqlx::Error> {
    // Rebuild the table in one transaction.
    let mut tx = self.pool.begin().await?;
    sqlx::query("DELETE FROM local_files_index")
        .execute(&mut *tx)
        .await?;
    // One timestamp for the whole batch — every row comes from the same scan.
    let now = Utc::now().to_rfc3339();
    for item in items {
        let id = encode_id(&item.rel_path).into_inner();
        // Tags are stored as a JSON array string; fall back to "[]" on the
        // (unlikely) serialisation failure rather than aborting the save.
        let tags_json = serde_json::to_string(&item.tags).unwrap_or_else(|_| "[]".into());
        sqlx::query(
            "INSERT INTO local_files_index \
             (id, rel_path, title, duration_secs, year, tags, top_dir, scanned_at) \
             VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
        )
        .bind(&id)
        .bind(&item.rel_path)
        .bind(&item.title)
        .bind(item.duration_secs as i64)
        .bind(item.year.map(|y| y as i64))
        .bind(&tags_json)
        .bind(&item.top_dir)
        .bind(&now)
        .execute(&mut *tx)
        .await?;
    }
    tx.commit().await
}
pub async fn get(&self, id: &MediaItemId) -> Option<LocalFileItem> {
self.items.read().await.get(id).cloned()
}
/// Snapshot every `(id, item)` pair currently in the index.
pub async fn get_all(&self) -> Vec<(MediaItemId, LocalFileItem)> {
    let guard = self.items.read().await;
    let mut out = Vec::with_capacity(guard.len());
    for (id, item) in guard.iter() {
        out.push((id.clone(), item.clone()));
    }
    out
}
/// Return unique top-level directories as collection names, sorted.
pub async fn collections(&self) -> Vec<String> {
    let map = self.items.read().await;
    // BTreeSet gives deduplication and lexicographic order in one pass,
    // replacing the HashSet-then-sort two-step.
    let dirs: std::collections::BTreeSet<String> =
        map.values().map(|item| item.top_dir.clone()).collect();
    dirs.into_iter().collect()
}
}

View File

@@ -0,0 +1,8 @@
pub mod config;
pub mod index;
pub mod provider;
pub mod scanner;
pub use config::LocalFilesConfig;
pub use index::LocalIndex;
pub use provider::{LocalFilesProvider, decode_stream_id};

View File

@@ -0,0 +1,165 @@
use std::sync::Arc;
use async_trait::async_trait;
use domain::{
Collection, ContentType, DomainError, DomainResult, IMediaProvider, MediaFilter, MediaItem,
MediaItemId, ProviderCapabilities, StreamingProtocol,
};
use super::config::LocalFilesConfig;
use super::index::{LocalIndex, decode_id};
use super::scanner::LocalFileItem;
/// Media provider backed by a directory of local video files.
pub struct LocalFilesProvider {
    /// Shared index of scanned files (in-memory map + SQLite persistence).
    pub index: Arc<LocalIndex>,
    /// Base URL used to build absolute stream URLs; stored without a
    /// trailing slash (normalised in `new`).
    base_url: String,
}
/// Items shorter than this are classified as `Short` rather than `Movie`.
const SHORT_DURATION_SECS: u32 = 1200; // 20 minutes

impl LocalFilesProvider {
    /// Create a provider over `index`, stripping any trailing slashes from
    /// the configured base URL so URL joining never produces `//`.
    pub fn new(index: Arc<LocalIndex>, config: LocalFilesConfig) -> Self {
        Self {
            index,
            base_url: config.base_url.trim_end_matches('/').to_string(),
        }
    }
}

/// Convert an indexed local file into the domain `MediaItem`.
///
/// Local files carry no series/genre metadata, so those fields are empty;
/// the content type is derived heuristically from the duration.
fn to_media_item(id: MediaItemId, item: &LocalFileItem) -> MediaItem {
    // Use the shared threshold constant — previously hard-coded as 1200,
    // which could silently drift from the filter logic in fetch_items.
    let content_type = if item.duration_secs < SHORT_DURATION_SECS {
        ContentType::Short
    } else {
        ContentType::Movie
    };
    MediaItem {
        id,
        title: item.title.clone(),
        content_type,
        duration_secs: item.duration_secs,
        description: None,
        genres: vec![],
        year: item.year,
        tags: item.tags.clone(),
        series_name: None,
        season_number: None,
        episode_number: None,
    }
}
#[async_trait]
impl IMediaProvider for LocalFilesProvider {
    /// Advertise support for directory collections, tags, decade and search
    /// filtering, direct-file streaming, and rescanning. Series and genres
    /// are unavailable — local files carry no such metadata.
    fn capabilities(&self) -> ProviderCapabilities {
        ProviderCapabilities {
            collections: true,
            series: false,
            genres: false,
            tags: true,
            decade: true,
            search: true,
            streaming_protocol: StreamingProtocol::DirectFile,
            rescan: true,
        }
    }

    /// Return all indexed items matching `filter`.
    ///
    /// All predicates are ANDed; within `tags` an item matches if it has at
    /// least one of the requested tags (case-insensitive).
    async fn fetch_items(&self, filter: &MediaFilter) -> DomainResult<Vec<MediaItem>> {
        // Lowercase the search term once, not per item inside the closure.
        let query = filter.search_term.as_ref().map(|q| q.to_lowercase());
        let all = self.index.get_all().await;
        let results = all
            .into_iter()
            .filter_map(|(id, item)| {
                // content_type: derive heuristically from duration, then filter.
                let content_type = if item.duration_secs < SHORT_DURATION_SECS {
                    ContentType::Short
                } else {
                    ContentType::Movie
                };
                if let Some(ref ct) = filter.content_type {
                    if &content_type != ct {
                        return None;
                    }
                }
                // collections: match against the item's top-level directory.
                if !filter.collections.is_empty() && !filter.collections.contains(&item.top_dir) {
                    return None;
                }
                // tags: OR — item must have at least one matching tag.
                if !filter.tags.is_empty() {
                    let has = filter
                        .tags
                        .iter()
                        .any(|tag| item.tags.iter().any(|t| t.eq_ignore_ascii_case(tag)));
                    if !has {
                        return None;
                    }
                }
                // decade: year must fall in [decade, decade + 9].
                if let Some(decade) = filter.decade {
                    match item.year {
                        Some(y) if y >= decade && y <= decade + 9 => {}
                        _ => return None,
                    }
                }
                // duration bounds (inclusive).
                if let Some(min) = filter.min_duration_secs {
                    if item.duration_secs < min {
                        return None;
                    }
                }
                if let Some(max) = filter.max_duration_secs {
                    if item.duration_secs > max {
                        return None;
                    }
                }
                // search_term: case-insensitive substring match on the title.
                if let Some(ref q) = query {
                    if !item.title.to_lowercase().contains(q.as_str()) {
                        return None;
                    }
                }
                Some(to_media_item(id, &item))
            })
            .collect();
        Ok(results)
    }

    /// Look up one item by ID; `Ok(None)` if it is not in the index.
    async fn fetch_by_id(&self, item_id: &MediaItemId) -> DomainResult<Option<MediaItem>> {
        Ok(self
            .index
            .get(item_id)
            .await
            .map(|item| to_media_item(item_id.clone(), &item)))
    }

    /// Build the absolute URL the client should stream this item from.
    async fn get_stream_url(&self, item_id: &MediaItemId) -> DomainResult<String> {
        Ok(format!(
            "{}/api/v1/files/stream/{}",
            self.base_url,
            item_id.as_ref()
        ))
    }

    /// Expose each unique top-level directory as a collection (id == name).
    async fn list_collections(&self) -> DomainResult<Vec<Collection>> {
        let dirs = self.index.collections().await;
        Ok(dirs
            .into_iter()
            .map(|d| Collection {
                id: d.clone(),
                name: d,
                collection_type: None,
            })
            .collect())
    }
}
/// Decode an encoded ID from a URL path segment to its relative path string.
pub fn decode_stream_id(encoded: &str) -> Option<String> {
    let id = MediaItemId::new(encoded);
    decode_id(&id)
}

View File

@@ -0,0 +1,164 @@
use std::path::Path;
use tokio::process::Command;
const VIDEO_EXTENSIONS: &[&str] = &["mp4", "mkv", "avi", "mov", "webm", "m4v"];
/// In-memory representation of a scanned local video file.
#[derive(Debug, Clone)]
pub struct LocalFileItem {
    /// Relative path from root, with forward slashes (used as the stable ID source).
    pub rel_path: String,
    /// Display title: the file stem with `_`, `-`, `.` replaced by spaces.
    pub title: String,
    /// Duration from ffprobe; 0 when the file could not be probed.
    pub duration_secs: u32,
    /// Release year parsed from the filename or parent directories, if any.
    pub year: Option<u16>,
    /// Ancestor directory names between root and file (excluding root itself).
    pub tags: Vec<String>,
    /// First path component under root (used as collection id/name).
    pub top_dir: String,
}
/// Walk `root` and return all recognised video files with metadata.
///
/// ffprobe is called for each file to determine duration. Files that cannot be
/// probed are included with `duration_secs = 0` so they still appear in the index.
pub async fn scan_dir(root: &Path) -> Vec<LocalFileItem> {
let mut items = Vec::new();
let walker = walkdir::WalkDir::new(root).follow_links(true);
for entry in walker.into_iter().filter_map(|e| e.ok()) {
if !entry.file_type().is_file() {
continue;
}
let path = entry.path();
let ext = path
.extension()
.and_then(|e| e.to_str())
.map(|e| e.to_lowercase());
let ext = match ext {
Some(ref e) if VIDEO_EXTENSIONS.contains(&e.as_str()) => e.clone(),
_ => continue,
};
let _ = ext; // extension validated, not needed further
let rel = match path.strip_prefix(root) {
Ok(r) => r,
Err(_) => continue,
};
// Normalise to forward-slash string for cross-platform stability.
let rel_path: String = rel
.components()
.map(|c| c.as_os_str().to_string_lossy().into_owned())
.collect::<Vec<_>>()
.join("/");
// Top-level directory under root.
let top_dir = rel
.components()
.next()
.filter(|_| rel.components().count() > 1) // skip if file is at root level
.map(|c| c.as_os_str().to_string_lossy().into_owned())
.unwrap_or_else(|| "__root__".to_string());
// Title: stem with separator chars replaced by spaces.
let stem = path
.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("")
.to_string();
let title = stem.replace(['_', '-', '.'], " ");
let title = title.trim().to_string();
// Year: first 4-digit number starting with 19xx or 20xx in filename or parent dirs.
let search_str = format!(
"{} {}",
stem,
rel.parent()
.and_then(|p| p.to_str())
.unwrap_or("")
);
let year = extract_year(&search_str);
// Tags: ancestor directory components between root and the file.
let tags: Vec<String> = rel
.parent()
.into_iter()
.flat_map(|p| p.components())
.map(|c| c.as_os_str().to_string_lossy().into_owned())
.filter(|s| !s.is_empty())
.collect();
let duration_secs = get_duration(path).await.unwrap_or(0);
items.push(LocalFileItem {
rel_path,
title,
duration_secs,
year,
tags,
top_dir,
});
}
items
}
/// Extract the first plausible 4-digit year (1900–2099) from `s`.
///
/// The match must be digit-bounded: a digit immediately before or after the
/// window disqualifies it (so "20199" yields no year). Returns `None` when
/// no window qualifies.
///
/// Works on bytes: ASCII digits are single bytes, and any byte of a
/// multi-byte UTF-8 character fails `is_ascii_digit`, so non-ASCII text is
/// handled correctly without decoding.
fn extract_year(s: &str) -> Option<u16> {
    let bytes = s.as_bytes();
    let n = bytes.len();
    // saturating_sub keeps the range empty for inputs shorter than 4 bytes.
    for i in 0..n.saturating_sub(3) {
        let window = &bytes[i..i + 4];
        // All four bytes must be ASCII digits.
        if !window.iter().all(|b| b.is_ascii_digit()) {
            continue;
        }
        // Four digits always fit in u16 (max 9999), so this cannot overflow.
        let num = window
            .iter()
            .fold(0u16, |acc, &b| acc * 10 + u16::from(b - b'0'));
        if !(1900..=2099).contains(&num) {
            continue;
        }
        // Word-boundary: byte before and after must not be digits.
        let before_ok = i == 0 || !bytes[i - 1].is_ascii_digit();
        let after_ok = i + 4 >= n || !bytes[i + 4].is_ascii_digit();
        if before_ok && after_ok {
            return Some(num);
        }
    }
    None
}
/// Run ffprobe to get the duration of `path` in whole seconds.
///
/// Returns `None` if ffprobe is not installed, exits with an error, the path
/// is not valid UTF-8, or its JSON output lacks a parseable duration.
async fn get_duration(path: &Path) -> Option<u32> {
    // Minimal mirror of ffprobe's `-show_format` JSON: we only need
    // `format.duration`, which ffprobe reports as a decimal string.
    #[derive(serde::Deserialize)]
    struct Fmt {
        duration: Option<String>,
    }
    #[derive(serde::Deserialize)]
    struct Out {
        format: Fmt,
    }
    let output = Command::new("ffprobe")
        .args([
            "-v",
            "quiet",
            "-print_format",
            "json",
            "-show_format",
            path.to_str()?,
        ])
        .output()
        .await
        .ok()?;
    // A non-zero exit means the probe failed; don't trust whatever is on stdout.
    if !output.status.success() {
        return None;
    }
    let parsed: Out = serde_json::from_slice(&output.stdout).ok()?;
    let dur: f64 = parsed.format.duration?.parse().ok()?;
    // Truncate fractional seconds; durations comfortably fit in u32.
    Some(dur as u32)
}

View File

@@ -0,0 +1,10 @@
-- Persistent cache of the local files index so the in-memory map can be
-- restored instantly on startup without rescanning the filesystem.
CREATE TABLE IF NOT EXISTS local_files_index (
    id TEXT PRIMARY KEY,                        -- stable item ID, encoded from rel_path
    rel_path TEXT NOT NULL UNIQUE,              -- path relative to the scan root, forward slashes
    title TEXT NOT NULL,                        -- display title derived from the file stem
    duration_secs INTEGER NOT NULL DEFAULT 0,   -- 0 when ffprobe could not determine it
    year INTEGER,                               -- release year parsed from the name, if any
    tags TEXT NOT NULL DEFAULT '[]',            -- JSON array of ancestor directory names
    top_dir TEXT NOT NULL DEFAULT '',           -- first path component under root (collection)
    scanned_at TEXT NOT NULL                    -- RFC 3339 timestamp of the scan that wrote the row
);

View File

@@ -19,6 +19,7 @@ import type {
FillStrategy, FillStrategy,
ContentType, ContentType,
MediaFilter, MediaFilter,
ProviderCapabilities,
RecyclePolicy, RecyclePolicy,
} from "@/lib/types"; } from "@/lib/types";
@@ -238,6 +239,7 @@ interface AlgorithmicFilterEditorProps {
errors: FieldErrors; errors: FieldErrors;
setFilter: (patch: Partial<MediaFilter>) => void; setFilter: (patch: Partial<MediaFilter>) => void;
setStrategy: (strategy: FillStrategy) => void; setStrategy: (strategy: FillStrategy) => void;
capabilities?: ProviderCapabilities;
} }
function AlgorithmicFilterEditor({ function AlgorithmicFilterEditor({
@@ -246,14 +248,23 @@ function AlgorithmicFilterEditor({
errors, errors,
setFilter, setFilter,
setStrategy, setStrategy,
capabilities,
}: AlgorithmicFilterEditorProps) { }: AlgorithmicFilterEditorProps) {
const [showGenres, setShowGenres] = useState(false); const [showGenres, setShowGenres] = useState(false);
const { data: collections, isLoading: loadingCollections } = useCollections(); const { data: collections, isLoading: loadingCollections } = useCollections();
const { data: series, isLoading: loadingSeries } = useSeries(); const { data: series, isLoading: loadingSeries } = useSeries(undefined, {
const { data: genreOptions } = useGenres(content.filter.content_type ?? undefined); enabled: capabilities?.series !== false,
});
const { data: genreOptions } = useGenres(content.filter.content_type ?? undefined, {
enabled: capabilities?.genres !== false,
});
const isEpisode = content.filter.content_type === "episode"; const isEpisode = content.filter.content_type === "episode";
const collectionLabel =
capabilities?.collections && !capabilities?.series && !capabilities?.genres
? "Directory"
: "Library";
return ( return (
<div className="space-y-3 rounded-md border border-zinc-700/50 bg-zinc-800 p-3"> <div className="space-y-3 rounded-md border border-zinc-700/50 bg-zinc-800 p-3">
@@ -289,8 +300,8 @@ function AlgorithmicFilterEditor({
</Field> </Field>
</div> </div>
{/* Series — only meaningful for episodes */} {/* Series — only meaningful for episodes when provider supports it */}
{isEpisode && ( {isEpisode && capabilities?.series !== false && (
<Field <Field
label="Series" label="Series"
hint={ hint={
@@ -308,15 +319,15 @@ function AlgorithmicFilterEditor({
</Field> </Field>
)} )}
{/* Library — real collection names when the provider supports it */} {/* Library/Directory — real collection names when the provider supports it */}
<Field <Field
label="Library" label={collectionLabel}
hint={ hint={
loadingCollections loadingCollections
? "Loading libraries…" ? `Loading ${collectionLabel.toLowerCase()}s…`
: collections : collections
? "Scope this block to one library" ? `Scope this block to one ${collectionLabel.toLowerCase()}`
: "Enter a provider library ID" : `Enter a provider ${collectionLabel.toLowerCase()} ID`
} }
> >
{collections && collections.length > 0 ? ( {collections && collections.length > 0 ? (
@@ -341,7 +352,8 @@ function AlgorithmicFilterEditor({
)} )}
</Field> </Field>
{/* Genres with browse-from-library shortcut */} {/* Genres — only shown when provider supports it */}
{capabilities?.genres !== false && (
<Field label="Genres" hint="Press Enter or comma to add"> <Field label="Genres" hint="Press Enter or comma to add">
<TagInput <TagInput
values={content.filter.genres} values={content.filter.genres}
@@ -376,6 +388,7 @@ function AlgorithmicFilterEditor({
</div> </div>
)} )}
</Field> </Field>
)}
<Field label="Tags" hint="Press Enter or comma to add"> <Field label="Tags" hint="Press Enter or comma to add">
<TagInput <TagInput
@@ -435,9 +448,10 @@ interface BlockEditorProps {
onChange: (block: ProgrammingBlock) => void; onChange: (block: ProgrammingBlock) => void;
onRemove: () => void; onRemove: () => void;
onSelect: () => void; onSelect: () => void;
capabilities?: ProviderCapabilities;
} }
function BlockEditor({ block, index, isSelected, color, errors, onChange, onRemove, onSelect }: BlockEditorProps) { function BlockEditor({ block, index, isSelected, color, errors, onChange, onRemove, onSelect, capabilities }: BlockEditorProps) {
const [expanded, setExpanded] = useState(isSelected); const [expanded, setExpanded] = useState(isSelected);
const elRef = useRef<HTMLDivElement>(null); const elRef = useRef<HTMLDivElement>(null);
@@ -555,6 +569,7 @@ function BlockEditor({ block, index, isSelected, color, errors, onChange, onRemo
errors={errors} errors={errors}
setFilter={setFilter} setFilter={setFilter}
setStrategy={setStrategy} setStrategy={setStrategy}
capabilities={capabilities}
/> />
{content.strategy === "sequential" && ( {content.strategy === "sequential" && (
@@ -719,6 +734,7 @@ interface EditChannelSheetProps {
) => void; ) => void;
isPending: boolean; isPending: boolean;
error?: string | null; error?: string | null;
capabilities?: ProviderCapabilities;
} }
export function EditChannelSheet({ export function EditChannelSheet({
@@ -728,6 +744,7 @@ export function EditChannelSheet({
onSubmit, onSubmit,
isPending, isPending,
error, error,
capabilities,
}: EditChannelSheetProps) { }: EditChannelSheetProps) {
const [name, setName] = useState(""); const [name, setName] = useState("");
const [description, setDescription] = useState(""); const [description, setDescription] = useState("");
@@ -1027,6 +1044,7 @@ export function EditChannelSheet({
onChange={(b) => updateBlock(idx, b)} onChange={(b) => updateBlock(idx, b)}
onRemove={() => removeBlock(idx)} onRemove={() => removeBlock(idx)}
onSelect={() => setSelectedBlockId(block.id)} onSelect={() => setSelectedBlockId(block.id)}
capabilities={capabilities}
/> />
))} ))}
</div> </div>

View File

@@ -11,6 +11,8 @@ import {
useGenerateSchedule, useGenerateSchedule,
} from "@/hooks/use-channels"; } from "@/hooks/use-channels";
import { useAuthContext } from "@/context/auth-context"; import { useAuthContext } from "@/context/auth-context";
import { useConfig } from "@/hooks/use-config";
import { useRescanLibrary } from "@/hooks/use-library";
import { api } from "@/lib/api"; import { api } from "@/lib/api";
import { toast } from "sonner"; import { toast } from "sonner";
import { useQueryClient } from "@tanstack/react-query"; import { useQueryClient } from "@tanstack/react-query";
@@ -34,11 +36,14 @@ export default function DashboardPage() {
const { token } = useAuthContext(); const { token } = useAuthContext();
const queryClient = useQueryClient(); const queryClient = useQueryClient();
const { data: channels, isLoading, error } = useChannels(); const { data: channels, isLoading, error } = useChannels();
const { data: config } = useConfig();
const capabilities = config?.provider_capabilities;
const createChannel = useCreateChannel(); const createChannel = useCreateChannel();
const updateChannel = useUpdateChannel(); const updateChannel = useUpdateChannel();
const deleteChannel = useDeleteChannel(); const deleteChannel = useDeleteChannel();
const generateSchedule = useGenerateSchedule(); const generateSchedule = useGenerateSchedule();
const rescanLibrary = useRescanLibrary();
// Channel ordering — persisted to localStorage // Channel ordering — persisted to localStorage
const [channelOrder, setChannelOrder] = useState<string[]>([]); const [channelOrder, setChannelOrder] = useState<string[]>([]);
@@ -226,6 +231,22 @@ export default function DashboardPage() {
</p> </p>
</div> </div>
<div className="flex gap-2"> <div className="flex gap-2">
{capabilities?.rescan && (
<Button
onClick={() =>
rescanLibrary.mutate(undefined, {
onSuccess: (d) => toast.success(`Rescan complete: ${d.items_found} files found`),
onError: () => toast.error("Rescan failed"),
})
}
disabled={rescanLibrary.isPending}
title="Rescan local files directory"
className="border-zinc-700 text-zinc-400 hover:text-zinc-100"
>
<RefreshCw className={`size-4 ${rescanLibrary.isPending ? "animate-spin" : ""}`} />
Rescan library
</Button>
)}
{channels && channels.length > 0 && ( {channels && channels.length > 0 && (
<Button <Button
onClick={handleRegenerateAll} onClick={handleRegenerateAll}
@@ -346,6 +367,7 @@ export default function DashboardPage() {
onSubmit={handleEdit} onSubmit={handleEdit}
isPending={updateChannel.isPending} isPending={updateChannel.isPending}
error={updateChannel.error?.message} error={updateChannel.error?.message}
capabilities={capabilities}
/> />
<ScheduleSheet <ScheduleSheet

View File

@@ -17,6 +17,8 @@ interface VideoPlayerProps {
/** Active subtitle track index, or -1 to disable. */ /** Active subtitle track index, or -1 to disable. */
subtitleTrack?: number; subtitleTrack?: number;
muted?: boolean; muted?: boolean;
/** Force direct-file mode (skips hls.js even for .m3u8 URLs). */
streamingProtocol?: "hls" | "direct_file";
onStreamError?: () => void; onStreamError?: () => void;
onSubtitleTracksChange?: (tracks: SubtitleTrack[]) => void; onSubtitleTracksChange?: (tracks: SubtitleTrack[]) => void;
/** Called when the browser blocks autoplay and user interaction is required. */ /** Called when the browser blocks autoplay and user interaction is required. */
@@ -34,6 +36,7 @@ const VideoPlayer = forwardRef<HTMLVideoElement, VideoPlayerProps>(
initialOffset = 0, initialOffset = 0,
subtitleTrack = -1, subtitleTrack = -1,
muted = false, muted = false,
streamingProtocol,
onStreamError, onStreamError,
onSubtitleTracksChange, onSubtitleTracksChange,
onNeedsInteraction, onNeedsInteraction,
@@ -75,7 +78,7 @@ const VideoPlayer = forwardRef<HTMLVideoElement, VideoPlayerProps>(
onSubtitleTracksChange?.([]); onSubtitleTracksChange?.([]);
setIsBuffering(true); setIsBuffering(true);
const isHls = src.includes(".m3u8"); const isHls = streamingProtocol !== "direct_file" && src.includes(".m3u8");
if (isHls && Hls.isSupported()) { if (isHls && Hls.isSupported()) {
const hls = new Hls({ const hls = new Hls({
@@ -117,10 +120,18 @@ const VideoPlayer = forwardRef<HTMLVideoElement, VideoPlayerProps>(
{ once: true }, { once: true },
); );
} else { } else {
// Plain MP4 fallback // Plain MP4 / direct file: seek to offset after metadata loads.
video.src = src; video.src = src;
video.addEventListener(
"loadedmetadata",
() => {
if (initialOffset > 0) video.currentTime = initialOffset;
video.muted = mutedRef.current;
video.play().catch(() => onNeedsInteraction?.());
},
{ once: true },
);
video.load(); video.load();
video.play().catch(() => {});
} }
return () => { return () => {

View File

@@ -0,0 +1,13 @@
"use client";

import { useQuery } from "@tanstack/react-query";
import { api } from "@/lib/api";
import type { ConfigResponse } from "@/lib/types";

/**
 * Fetch the public server configuration, including `provider_capabilities`
 * used to conditionally render provider-specific UI (e.g. the rescan button).
 */
export function useConfig() {
  return useQuery<ConfigResponse>({
    queryKey: ["config"],
    queryFn: () => api.config.get(),
    // Config rarely changes at runtime, so cache for 5 minutes.
    staleTime: 5 * 60 * 1000,
  });
}

View File

@@ -1,6 +1,6 @@
"use client"; "use client";
import { useQuery } from "@tanstack/react-query"; import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import { api } from "@/lib/api"; import { api } from "@/lib/api";
import { useAuthContext } from "@/context/auth-context"; import { useAuthContext } from "@/context/auth-context";
import type { MediaFilter } from "@/lib/types"; import type { MediaFilter } from "@/lib/types";
@@ -23,27 +23,39 @@ export function useCollections() {
* All series are loaded upfront so the series picker can filter client-side * All series are loaded upfront so the series picker can filter client-side
* without a request per keystroke. * without a request per keystroke.
*/ */
export function useSeries(collectionId?: string) { export function useSeries(collectionId?: string, opts?: { enabled?: boolean }) {
const { token } = useAuthContext(); const { token } = useAuthContext();
return useQuery({ return useQuery({
queryKey: ["library", "series", collectionId ?? null], queryKey: ["library", "series", collectionId ?? null],
queryFn: () => api.library.series(token!, collectionId), queryFn: () => api.library.series(token!, collectionId),
enabled: !!token, enabled: !!token && (opts?.enabled ?? true),
staleTime: STALE, staleTime: STALE,
}); });
} }
/** List available genres, optionally scoped to a content type. */ /** List available genres, optionally scoped to a content type. */
export function useGenres(contentType?: string) { export function useGenres(contentType?: string, opts?: { enabled?: boolean }) {
const { token } = useAuthContext(); const { token } = useAuthContext();
return useQuery({ return useQuery({
queryKey: ["library", "genres", contentType ?? null], queryKey: ["library", "genres", contentType ?? null],
queryFn: () => api.library.genres(token!, contentType), queryFn: () => api.library.genres(token!, contentType),
enabled: !!token, enabled: !!token && (opts?.enabled ?? true),
staleTime: STALE, staleTime: STALE,
}); });
} }
/** Trigger a local-files rescan. Only available when `provider_capabilities.rescan` is true. */
export function useRescanLibrary() {
const { token } = useAuthContext();
const queryClient = useQueryClient();
return useMutation({
mutationFn: () => api.files.rescan(token!),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: ["library"] });
},
});
}
/** /**
* Fetch items matching a filter for the block editor's "Preview results" panel. * Fetch items matching a filter for the block editor's "Preview results" panel.
* Pass `enabled: false` until the user explicitly requests a preview. * Pass `enabled: false` until the user explicitly requests a preview.

View File

@@ -142,6 +142,11 @@ export const api = {
}, },
}, },
files: {
rescan: (token: string) =>
request<{ items_found: number }>("/files/rescan", { method: "POST", token }),
},
schedule: { schedule: {
generate: (channelId: string, token: string) => generate: (channelId: string, token: string) =>
request<ScheduleResponse>(`/channels/${channelId}/schedule`, { request<ScheduleResponse>(`/channels/${channelId}/schedule`, {

View File

@@ -82,8 +82,22 @@ export interface ScheduleConfig {
// Config // Config
/** How the active provider serves media: HLS playlists or direct file URLs. */
export type StreamingProtocol = "hls" | "direct_file";

/**
 * Feature flags reported by the backend's media provider; the UI hides
 * filters and actions the provider does not support.
 */
export interface ProviderCapabilities {
  collections: boolean;
  series: boolean;
  genres: boolean;
  tags: boolean;
  decade: boolean;
  search: boolean;
  streaming_protocol: StreamingProtocol;
  /** True when the provider supports POST /files/rescan. */
  rescan: boolean;
}
export interface ConfigResponse { export interface ConfigResponse {
allow_registration: boolean; allow_registration: boolean;
provider_capabilities: ProviderCapabilities;
} }
// Auth // Auth