feat: add local files provider with indexing and rescan functionality
- Implemented LocalFilesProvider to manage local video files. - Added LocalIndex for in-memory and SQLite-backed indexing of video files. - Introduced scanning functionality to detect video files and extract metadata. - Added API endpoints for listing collections, genres, and series based on provider capabilities. - Enhanced existing routes to check for provider capabilities before processing requests. - Updated frontend to utilize provider capabilities for conditional rendering of UI elements. - Implemented rescan functionality to refresh the local files index. - Added database migration for local files index schema.
This commit is contained in:
31
k-tv-backend/Cargo.lock
generated
31
k-tv-backend/Cargo.lock
generated
@@ -87,6 +87,7 @@ dependencies = [
|
||||
"thiserror 2.0.17",
|
||||
"time",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
"tower",
|
||||
"tower-http",
|
||||
"tracing",
|
||||
@@ -1372,6 +1373,7 @@ dependencies = [
|
||||
"async-nats",
|
||||
"async-trait",
|
||||
"axum-extra",
|
||||
"base64 0.22.1",
|
||||
"chrono",
|
||||
"domain",
|
||||
"futures-core",
|
||||
@@ -1389,6 +1391,7 @@ dependencies = [
|
||||
"tracing",
|
||||
"url",
|
||||
"uuid",
|
||||
"walkdir",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2468,6 +2471,15 @@ version = "1.0.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984"
|
||||
|
||||
[[package]]
|
||||
name = "same-file"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
|
||||
dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "schannel"
|
||||
version = "0.1.28"
|
||||
@@ -3539,6 +3551,16 @@ version = "0.9.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
|
||||
|
||||
[[package]]
|
||||
name = "walkdir"
|
||||
version = "2.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
|
||||
dependencies = [
|
||||
"same-file",
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "want"
|
||||
version = "0.3.1"
|
||||
@@ -3675,6 +3697,15 @@ dependencies = [
|
||||
"wasite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-util"
|
||||
version = "0.1.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
|
||||
dependencies = [
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.62.2"
|
||||
|
||||
@@ -10,8 +10,8 @@ FROM debian:bookworm-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install OpenSSL (required for many Rust networking crates) and CA certificates
|
||||
RUN apt-get update && apt-get install -y libssl3 ca-certificates && rm -rf /var/lib/apt/lists/*
|
||||
# Install OpenSSL, CA certs, and ffmpeg (provides ffprobe for local-files duration scanning)
|
||||
RUN apt-get update && apt-get install -y libssl3 ca-certificates ffmpeg && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=builder /app/target/release/api .
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ postgres = ["infra/postgres"]
|
||||
auth-oidc = ["infra/auth-oidc"]
|
||||
auth-jwt = ["infra/auth-jwt"]
|
||||
jellyfin = ["infra/jellyfin"]
|
||||
local-files = ["infra/local-files", "dep:tokio-util"]
|
||||
|
||||
[dependencies]
|
||||
k-core = { git = "https://git.gabrielkaszewski.dev/GKaszewski/k-core", features = [
|
||||
@@ -51,3 +52,4 @@ tracing = "0.1"
|
||||
async-trait = "0.1"
|
||||
dotenvy = "0.15.7"
|
||||
time = "0.3"
|
||||
tokio-util = { version = "0.7", features = ["io"], optional = true }
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
//! Loads configuration from environment variables.
|
||||
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Application configuration loaded from environment variables
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -40,6 +41,9 @@ pub struct Config {
|
||||
pub jellyfin_api_key: Option<String>,
|
||||
pub jellyfin_user_id: Option<String>,
|
||||
|
||||
/// Root directory for the local-files provider. Set `LOCAL_FILES_DIR` to enable.
|
||||
pub local_files_dir: Option<PathBuf>,
|
||||
|
||||
/// Public base URL of this API server (used to build IPTV stream URLs).
|
||||
pub base_url: String,
|
||||
}
|
||||
@@ -114,6 +118,8 @@ impl Config {
|
||||
let jellyfin_api_key = env::var("JELLYFIN_API_KEY").ok();
|
||||
let jellyfin_user_id = env::var("JELLYFIN_USER_ID").ok();
|
||||
|
||||
let local_files_dir = env::var("LOCAL_FILES_DIR").ok().map(PathBuf::from);
|
||||
|
||||
let base_url = env::var("BASE_URL")
|
||||
.unwrap_or_else(|_| format!("http://localhost:{}", port));
|
||||
|
||||
@@ -140,6 +146,7 @@ impl Config {
|
||||
jellyfin_base_url,
|
||||
jellyfin_api_key,
|
||||
jellyfin_user_id,
|
||||
local_files_dir,
|
||||
base_url,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,6 +46,7 @@ pub struct TokenResponse {
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct ConfigResponse {
|
||||
pub allow_registration: bool,
|
||||
pub provider_capabilities: domain::ProviderCapabilities,
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
|
||||
@@ -35,6 +35,12 @@ pub enum ApiError {
|
||||
|
||||
#[error("auth_required")]
|
||||
AuthRequired,
|
||||
|
||||
#[error("Not found: {0}")]
|
||||
NotFound(String),
|
||||
|
||||
#[error("Not implemented: {0}")]
|
||||
NotImplemented(String),
|
||||
}
|
||||
|
||||
/// Error response body
|
||||
@@ -132,6 +138,22 @@ impl IntoResponse for ApiError {
|
||||
details: None,
|
||||
},
|
||||
),
|
||||
|
||||
ApiError::NotFound(msg) => (
|
||||
StatusCode::NOT_FOUND,
|
||||
ErrorResponse {
|
||||
error: "Not found".to_string(),
|
||||
details: Some(msg.clone()),
|
||||
},
|
||||
),
|
||||
|
||||
ApiError::NotImplemented(msg) => (
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
ErrorResponse {
|
||||
error: "Not implemented".to_string(),
|
||||
details: Some(msg.clone()),
|
||||
},
|
||||
),
|
||||
};
|
||||
|
||||
(status, Json(error_response)).into_response()
|
||||
@@ -146,6 +168,14 @@ impl ApiError {
|
||||
pub fn internal(msg: impl Into<String>) -> Self {
|
||||
Self::Internal(msg.into())
|
||||
}
|
||||
|
||||
pub fn not_found(msg: impl Into<String>) -> Self {
|
||||
Self::NotFound(msg.into())
|
||||
}
|
||||
|
||||
pub fn not_implemented(msg: impl Into<String>) -> Self {
|
||||
Self::NotImplemented(msg.into())
|
||||
}
|
||||
}
|
||||
|
||||
/// Result type alias for API handlers
|
||||
|
||||
@@ -10,7 +10,7 @@ use axum::http::{HeaderName, HeaderValue};
|
||||
use std::sync::Arc;
|
||||
use tower_http::cors::{AllowHeaders, AllowMethods, AllowOrigin, CorsLayer};
|
||||
|
||||
use domain::{ChannelService, IMediaProvider, ScheduleEngineService, UserService};
|
||||
use domain::{ChannelService, IMediaProvider, ProviderCapabilities, ScheduleEngineService, StreamingProtocol, UserService};
|
||||
use infra::factory::{build_channel_repository, build_schedule_repository, build_user_repository};
|
||||
use infra::run_migrations;
|
||||
use k_core::http::server::{ServerConfig, apply_standard_middleware};
|
||||
@@ -72,8 +72,52 @@ async fn main() -> anyhow::Result<()> {
|
||||
let user_service = UserService::new(user_repo);
|
||||
let channel_service = ChannelService::new(channel_repo.clone());
|
||||
|
||||
// Build media provider — Jellyfin if configured, no-op fallback otherwise.
|
||||
let media_provider: Arc<dyn IMediaProvider> = build_media_provider(&config);
|
||||
// Build media provider — Jellyfin → local-files → noop, first match wins.
|
||||
#[cfg(feature = "local-files")]
|
||||
let mut local_index: Option<Arc<infra::LocalIndex>> = None;
|
||||
|
||||
let mut maybe_provider: Option<Arc<dyn IMediaProvider>> = None;
|
||||
|
||||
#[cfg(feature = "jellyfin")]
|
||||
if let (Some(base_url), Some(api_key), Some(user_id)) = (
|
||||
&config.jellyfin_base_url,
|
||||
&config.jellyfin_api_key,
|
||||
&config.jellyfin_user_id,
|
||||
) {
|
||||
tracing::info!("Media provider: Jellyfin at {}", base_url);
|
||||
maybe_provider = Some(Arc::new(infra::JellyfinMediaProvider::new(infra::JellyfinConfig {
|
||||
base_url: base_url.clone(),
|
||||
api_key: api_key.clone(),
|
||||
user_id: user_id.clone(),
|
||||
})));
|
||||
}
|
||||
|
||||
#[cfg(feature = "local-files")]
|
||||
if maybe_provider.is_none() {
|
||||
if let Some(dir) = &config.local_files_dir {
|
||||
if let k_core::db::DatabasePool::Sqlite(ref sqlite_pool) = db_pool {
|
||||
tracing::info!("Media provider: local files at {:?}", dir);
|
||||
let lf_cfg = infra::LocalFilesConfig {
|
||||
root_dir: dir.clone(),
|
||||
base_url: config.base_url.clone(),
|
||||
};
|
||||
let idx = Arc::new(infra::LocalIndex::new(&lf_cfg, sqlite_pool.clone()).await);
|
||||
local_index = Some(Arc::clone(&idx));
|
||||
let scan_idx = Arc::clone(&idx);
|
||||
tokio::spawn(async move { scan_idx.rescan().await; });
|
||||
maybe_provider = Some(Arc::new(infra::LocalFilesProvider::new(idx, lf_cfg)));
|
||||
} else {
|
||||
tracing::warn!("local-files requires SQLite; ignoring LOCAL_FILES_DIR");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let media_provider: Arc<dyn IMediaProvider> = maybe_provider.unwrap_or_else(|| {
|
||||
tracing::warn!(
|
||||
"No media provider configured. Set JELLYFIN_BASE_URL / LOCAL_FILES_DIR."
|
||||
);
|
||||
Arc::new(NoopMediaProvider)
|
||||
});
|
||||
|
||||
let bg_channel_repo = channel_repo.clone();
|
||||
let schedule_engine = ScheduleEngineService::new(
|
||||
@@ -82,7 +126,8 @@ async fn main() -> anyhow::Result<()> {
|
||||
schedule_repo,
|
||||
);
|
||||
|
||||
let state = AppState::new(
|
||||
#[allow(unused_mut)]
|
||||
let mut state = AppState::new(
|
||||
user_service,
|
||||
channel_service,
|
||||
schedule_engine,
|
||||
@@ -91,6 +136,11 @@ async fn main() -> anyhow::Result<()> {
|
||||
)
|
||||
.await?;
|
||||
|
||||
#[cfg(feature = "local-files")]
|
||||
{
|
||||
state.local_index = local_index;
|
||||
}
|
||||
|
||||
let server_config = ServerConfig {
|
||||
cors_origins: config.cors_allowed_origins.clone(),
|
||||
};
|
||||
@@ -141,31 +191,6 @@ async fn main() -> anyhow::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Build the media provider from config.
|
||||
/// Falls back to a no-op provider that returns an informative error when
|
||||
/// Jellyfin env vars are not set, so other API features still work in dev.
|
||||
fn build_media_provider(config: &Config) -> Arc<dyn IMediaProvider> {
|
||||
#[cfg(feature = "jellyfin")]
|
||||
if let (Some(base_url), Some(api_key), Some(user_id)) = (
|
||||
&config.jellyfin_base_url,
|
||||
&config.jellyfin_api_key,
|
||||
&config.jellyfin_user_id,
|
||||
) {
|
||||
tracing::info!("Media provider: Jellyfin at {}", base_url);
|
||||
return Arc::new(infra::JellyfinMediaProvider::new(infra::JellyfinConfig {
|
||||
base_url: base_url.clone(),
|
||||
api_key: api_key.clone(),
|
||||
user_id: user_id.clone(),
|
||||
}));
|
||||
}
|
||||
|
||||
tracing::warn!(
|
||||
"No media provider configured. Set JELLYFIN_BASE_URL, JELLYFIN_API_KEY, \
|
||||
and JELLYFIN_USER_ID to enable schedule generation."
|
||||
);
|
||||
Arc::new(NoopMediaProvider)
|
||||
}
|
||||
|
||||
/// Stand-in provider used when no real media source is configured.
|
||||
/// Returns a descriptive error for every call so schedule endpoints fail
|
||||
/// gracefully rather than panicking at startup.
|
||||
@@ -173,14 +198,25 @@ struct NoopMediaProvider;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl IMediaProvider for NoopMediaProvider {
|
||||
fn capabilities(&self) -> ProviderCapabilities {
|
||||
ProviderCapabilities {
|
||||
collections: false,
|
||||
series: false,
|
||||
genres: false,
|
||||
tags: false,
|
||||
decade: false,
|
||||
search: false,
|
||||
streaming_protocol: StreamingProtocol::DirectFile,
|
||||
rescan: false,
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_items(
|
||||
&self,
|
||||
_: &domain::MediaFilter,
|
||||
) -> domain::DomainResult<Vec<domain::MediaItem>> {
|
||||
Err(domain::DomainError::InfrastructureError(
|
||||
"No media provider configured. Set JELLYFIN_BASE_URL, JELLYFIN_API_KEY, \
|
||||
and JELLYFIN_USER_ID."
|
||||
.into(),
|
||||
"No media provider configured. Set JELLYFIN_BASE_URL or LOCAL_FILES_DIR.".into(),
|
||||
))
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use axum::{Json, Router, extract::State, routing::get};
|
||||
use std::sync::Arc;
|
||||
use crate::config::Config;
|
||||
|
||||
use crate::dto::ConfigResponse;
|
||||
use crate::state::AppState;
|
||||
|
||||
@@ -8,8 +7,9 @@ pub fn router() -> Router<AppState> {
|
||||
Router::new().route("/", get(get_config))
|
||||
}
|
||||
|
||||
async fn get_config(State(config): State<Arc<Config>>) -> Json<ConfigResponse> {
|
||||
async fn get_config(State(state): State<AppState>) -> Json<ConfigResponse> {
|
||||
Json(ConfigResponse {
|
||||
allow_registration: config.allow_registration,
|
||||
allow_registration: state.config.allow_registration,
|
||||
provider_capabilities: state.media_provider.capabilities(),
|
||||
})
|
||||
}
|
||||
|
||||
153
k-tv-backend/api/src/routes/files.rs
Normal file
153
k-tv-backend/api/src/routes/files.rs
Normal file
@@ -0,0 +1,153 @@
|
||||
//! Local-file streaming and rescan routes
|
||||
//!
|
||||
//! GET /files/stream/:encoded_id — serve a local video file with Range support
|
||||
//! POST /files/rescan — trigger an index rebuild (auth required)
|
||||
|
||||
use axum::{
|
||||
Router,
|
||||
extract::{Path, State},
|
||||
http::{HeaderMap, StatusCode},
|
||||
response::Response,
|
||||
routing::{get, post},
|
||||
};
|
||||
|
||||
use crate::{error::ApiError, extractors::CurrentUser, state::AppState};
|
||||
|
||||
pub fn router() -> Router<AppState> {
|
||||
let r = Router::new().route("/stream/{id}", get(stream_file));
|
||||
#[cfg(feature = "local-files")]
|
||||
let r = r.route("/rescan", post(trigger_rescan));
|
||||
r
|
||||
}
|
||||
|
||||
/// Stream a local video file, honouring `Range` headers for seeking.
|
||||
///
|
||||
/// The path segment is a base64url-encoded relative path produced by the
|
||||
/// `LocalFilesProvider`. No authentication required — the ID is not guessable
|
||||
/// without knowing the filesystem layout.
|
||||
async fn stream_file(
|
||||
State(state): State<AppState>,
|
||||
Path(encoded_id): Path<String>,
|
||||
headers: HeaderMap,
|
||||
) -> Result<Response, ApiError> {
|
||||
#[cfg(feature = "local-files")]
|
||||
{
|
||||
use axum::body::Body;
|
||||
use std::io::SeekFrom;
|
||||
use tokio::io::{AsyncReadExt as _, AsyncSeekExt as _};
|
||||
use tokio_util::io::ReaderStream;
|
||||
|
||||
let root_dir = state.config.local_files_dir.as_ref().ok_or_else(|| {
|
||||
ApiError::not_implemented("LOCAL_FILES_DIR not configured")
|
||||
})?;
|
||||
|
||||
let rel = infra::local_files::decode_stream_id(&encoded_id)
|
||||
.ok_or_else(|| ApiError::validation("invalid stream id"))?;
|
||||
|
||||
// Security: canonicalise and verify the path stays inside root.
|
||||
let full_path = root_dir.join(&rel);
|
||||
let canonical_root = root_dir
|
||||
.canonicalize()
|
||||
.map_err(|e| ApiError::internal(e.to_string()))?;
|
||||
let canonical = full_path
|
||||
.canonicalize()
|
||||
.map_err(|_| ApiError::not_found("file not found"))?;
|
||||
if !canonical.starts_with(&canonical_root) {
|
||||
return Err(ApiError::Forbidden("path traversal detected".into()));
|
||||
}
|
||||
|
||||
let mut file = tokio::fs::File::open(&canonical)
|
||||
.await
|
||||
.map_err(|_| ApiError::not_found("file not found"))?;
|
||||
let file_size = file
|
||||
.metadata()
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(e.to_string()))?
|
||||
.len();
|
||||
|
||||
let ext = canonical
|
||||
.extension()
|
||||
.and_then(|e| e.to_str())
|
||||
.unwrap_or("")
|
||||
.to_lowercase();
|
||||
let content_type = content_type_for_ext(&ext);
|
||||
|
||||
// Parse Range header.
|
||||
let range = headers
|
||||
.get(axum::http::header::RANGE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|r| parse_range(r, file_size));
|
||||
|
||||
let (start, end, status) = if let Some((s, e)) = range {
|
||||
(s, e.min(file_size.saturating_sub(1)), StatusCode::PARTIAL_CONTENT)
|
||||
} else {
|
||||
(0, file_size.saturating_sub(1), StatusCode::OK)
|
||||
};
|
||||
let length = end - start + 1;
|
||||
|
||||
file.seek(SeekFrom::Start(start))
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(e.to_string()))?;
|
||||
|
||||
let stream = ReaderStream::new(file.take(length));
|
||||
let body = Body::from_stream(stream);
|
||||
|
||||
let mut builder = Response::builder()
|
||||
.status(status)
|
||||
.header("Content-Type", content_type)
|
||||
.header("Content-Length", length.to_string())
|
||||
.header("Accept-Ranges", "bytes");
|
||||
|
||||
if status == StatusCode::PARTIAL_CONTENT {
|
||||
builder = builder.header(
|
||||
"Content-Range",
|
||||
format!("bytes {}-{}/{}", start, end, file_size),
|
||||
);
|
||||
}
|
||||
|
||||
return builder.body(body).map_err(|e| ApiError::internal(e.to_string()));
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "local-files"))]
|
||||
Err(ApiError::not_implemented("local-files feature not enabled"))
|
||||
}
|
||||
|
||||
/// Trigger a filesystem rescan and return the number of items found.
|
||||
#[cfg(feature = "local-files")]
|
||||
async fn trigger_rescan(
|
||||
State(state): State<AppState>,
|
||||
CurrentUser(_user): CurrentUser,
|
||||
) -> Result<axum::Json<serde_json::Value>, ApiError> {
|
||||
let index = state
|
||||
.local_index
|
||||
.as_ref()
|
||||
.ok_or_else(|| ApiError::not_implemented("no local files provider active"))?;
|
||||
let count = index.rescan().await;
|
||||
Ok(axum::Json(serde_json::json!({ "items_found": count })))
|
||||
}
|
||||
|
||||
fn content_type_for_ext(ext: &str) -> &'static str {
|
||||
match ext {
|
||||
"mp4" | "m4v" => "video/mp4",
|
||||
"mkv" => "video/x-matroska",
|
||||
"avi" => "video/x-msvideo",
|
||||
"mov" => "video/quicktime",
|
||||
"webm" => "video/webm",
|
||||
_ => "application/octet-stream",
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_range(range: &str, file_size: u64) -> Option<(u64, u64)> {
|
||||
let range = range.strip_prefix("bytes=")?;
|
||||
let (start_str, end_str) = range.split_once('-')?;
|
||||
let start: u64 = start_str.parse().ok()?;
|
||||
let end: u64 = if end_str.is_empty() {
|
||||
file_size.saturating_sub(1)
|
||||
} else {
|
||||
end_str.parse().ok()?
|
||||
};
|
||||
if start > end || start >= file_size {
|
||||
return None;
|
||||
}
|
||||
Some((start, end))
|
||||
}
|
||||
@@ -136,6 +136,11 @@ async fn list_collections(
|
||||
State(state): State<AppState>,
|
||||
CurrentUser(_user): CurrentUser,
|
||||
) -> Result<Json<Vec<CollectionResponse>>, ApiError> {
|
||||
if !state.media_provider.capabilities().collections {
|
||||
return Err(ApiError::not_implemented(
|
||||
"collections not supported by this provider",
|
||||
));
|
||||
}
|
||||
let collections = state.media_provider.list_collections().await?;
|
||||
Ok(Json(collections.into_iter().map(Into::into).collect()))
|
||||
}
|
||||
@@ -146,6 +151,11 @@ async fn list_series(
|
||||
CurrentUser(_user): CurrentUser,
|
||||
Query(params): Query<SeriesQuery>,
|
||||
) -> Result<Json<Vec<SeriesResponse>>, ApiError> {
|
||||
if !state.media_provider.capabilities().series {
|
||||
return Err(ApiError::not_implemented(
|
||||
"series not supported by this provider",
|
||||
));
|
||||
}
|
||||
let series = state
|
||||
.media_provider
|
||||
.list_series(params.collection.as_deref())
|
||||
@@ -159,6 +169,11 @@ async fn list_genres(
|
||||
CurrentUser(_user): CurrentUser,
|
||||
Query(params): Query<GenresQuery>,
|
||||
) -> Result<Json<Vec<String>>, ApiError> {
|
||||
if !state.media_provider.capabilities().genres {
|
||||
return Err(ApiError::not_implemented(
|
||||
"genres not supported by this provider",
|
||||
));
|
||||
}
|
||||
let ct = parse_content_type(params.content_type.as_deref())?;
|
||||
let genres = state.media_provider.list_genres(ct.as_ref()).await?;
|
||||
Ok(Json(genres))
|
||||
|
||||
@@ -8,6 +8,7 @@ use axum::Router;
|
||||
pub mod auth;
|
||||
pub mod channels;
|
||||
pub mod config;
|
||||
pub mod files;
|
||||
pub mod iptv;
|
||||
pub mod library;
|
||||
|
||||
@@ -17,6 +18,7 @@ pub fn api_v1_router() -> Router<AppState> {
|
||||
.nest("/auth", auth::router())
|
||||
.nest("/channels", channels::router())
|
||||
.nest("/config", config::router())
|
||||
.nest("/files", files::router())
|
||||
.nest("/iptv", iptv::router())
|
||||
.nest("/library", library::router())
|
||||
}
|
||||
|
||||
@@ -25,6 +25,9 @@ pub struct AppState {
|
||||
#[cfg(feature = "auth-jwt")]
|
||||
pub jwt_validator: Option<Arc<JwtValidator>>,
|
||||
pub config: Arc<Config>,
|
||||
/// Index for the local-files provider, used by the rescan route.
|
||||
#[cfg(feature = "local-files")]
|
||||
pub local_index: Option<Arc<infra::LocalIndex>>,
|
||||
}
|
||||
|
||||
impl AppState {
|
||||
@@ -105,6 +108,8 @@ impl AppState {
|
||||
#[cfg(feature = "auth-jwt")]
|
||||
jwt_validator,
|
||||
config: Arc::new(config),
|
||||
#[cfg(feature = "local-files")]
|
||||
local_index: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ pub mod value_objects;
|
||||
// Re-export commonly used types
|
||||
pub use entities::*;
|
||||
pub use errors::{DomainError, DomainResult};
|
||||
pub use ports::{Collection, IMediaProvider, SeriesSummary};
|
||||
pub use ports::{Collection, IMediaProvider, ProviderCapabilities, SeriesSummary, StreamingProtocol};
|
||||
pub use repositories::*;
|
||||
pub use iptv::{generate_m3u, generate_xmltv};
|
||||
pub use services::{ChannelService, ScheduleEngineService, UserService};
|
||||
|
||||
@@ -12,6 +12,37 @@ use crate::entities::MediaItem;
|
||||
use crate::errors::{DomainError, DomainResult};
|
||||
use crate::value_objects::{ContentType, MediaFilter, MediaItemId};
|
||||
|
||||
// ============================================================================
|
||||
// Provider capabilities
|
||||
// ============================================================================
|
||||
|
||||
/// How a provider delivers video to the client.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum StreamingProtocol {
|
||||
/// HLS playlist (`.m3u8`). Requires hls.js on non-Safari browsers.
|
||||
Hls,
|
||||
/// Direct file URL with Range-header support. Native `<video>` element.
|
||||
DirectFile,
|
||||
}
|
||||
|
||||
/// Feature matrix for a media provider.
|
||||
///
|
||||
/// The API and frontend use this to gate calls and hide UI controls that
|
||||
/// the active provider does not support.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProviderCapabilities {
|
||||
pub collections: bool,
|
||||
pub series: bool,
|
||||
pub genres: bool,
|
||||
pub tags: bool,
|
||||
pub decade: bool,
|
||||
pub search: bool,
|
||||
pub streaming_protocol: StreamingProtocol,
|
||||
/// Whether `POST /files/rescan` is available.
|
||||
pub rescan: bool,
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Library browsing types
|
||||
// ============================================================================
|
||||
@@ -58,6 +89,12 @@ pub struct SeriesSummary {
|
||||
/// `NoopMediaProvider`) inherit the default and return a clear error.
|
||||
#[async_trait]
|
||||
pub trait IMediaProvider: Send + Sync {
|
||||
/// Declare what features this provider supports.
|
||||
///
|
||||
/// Called at request time (not cached) so the response always reflects the
|
||||
/// active provider. Implementations return a plain struct — no I/O needed.
|
||||
fn capabilities(&self) -> ProviderCapabilities;
|
||||
|
||||
/// Fetch metadata for all items matching `filter` from this provider.
|
||||
///
|
||||
/// The provider interprets each field of `MediaFilter` in terms of its own
|
||||
|
||||
@@ -11,6 +11,7 @@ broker-nats = ["dep:futures-util", "k-core/broker-nats"]
|
||||
auth-oidc = ["dep:openidconnect", "dep:url", "dep:axum-extra"]
|
||||
auth-jwt = ["dep:jsonwebtoken"]
|
||||
jellyfin = ["dep:reqwest"]
|
||||
local-files = ["dep:walkdir", "dep:base64", "sqlite"]
|
||||
|
||||
[dependencies]
|
||||
k-core = { git = "https://git.gabrielkaszewski.dev/GKaszewski/k-core", features = [
|
||||
@@ -46,3 +47,5 @@ jsonwebtoken = { version = "10.2.0", features = [
|
||||
"rsa",
|
||||
"rust_crypto",
|
||||
], optional = true }
|
||||
walkdir = { version = "2", optional = true }
|
||||
base64 = { version = "0.22", optional = true }
|
||||
|
||||
@@ -2,7 +2,7 @@ use async_trait::async_trait;
|
||||
|
||||
use domain::{
|
||||
Collection, ContentType, DomainError, DomainResult, IMediaProvider, MediaFilter, MediaItem,
|
||||
MediaItemId, SeriesSummary,
|
||||
MediaItemId, ProviderCapabilities, SeriesSummary, StreamingProtocol,
|
||||
};
|
||||
|
||||
use super::config::JellyfinConfig;
|
||||
@@ -129,6 +129,19 @@ impl JellyfinMediaProvider {
|
||||
|
||||
#[async_trait]
|
||||
impl IMediaProvider for JellyfinMediaProvider {
|
||||
fn capabilities(&self) -> ProviderCapabilities {
|
||||
ProviderCapabilities {
|
||||
collections: true,
|
||||
series: true,
|
||||
genres: true,
|
||||
tags: true,
|
||||
decade: true,
|
||||
search: true,
|
||||
streaming_protocol: StreamingProtocol::Hls,
|
||||
rescan: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Fetch items matching `filter` from the Jellyfin library.
|
||||
///
|
||||
/// When `series_names` has more than one entry the results from each series
|
||||
|
||||
@@ -21,6 +21,9 @@ mod channel_repository;
|
||||
mod schedule_repository;
|
||||
mod user_repository;
|
||||
|
||||
#[cfg(feature = "local-files")]
|
||||
pub mod local_files;
|
||||
|
||||
// Re-export for convenience
|
||||
pub use db::run_migrations;
|
||||
|
||||
@@ -33,3 +36,6 @@ pub use schedule_repository::SqliteScheduleRepository;
|
||||
|
||||
#[cfg(feature = "jellyfin")]
|
||||
pub use jellyfin::{JellyfinConfig, JellyfinMediaProvider};
|
||||
|
||||
#[cfg(feature = "local-files")]
|
||||
pub use local_files::{LocalFilesConfig, LocalFilesProvider, LocalIndex, decode_stream_id};
|
||||
|
||||
9
k-tv-backend/infra/src/local_files/config.rs
Normal file
9
k-tv-backend/infra/src/local_files/config.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Configuration for the local files media provider.
|
||||
pub struct LocalFilesConfig {
|
||||
/// Root directory containing video files. All files are served relative to this.
|
||||
pub root_dir: PathBuf,
|
||||
/// Public base URL of this API server, used to build stream URLs.
|
||||
pub base_url: String,
|
||||
}
|
||||
182
k-tv-backend/infra/src/local_files/index.rs
Normal file
182
k-tv-backend/infra/src/local_files/index.rs
Normal file
@@ -0,0 +1,182 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use chrono::Utc;
|
||||
use tokio::sync::RwLock;
|
||||
use tracing::{error, info};
|
||||
|
||||
use domain::MediaItemId;
|
||||
|
||||
use super::config::LocalFilesConfig;
|
||||
use super::scanner::{scan_dir, LocalFileItem};
|
||||
|
||||
/// Encode a rel-path string into a URL-safe, padding-free base64 MediaItemId.
|
||||
pub fn encode_id(rel_path: &str) -> MediaItemId {
|
||||
use base64::Engine as _;
|
||||
MediaItemId::new(
|
||||
base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(rel_path.as_bytes()),
|
||||
)
|
||||
}
|
||||
|
||||
/// Decode a MediaItemId back to a relative path string.
|
||||
pub fn decode_id(id: &MediaItemId) -> Option<String> {
|
||||
use base64::Engine as _;
|
||||
let bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD
|
||||
.decode(id.as_ref())
|
||||
.ok()?;
|
||||
String::from_utf8(bytes).ok()
|
||||
}
|
||||
|
||||
/// In-memory (+ SQLite-backed) index of local video files.
|
||||
///
|
||||
/// On startup the index is populated from the SQLite cache so the provider can
|
||||
/// serve requests immediately. A background task calls `rescan()` to pick up
|
||||
/// any changes on disk and write them back to the cache.
|
||||
pub struct LocalIndex {
|
||||
items: Arc<RwLock<HashMap<MediaItemId, LocalFileItem>>>,
|
||||
pub root_dir: PathBuf,
|
||||
pool: sqlx::SqlitePool,
|
||||
}
|
||||
|
||||
impl LocalIndex {
|
||||
/// Create the index, immediately loading persisted entries from SQLite.
|
||||
pub async fn new(config: &LocalFilesConfig, pool: sqlx::SqlitePool) -> Self {
|
||||
let idx = Self {
|
||||
items: Arc::new(RwLock::new(HashMap::new())),
|
||||
root_dir: config.root_dir.clone(),
|
||||
pool,
|
||||
};
|
||||
idx.load_from_db().await;
|
||||
idx
|
||||
}
|
||||
|
||||
/// Load previously scanned items from SQLite (instant on startup).
|
||||
async fn load_from_db(&self) {
|
||||
#[derive(sqlx::FromRow)]
|
||||
struct Row {
|
||||
id: String,
|
||||
rel_path: String,
|
||||
title: String,
|
||||
duration_secs: i64,
|
||||
year: Option<i64>,
|
||||
tags: String,
|
||||
top_dir: String,
|
||||
}
|
||||
|
||||
let rows = sqlx::query_as::<_, Row>(
|
||||
"SELECT id, rel_path, title, duration_secs, year, tags, top_dir FROM local_files_index",
|
||||
)
|
||||
.fetch_all(&self.pool)
|
||||
.await;
|
||||
|
||||
match rows {
|
||||
Ok(rows) => {
|
||||
let mut map = self.items.write().await;
|
||||
for row in rows {
|
||||
let tags: Vec<String> =
|
||||
serde_json::from_str(&row.tags).unwrap_or_default();
|
||||
let item = LocalFileItem {
|
||||
rel_path: row.rel_path,
|
||||
title: row.title,
|
||||
duration_secs: row.duration_secs as u32,
|
||||
year: row.year.map(|y| y as u16),
|
||||
tags,
|
||||
top_dir: row.top_dir,
|
||||
};
|
||||
map.insert(MediaItemId::new(row.id), item);
|
||||
}
|
||||
info!("Local files index: loaded {} items from DB", map.len());
|
||||
}
|
||||
Err(e) => {
|
||||
// Table might not exist yet on first run — that's fine.
|
||||
tracing::debug!("Could not load local files index from DB: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Scan the filesystem for video files and rebuild the index.
|
||||
///
|
||||
/// Returns the number of items found. Called on startup (background task)
|
||||
/// and via `POST /files/rescan`.
|
||||
pub async fn rescan(&self) -> u32 {
|
||||
info!("Local files: scanning {:?}", self.root_dir);
|
||||
let new_items = scan_dir(&self.root_dir).await;
|
||||
let count = new_items.len() as u32;
|
||||
|
||||
// Swap in-memory map.
|
||||
{
|
||||
let mut map = self.items.write().await;
|
||||
map.clear();
|
||||
for item in &new_items {
|
||||
let id = encode_id(&item.rel_path);
|
||||
map.insert(id, item.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Persist to SQLite.
|
||||
if let Err(e) = self.save_to_db(&new_items).await {
|
||||
error!("Failed to persist local files index: {}", e);
|
||||
}
|
||||
|
||||
info!("Local files: indexed {} items", count);
|
||||
count
|
||||
}
|
||||
|
||||
async fn save_to_db(&self, items: &[LocalFileItem]) -> Result<(), sqlx::Error> {
|
||||
// Rebuild the table in one transaction.
|
||||
let mut tx = self.pool.begin().await?;
|
||||
|
||||
sqlx::query("DELETE FROM local_files_index")
|
||||
.execute(&mut *tx)
|
||||
.await?;
|
||||
|
||||
let now = Utc::now().to_rfc3339();
|
||||
for item in items {
|
||||
let id = encode_id(&item.rel_path).into_inner();
|
||||
let tags_json = serde_json::to_string(&item.tags).unwrap_or_else(|_| "[]".into());
|
||||
sqlx::query(
|
||||
"INSERT INTO local_files_index \
|
||||
(id, rel_path, title, duration_secs, year, tags, top_dir, scanned_at) \
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
)
|
||||
.bind(&id)
|
||||
.bind(&item.rel_path)
|
||||
.bind(&item.title)
|
||||
.bind(item.duration_secs as i64)
|
||||
.bind(item.year.map(|y| y as i64))
|
||||
.bind(&tags_json)
|
||||
.bind(&item.top_dir)
|
||||
.bind(&now)
|
||||
.execute(&mut *tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
tx.commit().await
|
||||
}
|
||||
|
||||
pub async fn get(&self, id: &MediaItemId) -> Option<LocalFileItem> {
|
||||
self.items.read().await.get(id).cloned()
|
||||
}
|
||||
|
||||
pub async fn get_all(&self) -> Vec<(MediaItemId, LocalFileItem)> {
|
||||
self.items
|
||||
.read()
|
||||
.await
|
||||
.iter()
|
||||
.map(|(k, v)| (k.clone(), v.clone()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Return unique top-level directories as collection names.
|
||||
pub async fn collections(&self) -> Vec<String> {
|
||||
let map = self.items.read().await;
|
||||
let mut seen = std::collections::HashSet::new();
|
||||
for item in map.values() {
|
||||
seen.insert(item.top_dir.clone());
|
||||
}
|
||||
let mut dirs: Vec<String> = seen.into_iter().collect();
|
||||
dirs.sort();
|
||||
dirs
|
||||
}
|
||||
}
|
||||
8
k-tv-backend/infra/src/local_files/mod.rs
Normal file
8
k-tv-backend/infra/src/local_files/mod.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
//! Local-files media provider: scans a directory tree for video files,
//! indexes them (in memory and in SQLite), and exposes them through the
//! generic `IMediaProvider` interface with direct-file streaming.

pub mod config;
pub mod index;
pub mod provider;
pub mod scanner;

// Flatten the most commonly used types for callers of `local_files::*`.
pub use config::LocalFilesConfig;
pub use index::LocalIndex;
pub use provider::{LocalFilesProvider, decode_stream_id};
|
||||
165
k-tv-backend/infra/src/local_files/provider.rs
Normal file
165
k-tv-backend/infra/src/local_files/provider.rs
Normal file
@@ -0,0 +1,165 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use domain::{
|
||||
Collection, ContentType, DomainError, DomainResult, IMediaProvider, MediaFilter, MediaItem,
|
||||
MediaItemId, ProviderCapabilities, StreamingProtocol,
|
||||
};
|
||||
|
||||
use super::config::LocalFilesConfig;
|
||||
use super::index::{LocalIndex, decode_id};
|
||||
use super::scanner::LocalFileItem;
|
||||
|
||||
/// `IMediaProvider` implementation backed by the on-disk [`LocalIndex`].
///
/// Items are served as direct files: `get_stream_url` points clients at this
/// backend's `/api/v1/files/stream/{id}` endpoint.
pub struct LocalFilesProvider {
    /// Shared in-memory index of scanned local files.
    pub index: Arc<LocalIndex>,
    /// Public base URL of this backend, stored without a trailing slash.
    base_url: String,
}
|
||||
|
||||
/// Items shorter than this many seconds are classified as shorts;
/// everything else is treated as a movie. 1200 s = 20 minutes.
const SHORT_DURATION_SECS: u32 = 1200; // 20 minutes
|
||||
|
||||
impl LocalFilesProvider {
|
||||
pub fn new(index: Arc<LocalIndex>, config: LocalFilesConfig) -> Self {
|
||||
Self {
|
||||
index,
|
||||
base_url: config.base_url.trim_end_matches('/').to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn to_media_item(id: MediaItemId, item: &LocalFileItem) -> MediaItem {
|
||||
let content_type = if item.duration_secs < 1200 {
|
||||
ContentType::Short
|
||||
} else {
|
||||
ContentType::Movie
|
||||
};
|
||||
MediaItem {
|
||||
id,
|
||||
title: item.title.clone(),
|
||||
content_type,
|
||||
duration_secs: item.duration_secs,
|
||||
description: None,
|
||||
genres: vec![],
|
||||
year: item.year,
|
||||
tags: item.tags.clone(),
|
||||
series_name: None,
|
||||
season_number: None,
|
||||
episode_number: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl IMediaProvider for LocalFilesProvider {
|
||||
fn capabilities(&self) -> ProviderCapabilities {
|
||||
ProviderCapabilities {
|
||||
collections: true,
|
||||
series: false,
|
||||
genres: false,
|
||||
tags: true,
|
||||
decade: true,
|
||||
search: true,
|
||||
streaming_protocol: StreamingProtocol::DirectFile,
|
||||
rescan: true,
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_items(&self, filter: &MediaFilter) -> DomainResult<Vec<MediaItem>> {
|
||||
let all = self.index.get_all().await;
|
||||
|
||||
let results = all
|
||||
.into_iter()
|
||||
.filter_map(|(id, item)| {
|
||||
// content_type: derive heuristically, then filter
|
||||
let content_type = if item.duration_secs < SHORT_DURATION_SECS {
|
||||
ContentType::Short
|
||||
} else {
|
||||
ContentType::Movie
|
||||
};
|
||||
if let Some(ref ct) = filter.content_type {
|
||||
if &content_type != ct {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
// collections: match against top_dir
|
||||
if !filter.collections.is_empty() && !filter.collections.contains(&item.top_dir) {
|
||||
return None;
|
||||
}
|
||||
|
||||
// tags: OR — item must have at least one matching tag
|
||||
if !filter.tags.is_empty() {
|
||||
let has = filter
|
||||
.tags
|
||||
.iter()
|
||||
.any(|tag| item.tags.iter().any(|t| t.eq_ignore_ascii_case(tag)));
|
||||
if !has {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
// decade: year in [decade, decade+9]
|
||||
if let Some(decade) = filter.decade {
|
||||
match item.year {
|
||||
Some(y) if y >= decade && y <= decade + 9 => {}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
|
||||
// duration bounds
|
||||
if let Some(min) = filter.min_duration_secs {
|
||||
if item.duration_secs < min {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
if let Some(max) = filter.max_duration_secs {
|
||||
if item.duration_secs > max {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
// search_term: case-insensitive substring in title
|
||||
if let Some(ref q) = filter.search_term {
|
||||
if !item.title.to_lowercase().contains(&q.to_lowercase()) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
Some(to_media_item(id, &item))
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
async fn fetch_by_id(&self, item_id: &MediaItemId) -> DomainResult<Option<MediaItem>> {
|
||||
Ok(self
|
||||
.index
|
||||
.get(item_id)
|
||||
.await
|
||||
.map(|item| to_media_item(item_id.clone(), &item)))
|
||||
}
|
||||
|
||||
async fn get_stream_url(&self, item_id: &MediaItemId) -> DomainResult<String> {
|
||||
Ok(format!(
|
||||
"{}/api/v1/files/stream/{}",
|
||||
self.base_url,
|
||||
item_id.as_ref()
|
||||
))
|
||||
}
|
||||
|
||||
async fn list_collections(&self) -> DomainResult<Vec<Collection>> {
|
||||
let dirs = self.index.collections().await;
|
||||
Ok(dirs
|
||||
.into_iter()
|
||||
.map(|d| Collection {
|
||||
id: d.clone(),
|
||||
name: d,
|
||||
collection_type: None,
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
}
|
||||
|
||||
/// Decode an encoded ID from a URL path segment to its relative path string.
|
||||
pub fn decode_stream_id(encoded: &str) -> Option<String> {
|
||||
decode_id(&MediaItemId::new(encoded))
|
||||
}
|
||||
164
k-tv-backend/infra/src/local_files/scanner.rs
Normal file
164
k-tv-backend/infra/src/local_files/scanner.rs
Normal file
@@ -0,0 +1,164 @@
|
||||
use std::path::Path;
|
||||
use tokio::process::Command;
|
||||
|
||||
const VIDEO_EXTENSIONS: &[&str] = &["mp4", "mkv", "avi", "mov", "webm", "m4v"];
|
||||
|
||||
/// In-memory representation of a scanned local video file.
#[derive(Debug, Clone)]
pub struct LocalFileItem {
    /// Relative path from root, with forward slashes (used as the stable ID source).
    pub rel_path: String,
    /// Display title: the file stem with `_`, `-` and `.` turned into spaces.
    pub title: String,
    /// Duration in whole seconds; 0 when ffprobe could not determine it.
    pub duration_secs: u32,
    /// Release year (1900–2099) parsed from the filename/path, if one was found.
    pub year: Option<u16>,
    /// Ancestor directory names between root and file (excluding root itself).
    pub tags: Vec<String>,
    /// First path component under root (used as collection id/name).
    pub top_dir: String,
}
|
||||
|
||||
/// Walk `root` and return all recognised video files with metadata.
|
||||
///
|
||||
/// ffprobe is called for each file to determine duration. Files that cannot be
|
||||
/// probed are included with `duration_secs = 0` so they still appear in the index.
|
||||
pub async fn scan_dir(root: &Path) -> Vec<LocalFileItem> {
|
||||
let mut items = Vec::new();
|
||||
|
||||
let walker = walkdir::WalkDir::new(root).follow_links(true);
|
||||
for entry in walker.into_iter().filter_map(|e| e.ok()) {
|
||||
if !entry.file_type().is_file() {
|
||||
continue;
|
||||
}
|
||||
let path = entry.path();
|
||||
let ext = path
|
||||
.extension()
|
||||
.and_then(|e| e.to_str())
|
||||
.map(|e| e.to_lowercase());
|
||||
let ext = match ext {
|
||||
Some(ref e) if VIDEO_EXTENSIONS.contains(&e.as_str()) => e.clone(),
|
||||
_ => continue,
|
||||
};
|
||||
let _ = ext; // extension validated, not needed further
|
||||
|
||||
let rel = match path.strip_prefix(root) {
|
||||
Ok(r) => r,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
// Normalise to forward-slash string for cross-platform stability.
|
||||
let rel_path: String = rel
|
||||
.components()
|
||||
.map(|c| c.as_os_str().to_string_lossy().into_owned())
|
||||
.collect::<Vec<_>>()
|
||||
.join("/");
|
||||
|
||||
// Top-level directory under root.
|
||||
let top_dir = rel
|
||||
.components()
|
||||
.next()
|
||||
.filter(|_| rel.components().count() > 1) // skip if file is at root level
|
||||
.map(|c| c.as_os_str().to_string_lossy().into_owned())
|
||||
.unwrap_or_else(|| "__root__".to_string());
|
||||
|
||||
// Title: stem with separator chars replaced by spaces.
|
||||
let stem = path
|
||||
.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let title = stem.replace(['_', '-', '.'], " ");
|
||||
let title = title.trim().to_string();
|
||||
|
||||
// Year: first 4-digit number starting with 19xx or 20xx in filename or parent dirs.
|
||||
let search_str = format!(
|
||||
"{} {}",
|
||||
stem,
|
||||
rel.parent()
|
||||
.and_then(|p| p.to_str())
|
||||
.unwrap_or("")
|
||||
);
|
||||
let year = extract_year(&search_str);
|
||||
|
||||
// Tags: ancestor directory components between root and the file.
|
||||
let tags: Vec<String> = rel
|
||||
.parent()
|
||||
.into_iter()
|
||||
.flat_map(|p| p.components())
|
||||
.map(|c| c.as_os_str().to_string_lossy().into_owned())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect();
|
||||
|
||||
let duration_secs = get_duration(path).await.unwrap_or(0);
|
||||
|
||||
items.push(LocalFileItem {
|
||||
rel_path,
|
||||
title,
|
||||
duration_secs,
|
||||
year,
|
||||
tags,
|
||||
top_dir,
|
||||
});
|
||||
}
|
||||
|
||||
items
|
||||
}
|
||||
|
||||
/// Extract the first plausible 4-digit year (1900–2099) from `s`.
///
/// The four digits must sit on a "word boundary": the characters immediately
/// before and after the candidate must not themselves be digits, so strings
/// like `"12021"` or `"20199"` yield no year.
fn extract_year(s: &str) -> Option<u16> {
    let chars: Vec<char> = s.chars().collect();
    let n = chars.len();
    if n < 4 {
        return None;
    }
    for i in 0..=(n - 4) {
        // All four chars must be ASCII digits.
        if !chars[i..i + 4].iter().all(|c| c.is_ascii_digit()) {
            continue;
        }
        // Assemble the value arithmetically in u16 (max 9999 fits easily).
        // The previous `chars[i] as u8 * 10 …` prefix computation overflowed
        // u8 (digit chars are >= 48, so *10 exceeds 255) and was dead code.
        let num = chars[i..i + 4]
            .iter()
            .fold(0u16, |acc, c| acc * 10 + (*c as u16 - u16::from(b'0')));
        if !(1900..=2099).contains(&num) {
            continue;
        }
        // Word-boundary: char before and after must not be digits.
        let before_ok = i == 0 || !chars[i - 1].is_ascii_digit();
        let after_ok = i + 4 >= n || !chars[i + 4].is_ascii_digit();
        if before_ok && after_ok {
            return Some(num);
        }
    }
    None
}
|
||||
|
||||
/// Run ffprobe to get the duration of `path` in whole seconds.
|
||||
async fn get_duration(path: &Path) -> Option<u32> {
|
||||
#[derive(serde::Deserialize)]
|
||||
struct Fmt {
|
||||
duration: Option<String>,
|
||||
}
|
||||
#[derive(serde::Deserialize)]
|
||||
struct Out {
|
||||
format: Fmt,
|
||||
}
|
||||
|
||||
let output = Command::new("ffprobe")
|
||||
.args([
|
||||
"-v",
|
||||
"quiet",
|
||||
"-print_format",
|
||||
"json",
|
||||
"-show_format",
|
||||
path.to_str()?,
|
||||
])
|
||||
.output()
|
||||
.await
|
||||
.ok()?;
|
||||
|
||||
let parsed: Out = serde_json::from_slice(&output.stdout).ok()?;
|
||||
let dur: f64 = parsed.format.duration?.parse().ok()?;
|
||||
Some(dur as u32)
|
||||
}
|
||||
@@ -0,0 +1,10 @@
|
||||
-- Snapshot of the local files index, reloaded into memory on startup so
-- content is available before the first filesystem rescan completes.
CREATE TABLE IF NOT EXISTS local_files_index (
    id TEXT PRIMARY KEY,                       -- encoded media item id, derived from rel_path
    rel_path TEXT NOT NULL UNIQUE,             -- path relative to the scan root, forward slashes
    title TEXT NOT NULL,                       -- display title derived from the file stem
    duration_secs INTEGER NOT NULL DEFAULT 0,  -- whole seconds; 0 when ffprobe could not probe it
    year INTEGER,                              -- nullable release year parsed from the name
    tags TEXT NOT NULL DEFAULT '[]',           -- JSON array of ancestor directory names
    top_dir TEXT NOT NULL DEFAULT '',          -- first directory under root (collection name)
    scanned_at TEXT NOT NULL                   -- RFC 3339 timestamp of the indexing run
);
|
||||
Reference in New Issue
Block a user