[ci skip] f1-stream: add CDN token refresh, SvelteKit frontend, multi-stream layout (Phases 6-8)
- Phase 6: CDN token lifecycle with 3-strategy URL matching and periodic refresh - Phase 7: SvelteKit 2/Svelte 5 frontend with schedule calendar and hls.js player - Phase 8: Multi-stream layout supporting up to 4 simultaneous HLS streams - Update Dockerfile to multi-stage build (Node.js frontend + Python backend) - Switch deployment to :latest tag with Always pull policy for CI-driven deploys - Update Woodpecker CI to use explicit latest tag
This commit is contained in:
parent
6867036087
commit
9fd788b158
19 changed files with 3843 additions and 17 deletions
|
|
@ -21,7 +21,7 @@ steps:
|
|||
context: stacks/f1-stream/files
|
||||
platforms: linux/amd64
|
||||
provenance: false
|
||||
auto_tag: true
|
||||
tags: latest
|
||||
|
||||
- name: deploy
|
||||
image: bitnami/kubectl
|
||||
|
|
|
|||
|
|
@ -1,3 +1,15 @@
|
|||
## Stage 1: Build frontend
# Node is only needed at build time; the output is copied into the
# Python image below so the final image ships no Node toolchain.
FROM node:22-slim AS frontend-builder

WORKDIR /frontend

# Install dependencies first so the npm layer is cached across
# source-only changes. The lockfile glob tolerates a missing lockfile.
COPY frontend/package.json frontend/package-lock.json* ./
RUN npm install

# Copy sources and produce the static SvelteKit build.
COPY frontend/ ./
RUN npm run build

## Stage 2: Python backend + static frontend
FROM python:3.13-slim-bookworm

WORKDIR /app
|
||||
|
|
@ -7,6 +19,9 @@ RUN pip install --no-cache-dir -r requirements.txt
|
|||
|
||||
COPY backend/ ./backend/
|
||||
|
||||
# Copy built frontend into the image
|
||||
COPY --from=frontend-builder /frontend/build ./frontend/build
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["uvicorn", "backend.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
"""F1 Streams - FastAPI backend with schedule, stream extraction, health checking, and HLS proxy."""
|
||||
"""F1 Streams - FastAPI backend with schedule, stream extraction, health checking, HLS proxy, and token refresh."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
|
|
@ -8,11 +9,14 @@ from apscheduler.triggers.cron import CronTrigger
|
|||
from apscheduler.triggers.interval import IntervalTrigger
|
||||
from fastapi import FastAPI, Query, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from pydantic import BaseModel
|
||||
from starlette.responses import Response, StreamingResponse
|
||||
|
||||
from backend.extractors import create_extraction_service
|
||||
from backend.proxy import proxy_playlist, relay_stream
|
||||
from backend.schedule import ScheduleService
|
||||
from backend.token_refresh import TokenRefreshManager
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
|
|
@ -22,9 +26,29 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
schedule_service = ScheduleService()
|
||||
extraction_service = create_extraction_service()
|
||||
token_refresh_manager = TokenRefreshManager(extraction_service)
|
||||
scheduler = AsyncIOScheduler()
|
||||
|
||||
|
||||
# --- Pydantic models for request bodies ---
|
||||
|
||||
|
||||
class ActivateStreamRequest(BaseModel):
    """Body schema for POST /streams/activate.

    ``url`` is the stream the client started watching; ``site_key`` names
    the extractor that produced it and may be left empty, in which case the
    endpoint tries to recover it from the extraction cache.
    """

    url: str
    site_key: str = ""
|
||||
|
||||
|
||||
class DeactivateStreamRequest(BaseModel):
    """Body schema for POST /streams/deactivate.

    Carries only the stream URL whose background token refresh should stop.
    """

    url: str
|
||||
|
||||
|
||||
# --- Scheduled callbacks ---
|
||||
|
||||
|
||||
async def _scheduled_refresh() -> None:
|
||||
"""Callback for APScheduler daily schedule refresh."""
|
||||
logger.info("Running scheduled schedule refresh...")
|
||||
|
|
@ -71,6 +95,22 @@ async def _scheduled_extraction() -> None:
|
|||
)
|
||||
|
||||
|
||||
async def _scheduled_token_refresh() -> None:
    """APScheduler callback: refresh CDN tokens for actively watched streams.

    Cheap no-op when nothing is active. Otherwise delegates to the
    TokenRefreshManager; any failure is logged and swallowed so the
    scheduler itself never dies.
    """
    if not token_refresh_manager.has_active_streams:
        return

    logger.info("Running scheduled token refresh...")
    try:
        await token_refresh_manager.refresh_active_streams()
    except Exception:
        # Non-fatal by design: a missed refresh just means the old token
        # keeps being used until the next cycle.
        logger.exception("Token refresh failed (non-fatal)")
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
"""Startup and shutdown lifecycle handler."""
|
||||
|
|
@ -99,8 +139,20 @@ async def lifespan(app: FastAPI):
|
|||
replace_existing=True,
|
||||
)
|
||||
|
||||
# Schedule token refresh every 4 minutes (safe margin for 5-min CDN tokens).
|
||||
# The callback is a no-op when there are no active streams.
|
||||
scheduler.add_job(
|
||||
_scheduled_token_refresh,
|
||||
trigger=IntervalTrigger(minutes=4),
|
||||
id="token_refresh",
|
||||
name="Refresh CDN tokens for active streams",
|
||||
replace_existing=True,
|
||||
)
|
||||
|
||||
scheduler.start()
|
||||
logger.info("APScheduler started - schedule refresh at 03:00 UTC, extraction every 30m")
|
||||
logger.info(
|
||||
"APScheduler started - schedule refresh at 03:00 UTC, extraction every 30m, token refresh every 4m"
|
||||
)
|
||||
|
||||
yield
|
||||
|
||||
|
|
@ -116,8 +168,8 @@ app = FastAPI(title="F1 Streams", lifespan=lifespan)
|
|||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"],
|
||||
allow_methods=["GET", "OPTIONS"],
|
||||
allow_headers=["Range"],
|
||||
allow_methods=["GET", "POST", "OPTIONS"],
|
||||
allow_headers=["Range", "Content-Type"],
|
||||
expose_headers=["Content-Range", "Content-Length", "Content-Type"],
|
||||
)
|
||||
|
||||
|
|
@ -130,11 +182,6 @@ async def health():
|
|||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
return {"service": "f1-streams", "version": "4.0.0"}
|
||||
|
||||
|
||||
# --- Schedule ---
|
||||
|
||||
|
||||
|
|
@ -183,6 +230,79 @@ async def get_all_streams():
|
|||
}
|
||||
|
||||
|
||||
@app.post("/streams/activate")
async def activate_stream(body: ActivateStreamRequest):
    """Mark a stream as actively being watched.

    While a stream is active, the token refresh manager periodically
    re-runs the extractor that found it so its CDN token is renewed
    before expiry.

    If ``site_key`` is omitted, it is looked up from the cached extraction
    results by matching the URL.

    Body:
        {"url": "https://...", "site_key": "optional-site-key"}
    """
    url = body.url
    site_key = body.site_key

    if not site_key:
        # Recover the site_key from the extraction cache by URL match.
        for cached_streams in extraction_service._cache.values():
            found = next(
                (s.site_key for s in cached_streams if s.url == url), None
            )
            if found:
                site_key = found
                break

    if not site_key:
        return {
            "status": "error",
            "detail": "Could not determine site_key for this URL. Provide it explicitly.",
        }

    token_refresh_manager.mark_stream_active(url, site_key)
    return {
        "status": "activated",
        "url": url,
        "site_key": site_key,
        "active_count": len(token_refresh_manager.get_active_streams()),
    }
|
||||
|
||||
|
||||
@app.post("/streams/deactivate")
async def deactivate_stream(body: DeactivateStreamRequest):
    """Mark a stream as no longer being watched.

    Stops the background CDN-token refresh for this URL.

    Body:
        {"url": "https://..."}
    """
    target = body.url
    token_refresh_manager.mark_stream_inactive(target)
    return {
        "status": "deactivated",
        "url": target,
        "active_count": len(token_refresh_manager.get_active_streams()),
    }
|
||||
|
||||
|
||||
@app.get("/streams/active")
async def get_active_streams():
    """List currently active streams with their refresh status.

    Includes each stream's current (possibly token-refreshed) URL and its
    refresh count.
    """
    streams = token_refresh_manager.get_active_streams()
    return {"streams": streams, "count": len(streams)}
|
||||
|
||||
|
||||
@app.get("/extractors")
|
||||
async def get_extractors():
|
||||
"""List registered extractors and their current status."""
|
||||
|
|
@ -220,6 +340,11 @@ def _get_proxy_base(request: Request) -> str:
|
|||
async def proxy_endpoint(
|
||||
request: Request,
|
||||
url: str = Query(..., description="Base64url-encoded m3u8 playlist URL"),
|
||||
quality: int | None = Query(
|
||||
None,
|
||||
description="0-based quality variant index (0=highest bandwidth). "
|
||||
"Only applies to master playlists.",
|
||||
),
|
||||
):
|
||||
"""Proxy an upstream m3u8 playlist with URI rewriting.
|
||||
|
||||
|
|
@ -229,11 +354,22 @@ async def proxy_endpoint(
|
|||
|
||||
The `url` parameter must be base64url-encoded to avoid URL encoding issues.
|
||||
|
||||
Example:
|
||||
If `quality` is specified and the upstream is a master playlist (with
|
||||
multiple quality variants), the proxy will fetch the selected variant's
|
||||
media playlist directly instead of returning the master playlist.
|
||||
Quality index 0 = highest bandwidth, 1 = second highest, etc.
|
||||
|
||||
Examples:
|
||||
GET /proxy?url=aHR0cHM6Ly9leGFtcGxlLmNvbS9zdHJlYW0ubTN1OA
|
||||
GET /proxy?url=aHR0cHM6Ly9leGFtcGxlLmNvbS9zdHJlYW0ubTN1OA&quality=0
|
||||
"""
|
||||
# Check if we have a fresher URL from token refresh
|
||||
fresh_url = token_refresh_manager.get_fresh_url(url)
|
||||
if fresh_url != url:
|
||||
logger.info("Using refreshed URL from token manager")
|
||||
|
||||
proxy_base = _get_proxy_base(request)
|
||||
rewritten = await proxy_playlist(url, proxy_base)
|
||||
rewritten = await proxy_playlist(fresh_url, proxy_base, quality=quality)
|
||||
|
||||
return Response(
|
||||
content=rewritten,
|
||||
|
|
@ -273,6 +409,19 @@ async def relay_endpoint(
|
|||
)
|
||||
|
||||
|
||||
# --- Frontend Static Files ---
|
||||
# Mount the SvelteKit static build AFTER all API routes so API endpoints take priority.
|
||||
_frontend_dir = os.path.join(os.path.dirname(__file__), "..", "frontend", "build")
|
||||
if os.path.exists(_frontend_dir):
|
||||
app.mount("/", StaticFiles(directory=_frontend_dir, html=True), name="frontend")
|
||||
logger.info("Serving frontend from %s", _frontend_dir)
|
||||
else:
|
||||
# Fallback root when no frontend build exists
|
||||
@app.get("/")
|
||||
async def root():
|
||||
return {"service": "f1-streams", "version": "5.0.0"}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
|
|
|
|||
|
|
@ -1,10 +1,12 @@
|
|||
"""HLS proxy - fetches upstream m3u8 playlists and relays media segments.
|
||||
|
||||
Two core functions:
|
||||
Three core functions:
|
||||
1. Playlist proxy: fetches an upstream m3u8 playlist, rewrites all URIs
|
||||
to route through our /proxy and /relay endpoints, returns the rewritten
|
||||
playlist to the client.
|
||||
2. Segment relay: fetches an upstream media segment (TS, fMP4, init) and
|
||||
2. Quality selection: when the upstream m3u8 is a master playlist containing
|
||||
multiple quality variants, allows selecting a specific variant by index.
|
||||
3. Segment relay: fetches an upstream media segment (TS, fMP4, init) and
|
||||
streams it to the client using chunked transfer encoding, never buffering
|
||||
the full segment in memory.
|
||||
|
||||
|
|
@ -12,7 +14,10 @@ All responses include CORS headers for browser playback.
|
|||
"""
|
||||
|
||||
import logging
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from typing import AsyncGenerator
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import httpx
|
||||
from fastapi import HTTPException
|
||||
|
|
@ -38,13 +43,218 @@ USER_AGENT = (
|
|||
)
|
||||
|
||||
|
||||
async def proxy_playlist(encoded_url: str, proxy_base: str) -> str:
|
||||
@dataclass
class QualityVariant:
    """One quality variant parsed from a master HLS playlist.

    Attributes mirror the #EXT-X-STREAM-INF tag: ``bandwidth`` in bits/sec,
    ``resolution`` like "1920x1080" (may be ""), ``codecs`` like
    "avc1.640028,mp4a.40.2" (may be ""), ``name`` like "720p" (may be ""),
    and ``uri`` is the absolute variant-playlist URL. ``index`` is the
    0-based position after sorting by bandwidth.
    """

    index: int
    bandwidth: int
    resolution: str
    codecs: str
    name: str
    uri: str

    def to_dict(self) -> dict:
        """Return a JSON-serializable dict of all variant fields."""
        return {
            field: getattr(self, field)
            for field in ("index", "bandwidth", "resolution", "codecs", "name", "uri")
        }
|
||||
|
||||
|
||||
def _is_master_playlist(content: str) -> bool:
|
||||
"""Check if an m3u8 playlist is a master playlist (contains variant streams).
|
||||
|
||||
A master playlist contains #EXT-X-STREAM-INF tags pointing to variant
|
||||
playlists. A media playlist contains #EXTINF tags pointing to segments.
|
||||
|
||||
Args:
|
||||
content: The raw m3u8 playlist text.
|
||||
|
||||
Returns:
|
||||
True if this is a master playlist.
|
||||
"""
|
||||
return "#EXT-X-STREAM-INF:" in content
|
||||
|
||||
|
||||
def parse_quality_variants(content: str, base_url: str) -> list[QualityVariant]:
    """Parse #EXT-X-STREAM-INF variants from a master HLS playlist.

    Args:
        content: The raw m3u8 master playlist text.
        base_url: The playlist URL, used to resolve relative variant URIs.

    Returns:
        QualityVariant objects sorted by bandwidth (highest first), with
        each ``index`` renumbered to match the sorted order.
    """
    tag = "#EXT-X-STREAM-INF:"
    lines = content.splitlines()
    found: list[QualityVariant] = []

    for pos, raw in enumerate(lines):
        line = raw.strip()
        if not line.startswith(tag):
            continue

        attrs = line[len(tag):]

        # The variant URI is the first non-blank, non-comment line below the tag.
        variant_uri = next(
            (
                candidate.strip()
                for candidate in lines[pos + 1:]
                if candidate.strip() and not candidate.strip().startswith("#")
            ),
            "",
        )
        if not variant_uri:
            continue

        # Resolve relative URIs against the playlist's own URL.
        if not variant_uri.startswith(("http://", "https://")):
            variant_uri = urljoin(base_url, variant_uri)

        resolution = _parse_attr_str(attrs, "RESOLUTION")
        name = _parse_attr_quoted(attrs, "NAME")
        if not name and resolution:
            # Derive e.g. "1080p" from "1920x1080" when no NAME is present.
            dims = resolution.split("x")
            if len(dims) == 2:
                name = f"{dims[1]}p"

        found.append(
            QualityVariant(
                index=len(found),
                bandwidth=_parse_attr_int(attrs, "BANDWIDTH"),
                resolution=resolution,
                codecs=_parse_attr_quoted(attrs, "CODECS"),
                name=name,
                uri=variant_uri,
            )
        )

    # Highest bandwidth first, then renumber indices to the sorted order.
    found.sort(key=lambda variant: variant.bandwidth, reverse=True)
    for new_index, variant in enumerate(found):
        variant.index = new_index

    return found
|
||||
|
||||
|
||||
def _select_variant_playlist(
    content: str, base_url: str, variant_index: int
) -> str:
    """Resolve a master playlist to a single variant's media-playlist URL.

    Rather than returning the full master playlist, hands back the URL of
    the chosen variant so the caller can fetch and proxy that instead.

    Args:
        content: The raw m3u8 master playlist text.
        base_url: The URL of the playlist (for resolving relative URIs).
        variant_index: 0-based index of the desired variant (sorted by bandwidth desc).

    Returns:
        The absolute URL of the selected variant's media playlist.

    Raises:
        HTTPException: 400 when there are no variants or the index is out of range.
    """
    variants = parse_quality_variants(content, base_url)

    if not variants:
        raise HTTPException(
            status_code=400,
            detail="Playlist has no quality variants to select from",
        )

    if not 0 <= variant_index < len(variants):
        raise HTTPException(
            status_code=400,
            detail=f"Quality index {variant_index} out of range (0-{len(variants) - 1})",
        )

    chosen = variants[variant_index]
    logger.info(
        "Selected quality variant %d: %s (%d bps, %s)",
        variant_index,
        chosen.name or "unknown",
        chosen.bandwidth,
        chosen.resolution or "no resolution",
    )

    return chosen.uri
|
||||
|
||||
|
||||
def _parse_attr_int(attrs: str, name: str) -> int:
|
||||
"""Parse an integer attribute from an HLS tag attribute string.
|
||||
|
||||
Args:
|
||||
attrs: The attribute string (e.g., 'BANDWIDTH=1280000,RESOLUTION=720x480').
|
||||
name: The attribute name to extract.
|
||||
|
||||
Returns:
|
||||
The integer value, or 0 if not found.
|
||||
"""
|
||||
match = re.search(rf"{name}=(\d+)", attrs)
|
||||
return int(match.group(1)) if match else 0
|
||||
|
||||
|
||||
def _parse_attr_str(attrs: str, name: str) -> str:
|
||||
"""Parse a bare (unquoted) string attribute from an HLS tag attribute string.
|
||||
|
||||
Args:
|
||||
attrs: The attribute string.
|
||||
name: The attribute name to extract.
|
||||
|
||||
Returns:
|
||||
The string value, or empty string if not found.
|
||||
"""
|
||||
match = re.search(rf"{name}=([^,\s\"]+)", attrs)
|
||||
return match.group(1) if match else ""
|
||||
|
||||
|
||||
def _parse_attr_quoted(attrs: str, name: str) -> str:
|
||||
"""Parse a quoted string attribute from an HLS tag attribute string.
|
||||
|
||||
Args:
|
||||
attrs: The attribute string.
|
||||
name: The attribute name to extract.
|
||||
|
||||
Returns:
|
||||
The string value (without quotes), or empty string if not found.
|
||||
"""
|
||||
match = re.search(rf'{name}="([^"]*)"', attrs)
|
||||
return match.group(1) if match else ""
|
||||
|
||||
|
||||
async def proxy_playlist(
|
||||
encoded_url: str, proxy_base: str, quality: int | None = None
|
||||
) -> str:
|
||||
"""Fetch an upstream m3u8 playlist and rewrite all URIs through our proxy.
|
||||
|
||||
If the upstream playlist is a master playlist (containing multiple quality
|
||||
variants) and a quality index is specified, fetches the selected variant's
|
||||
media playlist instead and rewrites that.
|
||||
|
||||
Args:
|
||||
encoded_url: Base64url-encoded URL of the upstream m3u8 playlist.
|
||||
proxy_base: The base URL of our proxy service for rewriting URIs
|
||||
(e.g., "https://f1.viktorbarzin.me").
|
||||
quality: Optional 0-based index of the desired quality variant.
|
||||
Only applies when the upstream is a master playlist.
|
||||
Variants are sorted by bandwidth descending (0 = highest).
|
||||
|
||||
Returns:
|
||||
The rewritten m3u8 playlist text.
|
||||
|
|
@ -107,6 +317,68 @@ async def proxy_playlist(encoded_url: str, proxy_base: str) -> str:
|
|||
detail="Upstream response is not a valid HLS playlist",
|
||||
)
|
||||
|
||||
# If this is a master playlist and a quality variant was requested,
|
||||
# fetch the selected variant's media playlist instead
|
||||
if quality is not None and _is_master_playlist(content):
|
||||
variant_url = _select_variant_playlist(content, url, quality)
|
||||
logger.info("Fetching selected variant playlist: %s", variant_url)
|
||||
|
||||
try:
|
||||
async with httpx.AsyncClient(
|
||||
timeout=PLAYLIST_TIMEOUT,
|
||||
follow_redirects=True,
|
||||
headers={
|
||||
"User-Agent": USER_AGENT,
|
||||
"Accept": "*/*",
|
||||
},
|
||||
) as client:
|
||||
variant_response = await client.get(variant_url)
|
||||
|
||||
if variant_response.status_code != 200:
|
||||
logger.warning(
|
||||
"Variant playlist returned HTTP %d for %s",
|
||||
variant_response.status_code,
|
||||
variant_url,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=502,
|
||||
detail=f"Variant playlist returned HTTP {variant_response.status_code}",
|
||||
)
|
||||
|
||||
content = variant_response.text
|
||||
url = variant_url # Use variant URL as base for relative URI resolution
|
||||
|
||||
if "#EXTM3U" not in content:
|
||||
logger.warning(
|
||||
"Variant playlist is not valid m3u8: %s", variant_url
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=502,
|
||||
detail="Variant playlist is not a valid HLS playlist",
|
||||
)
|
||||
|
||||
except httpx.TimeoutException:
|
||||
logger.error("Timeout fetching variant playlist: %s", variant_url)
|
||||
raise HTTPException(
|
||||
status_code=504, detail="Variant playlist timeout"
|
||||
)
|
||||
except httpx.HTTPError as e:
|
||||
logger.error(
|
||||
"HTTP error fetching variant playlist: %s - %s", variant_url, e
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=502, detail=f"Variant playlist error: {e}"
|
||||
)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
"Unexpected error fetching variant playlist: %s", variant_url
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Internal error: {e}"
|
||||
)
|
||||
|
||||
# Rewrite all URIs to go through our proxy
|
||||
rewritten = rewrite_playlist(content, url, proxy_base)
|
||||
|
||||
|
|
|
|||
362
stacks/f1-stream/files/backend/token_refresh.py
Normal file
362
stacks/f1-stream/files/backend/token_refresh.py
Normal file
|
|
@ -0,0 +1,362 @@
|
|||
"""Token refresh manager - keeps CDN tokens fresh for active streams.
|
||||
|
||||
CDN tokens embedded in stream URLs expire after 5-30 minutes. During a 2+ hour
|
||||
F1 session, URLs must be refreshed before they expire. This manager periodically
|
||||
re-runs the extractor that found each active stream to get a fresh URL with a
|
||||
new CDN token.
|
||||
|
||||
Usage:
|
||||
1. When a user starts watching, call mark_stream_active(url, site_key)
|
||||
2. The background scheduler calls refresh_active_streams() every 4 minutes
|
||||
3. The proxy calls get_fresh_url(url) to resolve the latest URL
|
||||
4. When the user stops watching, call mark_stream_inactive(url)
|
||||
"""
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class ActiveStream:
    """Bookkeeping for a stream a user is currently watching.

    ``original_url`` is the activation key and never changes. ``current_url``
    is swapped out whenever a token refresh finds a newer URL (fresh CDN
    token, possibly a different edge server), but lookups always use the
    original URL.
    """

    original_url: str
    current_url: str  # latest known URL; diverges from original after a refresh
    site_key: str
    last_refreshed: str
    refresh_count: int = 0
    last_error: str = ""

    def to_dict(self) -> dict:
        """Serialize to a plain dictionary for JSON responses."""
        return {
            field: getattr(self, field)
            for field in (
                "original_url",
                "current_url",
                "site_key",
                "last_refreshed",
                "refresh_count",
                "last_error",
            )
        }
|
||||
|
||||
|
||||
class TokenRefreshManager:
|
||||
"""Manages background token refresh for active streams.
|
||||
|
||||
When a user is watching a stream, the manager periodically re-runs
|
||||
the extractor that found it to get a fresh URL with a new token.
|
||||
The fresh URL is stored so the /proxy endpoint can use it on the
|
||||
next playlist fetch.
|
||||
"""
|
||||
|
||||
def __init__(self, extraction_service) -> None:
    """Initialize the token refresh manager.

    Args:
        extraction_service: The ExtractionService used to re-run extractors
            and to look up cached stream metadata by site_key.
    """
    # Deferred import: importing at module level would create a circular
    # import with the extractors package.
    from backend.extractors.service import ExtractionService

    self._extraction_service: ExtractionService = extraction_service
    # Keyed by the stream's original URL (the activation key).
    self._active_streams: dict[str, ActiveStream] = {}
    # Seconds between refresh cycles; 4 minutes leaves a safe margin
    # before 5-minute CDN tokens expire.
    self._refresh_interval = 240
|
||||
|
||||
@property
def refresh_interval(self) -> int:
    """Refresh interval in seconds between token refresh cycles."""
    return self._refresh_interval
|
||||
|
||||
@property
def has_active_streams(self) -> bool:
    """True when at least one stream is currently being watched."""
    return bool(self._active_streams)
|
||||
|
||||
def mark_stream_active(self, url: str, site_key: str) -> None:
    """Register *url* as actively watched (idempotent).

    Args:
        url: The stream URL the user is watching.
        site_key: The extractor site_key that found this stream.
    """
    if url in self._active_streams:
        logger.debug("Stream already active: %s", url)
        return

    activated_at = datetime.now(timezone.utc).isoformat()
    self._active_streams[url] = ActiveStream(
        original_url=url,
        current_url=url,
        site_key=site_key,
        last_refreshed=activated_at,
    )
    logger.info(
        "Stream marked active: %s (site_key=%s, total_active=%d)",
        url,
        site_key,
        len(self._active_streams),
    )
|
||||
|
||||
def mark_stream_inactive(self, url: str) -> None:
    """Stop refreshing tokens for *url* (no-op when it was never active).

    Args:
        url: The original stream URL to deactivate.
    """
    gone = self._active_streams.pop(url, None)
    if gone is None:
        logger.debug("Stream was not active, nothing to deactivate: %s", url)
        return

    logger.info(
        "Stream marked inactive: %s (was refreshed %d times, total_active=%d)",
        url,
        gone.refresh_count,
        len(self._active_streams),
    )
|
||||
|
||||
async def refresh_active_streams(self) -> None:
    """Re-run extractors for all active streams to get fresh URLs.

    Streams are grouped by site_key so each extractor runs at most once
    per cycle. When a match is found in the fresh results, current_url is
    updated; otherwise the previous URL is kept (it may still work until
    its token expires). Failures are recorded on the affected streams and
    never propagate — this runs from the background scheduler.
    """
    if not self._active_streams:
        logger.debug("No active streams to refresh")
        return

    logger.info(
        "Refreshing tokens for %d active stream(s)...",
        len(self._active_streams),
    )

    # One extractor run per site, no matter how many streams it serves.
    by_site: dict[str, list[ActiveStream]] = {}
    for active in self._active_streams.values():
        by_site.setdefault(active.site_key, []).append(active)

    cycle_ts = datetime.now(timezone.utc).isoformat()

    for site, group in by_site.items():
        try:
            await self._refresh_site(site, group, cycle_ts)
        except Exception:
            logger.exception(
                "Failed to refresh tokens for site_key=%s", site
            )
            # Record the failure on every stream from this site.
            for active in group:
                active.last_error = f"Refresh failed at {cycle_ts}"
|
||||
|
||||
async def _refresh_site(
    self, site_key: str, active_list: list[ActiveStream], now: str
) -> None:
    """Re-run a single extractor and update active streams from its results.

    Matching strategies, tried in order per stream:
      1. quality+title metadata match (stable across token rotation),
      2. same host+path with a different query string (rotated token),
      3. single-result fallback (only one fresh stream exists).

    Args:
        site_key: The extractor's site_key.
        active_list: List of ActiveStream objects from this extractor.
        now: ISO timestamp for this refresh cycle.
    """
    registry = self._extraction_service._registry
    extractor = registry.get(site_key)

    if extractor is None:
        logger.warning(
            "Extractor '%s' not found in registry, skipping refresh",
            site_key,
        )
        for stream in active_list:
            stream.last_error = f"Extractor '{site_key}' not found"
        return

    logger.info(
        "Re-running extractor '%s' for token refresh (%d active stream(s))",
        site_key,
        len(active_list),
    )

    # Re-run the extractor to get fresh URLs.
    try:
        fresh_streams = await extractor.extract()
    except Exception as e:
        logger.error(
            "Extractor '%s' failed during token refresh: %s", site_key, e
        )
        for stream in active_list:
            stream.last_error = f"Extraction failed: {e}"
        return

    if not fresh_streams:
        logger.warning(
            "Extractor '%s' returned no streams during token refresh",
            site_key,
        )
        for stream in active_list:
            stream.last_error = "Extractor returned no streams"
        return

    # The URL itself changes on every refresh (new token), so match by
    # quality+title metadata, which stays stable across refreshes.
    fresh_by_key: dict[str, str] = {
        f"{fs.quality}|{fs.title}": fs.url for fs in fresh_streams
    }
    # Keep all fresh URLs for the path-based and single-result fallbacks.
    all_fresh_urls = [fs.url for fs in fresh_streams]

    for stream in active_list:
        # Strategy 1: quality+title metadata match.
        match_key = self._build_match_key(stream)
        if match_key and match_key in fresh_by_key:
            self._apply_refreshed_url(
                stream, fresh_by_key[match_key], now, "quality+title match"
            )
            continue

        # Strategy 2: same host+path, different query params (rotated token).
        matched_url = self._find_url_by_path(stream.current_url, all_fresh_urls)
        if matched_url:
            self._apply_refreshed_url(stream, matched_url, now, "path match")
            continue

        # Strategy 3: only one fresh stream — assume it is the same one.
        if len(all_fresh_urls) == 1:
            self._apply_refreshed_url(
                stream, all_fresh_urls[0], now, "single result fallback"
            )
            continue

        # No match found - keep the old URL and log.
        logger.warning(
            "Could not match active stream to fresh results: %s",
            stream.original_url[:80],
        )
        stream.last_error = "No matching stream in fresh results"

def _apply_refreshed_url(
    self, stream: ActiveStream, new_url: str, now: str, strategy: str
) -> None:
    """Record *new_url* on *stream* if it changed, logging the match strategy.

    Leaves the stream untouched (including timestamps and counters) when the
    fresh URL is identical to the current one.
    """
    if new_url == stream.current_url:
        return

    logger.info(
        "Token refreshed for stream (%s): %s -> %s",
        strategy,
        stream.current_url[:80],
        new_url[:80],
    )
    stream.current_url = new_url
    stream.last_refreshed = now
    stream.refresh_count += 1
    stream.last_error = ""
|
||||
|
||||
def _build_match_key(self, stream: ActiveStream) -> str:
    """Derive a quality+title match key for *stream* from cached metadata.

    The extraction service's cache for the stream's site is scanned for an
    entry whose URL equals either the stream's current or original URL.

    Returns:
        ``"<quality>|<title>"`` when the stream is found in the cache,
        otherwise an empty string (meaning: no metadata available).
    """
    for cached in self._extraction_service._cache.get(stream.site_key, []):
        if cached.url in (stream.current_url, stream.original_url):
            return f"{cached.quality}|{cached.title}"
    return ""
|
||||
|
||||
@staticmethod
|
||||
def _find_url_by_path(current_url: str, fresh_urls: list[str]) -> str | None:
|
||||
"""Find a fresh URL that matches the current URL by path (ignoring query params).
|
||||
|
||||
CDN token refreshes typically change query parameters but keep the
|
||||
same path structure. This matcher strips query params and compares
|
||||
the path component.
|
||||
|
||||
Args:
|
||||
current_url: The current (possibly expired) URL.
|
||||
fresh_urls: List of fresh URLs to match against.
|
||||
|
||||
Returns:
|
||||
The matching fresh URL, or None if no match.
|
||||
"""
|
||||
from urllib.parse import urlparse
|
||||
|
||||
current_parsed = urlparse(current_url)
|
||||
current_path = current_parsed.path
|
||||
|
||||
for fresh_url in fresh_urls:
|
||||
fresh_parsed = urlparse(fresh_url)
|
||||
# Match on host + path (token is typically in query string)
|
||||
if (
|
||||
fresh_parsed.netloc == current_parsed.netloc
|
||||
and fresh_parsed.path == current_path
|
||||
):
|
||||
return fresh_url
|
||||
|
||||
return None
|
||||
|
||||
def get_fresh_url(self, original_url: str) -> str:
    """Resolve the latest URL for a stream after any token refreshes.

    Non-active (or never-refreshed) URLs are returned unchanged, so this
    is always safe to call.

    Args:
        original_url: The URL to look up (the original key, or any
            intermediate refreshed URL the caller may still hold).

    Returns:
        The most recent URL known for this stream.
    """
    by_original = self._active_streams.get(original_url)
    if by_original is not None:
        return by_original.current_url

    # The caller may hold an intermediate refreshed URL rather than the
    # original key; fall back to scanning current URLs.
    by_current = next(
        (s for s in self._active_streams.values() if s.current_url == original_url),
        None,
    )
    if by_current is not None:
        return by_current.current_url

    # Not tracked at all - pass through untouched.
    return original_url
|
||||
|
||||
def get_active_streams(self) -> list[dict]:
    """Serialize every tracked stream with its refresh status.

    Returns:
        One ``to_dict()`` payload per active stream, in insertion order.
    """
    return [active.to_dict() for active in self._active_streams.values()]
|
||||
3
stacks/f1-stream/files/frontend/.gitignore
vendored
Normal file
3
stacks/f1-stream/files/frontend/.gitignore
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
node_modules/
|
||||
build/
|
||||
.svelte-kit/
|
||||
2140
stacks/f1-stream/files/frontend/package-lock.json
generated
Normal file
2140
stacks/f1-stream/files/frontend/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
23
stacks/f1-stream/files/frontend/package.json
Normal file
23
stacks/f1-stream/files/frontend/package.json
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
{
|
||||
"name": "f1-stream-frontend",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite dev",
|
||||
"build": "vite build",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/adapter-static": "^3.0.0",
|
||||
"@sveltejs/kit": "^2.0.0",
|
||||
"@sveltejs/vite-plugin-svelte": "^5.0.0",
|
||||
"@tailwindcss/vite": "^4.0.0",
|
||||
"svelte": "^5.0.0",
|
||||
"tailwindcss": "^4.0.0",
|
||||
"vite": "^6.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"hls.js": "^1.5.0"
|
||||
}
|
||||
}
|
||||
35
stacks/f1-stream/files/frontend/src/app.css
Normal file
35
stacks/f1-stream/files/frontend/src/app.css
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
@import "tailwindcss";
|
||||
|
||||
@theme {
|
||||
--color-f1-red: #e10600;
|
||||
--color-f1-red-dark: #b50500;
|
||||
--color-f1-bg: #111111;
|
||||
--color-f1-surface: #1a1a1a;
|
||||
--color-f1-surface-hover: #242424;
|
||||
--color-f1-border: #2a2a2a;
|
||||
--color-f1-text: #e0e0e0;
|
||||
--color-f1-text-muted: #888888;
|
||||
}
|
||||
|
||||
body {
|
||||
background-color: var(--color-f1-bg);
|
||||
color: var(--color-f1-text);
|
||||
font-family: system-ui, -apple-system, sans-serif;
|
||||
}
|
||||
|
||||
/* Scrollbar styling */
|
||||
::-webkit-scrollbar {
|
||||
width: 6px;
|
||||
}
|
||||
::-webkit-scrollbar-track {
|
||||
background: var(--color-f1-bg);
|
||||
}
|
||||
::-webkit-scrollbar-thumb {
|
||||
background: var(--color-f1-border);
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
/* HLS video player */
|
||||
video::-webkit-media-controls {
|
||||
display: none !important;
|
||||
}
|
||||
13
stacks/f1-stream/files/frontend/src/app.html
Normal file
13
stacks/f1-stream/files/frontend/src/app.html
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
<!DOCTYPE html>
<!-- SvelteKit page template: %sveltekit.head% / %sveltekit.body% are
     replaced at build time by the framework. -->
<html lang="en">
	<head>
		<meta charset="utf-8" />
		<meta name="viewport" content="width=device-width, initial-scale=1" />
		<!-- Inline data-URI SVG favicon (racing-car emoji) — no separate icon file shipped. -->
		<link rel="icon" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>🏎</text></svg>" />
		<title>F1 Stream</title>
		%sveltekit.head%
	</head>
	<body data-sveltekit-preload-data="hover">
		<div style="display: contents">%sveltekit.body%</div>
	</body>
</html>
|
||||
74
stacks/f1-stream/files/frontend/src/lib/api.js
Normal file
74
stacks/f1-stream/files/frontend/src/lib/api.js
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
/**
|
||||
* API client for the F1 Streams backend.
|
||||
* All endpoints are on the same origin, so no CORS issues.
|
||||
*/
|
||||
|
||||
const API_BASE = '';
|
||||
|
||||
/**
 * Fetch the F1 race schedule with session statuses.
 * @returns {Promise<{season: string, fetched_at: string, races: Array}>}
 * @throws {Error} when the backend responds with a non-2xx status
 */
export async function fetchSchedule() {
  const response = await fetch(`${API_BASE}/schedule`);
  if (!response.ok) {
    throw new Error(`Schedule fetch failed: ${response.status}`);
  }
  return response.json();
}

/**
 * Fetch available live streams.
 * @returns {Promise<{streams: Array, count: number}>}
 * @throws {Error} when the backend responds with a non-2xx status
 */
export async function fetchStreams() {
  const response = await fetch(`${API_BASE}/streams`);
  if (!response.ok) {
    throw new Error(`Streams fetch failed: ${response.status}`);
  }
  return response.json();
}
|
||||
|
||||
/**
 * Encode a URL to base64url (RFC 4648 §5 alphabet, padding stripped)
 * for the proxy endpoint.
 * NOTE(review): btoa throws on characters outside Latin-1; this assumes
 * stream URLs are ASCII — confirm whether non-ASCII URLs can occur.
 * @param {string} rawUrl - The original m3u8 URL
 * @returns {string} base64url-encoded string
 */
function toBase64Url(rawUrl) {
  const standard = btoa(rawUrl);
  return standard
    .replace(/\+/g, '-')
    .replace(/\//g, '_')
    .replace(/=+$/, '');
}

/**
 * Get the proxied m3u8 URL for HLS playback.
 * @param {string} m3u8Url - The original m3u8 URL
 * @returns {string} The proxy URL
 */
export function getProxyUrl(m3u8Url) {
  return `${API_BASE}/proxy?url=${toBase64Url(m3u8Url)}`;
}
|
||||
|
||||
/**
 * Mark a stream as actively being watched (enables backend token refresh).
 * @param {string} url - The stream URL
 * @param {string} [siteKey] - Optional site key
 * @throws {Error} when the backend responds with a non-2xx status
 */
export async function activateStream(url, siteKey = '') {
  const response = await fetch(`${API_BASE}/streams/activate`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url, site_key: siteKey })
  });
  if (!response.ok) {
    throw new Error(`Activate failed: ${response.status}`);
  }
  return response.json();
}

/**
 * Mark a stream as no longer being watched.
 * @param {string} url - The stream URL
 * @throws {Error} when the backend responds with a non-2xx status
 */
export async function deactivateStream(url) {
  const response = await fetch(`${API_BASE}/streams/deactivate`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url })
  });
  if (!response.ok) {
    throw new Error(`Deactivate failed: ${response.status}`);
  }
  return response.json();
}
|
||||
13
stacks/f1-stream/files/frontend/src/lib/stores.js
Normal file
13
stacks/f1-stream/files/frontend/src/lib/stores.js
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
import { writable } from 'svelte/store';

/** Schedule data store (null until populated). */
export const schedule = writable(null);

/** Streams data store (null until populated). */
export const streams = writable(null);

/** Loading state flag. */
export const loading = writable(false);

/** Error state (null when there is no error). */
export const error = writable(null);
|
||||
2
stacks/f1-stream/files/frontend/src/routes/+layout.js
Normal file
2
stacks/f1-stream/files/frontend/src/routes/+layout.js
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
// Prerender every route at build time and disable SSR: the app ships as a
// static SPA (see adapter-static with an index.html fallback in svelte.config.js).
export const prerender = true;
export const ssr = false;
|
||||
28
stacks/f1-stream/files/frontend/src/routes/+layout.svelte
Normal file
28
stacks/f1-stream/files/frontend/src/routes/+layout.svelte
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
<script>
	// Global stylesheet (Tailwind entry point + F1 theme tokens).
	import '../app.css';

	// Svelte 5 runes mode: `children` is the routed page content.
	let { children } = $props();
</script>

<!-- App shell: header nav, routed content, slim footer. -->
<div class="min-h-screen flex flex-col">
	<header class="border-b border-f1-border bg-f1-surface">
		<nav class="max-w-6xl mx-auto px-4 py-3 flex items-center gap-6">
			<a href="/" class="flex items-center gap-2 text-lg font-bold text-white hover:text-f1-red transition-colors">
				<span class="text-f1-red font-black text-xl">F1</span>
				<span>Stream</span>
			</a>
			<div class="flex gap-4 text-sm">
				<a href="/" class="text-f1-text-muted hover:text-white transition-colors">Schedule</a>
				<a href="/watch" class="text-f1-text-muted hover:text-white transition-colors">Watch</a>
			</div>
		</nav>
	</header>

	<main class="flex-1">
		{@render children()}
	</main>

	<footer class="border-t border-f1-border py-3 text-center text-xs text-f1-text-muted">
		F1 Stream
	</footer>
</div>
|
||||
232
stacks/f1-stream/files/frontend/src/routes/+page.svelte
Normal file
232
stacks/f1-stream/files/frontend/src/routes/+page.svelte
Normal file
|
|
@ -0,0 +1,232 @@
|
|||
<script>
|
||||
import { fetchSchedule } from '$lib/api.js';
|
||||
import { onMount } from 'svelte';
|
||||
|
||||
let scheduleData = $state(null);
|
||||
let loading = $state(true);
|
||||
let errorMsg = $state(null);
|
||||
let now = $state(new Date());
|
||||
|
||||
// Update "now" every 30 seconds for live countdown
|
||||
let timer;
|
||||
onMount(() => {
|
||||
loadSchedule();
|
||||
timer = setInterval(() => { now = new Date(); }, 30000);
|
||||
return () => clearInterval(timer);
|
||||
});
|
||||
|
||||
async function loadSchedule() {
|
||||
loading = true;
|
||||
errorMsg = null;
|
||||
try {
|
||||
scheduleData = await fetchSchedule();
|
||||
} catch (e) {
|
||||
errorMsg = e.message;
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the next upcoming session across all races.
|
||||
*/
|
||||
let nextSession = $derived.by(() => {
|
||||
if (!scheduleData?.races) return null;
|
||||
for (const race of scheduleData.races) {
|
||||
for (const session of race.sessions) {
|
||||
if (session.status === 'upcoming') {
|
||||
return { race, session };
|
||||
}
|
||||
if (session.status === 'live') {
|
||||
return { race, session };
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
/**
 * Format an ISO date string as a full local date+time
 * (weekday, month, day, hour, minute in the user's timezone).
 */
function formatLocalTime(isoStr) {
  const options = {
    weekday: 'short',
    month: 'short',
    day: 'numeric',
    hour: '2-digit',
    minute: '2-digit'
  };
  return new Date(isoStr).toLocaleString(undefined, options);
}

/**
 * Format an ISO date string as a short local date (month + day).
 */
function formatShortDate(isoStr) {
  return new Date(isoStr).toLocaleDateString(undefined, { month: 'short', day: 'numeric' });
}

/**
 * Format an ISO date string as a local time only (hour + minute).
 */
function formatTime(isoStr) {
  return new Date(isoStr).toLocaleTimeString(undefined, { hour: '2-digit', minute: '2-digit' });
}
|
||||
|
||||
/**
 * Human-readable countdown from the reactive `now` timestamp to `isoStr`.
 * Returns 'Now' once the target time has passed.
 */
function countdown(isoStr) {
  const remaining = new Date(isoStr) - now;
  if (remaining <= 0) return 'Now';

  const MIN = 1000 * 60;
  const HOUR = MIN * 60;
  const DAY = HOUR * 24;
  const days = Math.floor(remaining / DAY);
  const hours = Math.floor((remaining % DAY) / HOUR);
  const mins = Math.floor((remaining % HOUR) / MIN);

  if (days > 0) return `${days}d ${hours}h ${mins}m`;
  if (hours > 0) return `${hours}h ${mins}m`;
  return `${mins}m`;
}
|
||||
|
||||
/**
 * Tailwind classes for a session-status badge; unknown statuses fall
 * back to the muted "past" styling.
 */
function statusClasses(status) {
  const classesByStatus = {
    live: 'bg-f1-red text-white',
    upcoming: 'bg-blue-600 text-white',
    past: 'bg-neutral-700 text-neutral-400'
  };
  return classesByStatus[status] ?? 'bg-neutral-700 text-neutral-400';
}
|
||||
|
||||
/**
 * True when any session of the race is still live or upcoming
 * (used to decide whether to highlight the race).
 */
function raceIsActive(race) {
  return race.sessions.some((session) => ['live', 'upcoming'].includes(session.status));
}

/**
 * True once every session of the race has finished.
 */
function raceIsPast(race) {
  return !race.sessions.some((session) => session.status !== 'past');
}
|
||||
</script>
|
||||
|
||||
<svelte:head>
|
||||
<title>F1 Stream - Schedule</title>
|
||||
</svelte:head>
|
||||
|
||||
<div class="max-w-6xl mx-auto px-4 py-6">
|
||||
{#if loading}
|
||||
<div class="flex items-center justify-center py-20">
|
||||
<div class="w-8 h-8 border-2 border-f1-red border-t-transparent rounded-full animate-spin"></div>
|
||||
<span class="ml-3 text-f1-text-muted">Loading schedule...</span>
|
||||
</div>
|
||||
{:else if errorMsg}
|
||||
<div class="bg-red-900/30 border border-red-700 rounded-lg p-4 text-center">
|
||||
<p class="text-red-300">Failed to load schedule: {errorMsg}</p>
|
||||
<button onclick={loadSchedule} class="mt-2 px-4 py-1 bg-f1-red text-white rounded text-sm hover:bg-f1-red-dark transition-colors">
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
{:else if scheduleData}
|
||||
<!-- Next Session Countdown -->
|
||||
{#if nextSession}
|
||||
<div class="mb-8 bg-f1-surface border border-f1-border rounded-lg p-6">
|
||||
<div class="flex flex-col sm:flex-row sm:items-center sm:justify-between gap-2">
|
||||
<div>
|
||||
<p class="text-f1-text-muted text-sm uppercase tracking-wider">
|
||||
{nextSession.session.status === 'live' ? 'Live Now' : 'Next Session'}
|
||||
</p>
|
||||
<h2 class="text-xl font-bold text-white mt-1">
|
||||
{nextSession.race.race_name} - {nextSession.session.name}
|
||||
</h2>
|
||||
<p class="text-f1-text-muted text-sm mt-1">
|
||||
{nextSession.race.circuit} · {nextSession.race.country}
|
||||
</p>
|
||||
</div>
|
||||
<div class="text-right">
|
||||
{#if nextSession.session.status === 'live'}
|
||||
<a href="/watch" class="inline-flex items-center gap-2 px-5 py-2 bg-f1-red text-white font-semibold rounded-lg hover:bg-f1-red-dark transition-colors">
|
||||
<span class="w-2 h-2 rounded-full bg-white animate-pulse"></span>
|
||||
Watch Live
|
||||
</a>
|
||||
{:else}
|
||||
<p class="text-2xl font-mono font-bold text-white">{countdown(nextSession.session.start_utc)}</p>
|
||||
<p class="text-f1-text-muted text-sm">{formatLocalTime(nextSession.session.start_utc)}</p>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- Season Header -->
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<h1 class="text-2xl font-bold text-white">{scheduleData.season} Season</h1>
|
||||
<span class="text-xs text-f1-text-muted">{scheduleData.races.length} races</span>
|
||||
</div>
|
||||
|
||||
<!-- Race List -->
|
||||
<div class="space-y-4">
|
||||
{#each scheduleData.races as race (race.round)}
|
||||
{@const isPast = raceIsPast(race)}
|
||||
<div class="bg-f1-surface border border-f1-border rounded-lg overflow-hidden {isPast ? 'opacity-50' : ''}">
|
||||
<!-- Race Header -->
|
||||
<div class="px-4 py-3 flex items-center justify-between">
|
||||
<div class="flex items-center gap-3">
|
||||
<span class="text-f1-text-muted text-sm font-mono w-8">R{race.round}</span>
|
||||
<div>
|
||||
<h3 class="font-semibold text-white">{race.race_name}</h3>
|
||||
<p class="text-xs text-f1-text-muted">{race.circuit} · {race.locality}, {race.country}</p>
|
||||
</div>
|
||||
</div>
|
||||
<span class="text-sm text-f1-text-muted">{formatShortDate(race.date)}</span>
|
||||
</div>
|
||||
|
||||
<!-- Sessions -->
|
||||
<div class="border-t border-f1-border">
|
||||
<div class="grid grid-cols-1 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-px bg-f1-border">
|
||||
{#each race.sessions as session}
|
||||
{@const isLive = session.status === 'live'}
|
||||
{@const isClickable = isLive}
|
||||
<div class="bg-f1-surface px-3 py-2 {isLive ? 'bg-f1-red/10' : ''} {isClickable ? 'hover:bg-f1-surface-hover cursor-pointer' : ''}">
|
||||
{#if isClickable}
|
||||
<a href="/watch?session={session.type}&round={race.round}" class="block">
|
||||
<div class="flex items-center justify-between">
|
||||
<span class="text-sm font-medium text-white">{session.name}</span>
|
||||
<span class="text-[10px] font-bold uppercase px-1.5 py-0.5 rounded {statusClasses(session.status)}">
|
||||
{session.status}
|
||||
</span>
|
||||
</div>
|
||||
<p class="text-xs text-f1-text-muted mt-0.5">{formatTime(session.start_utc)}</p>
|
||||
</a>
|
||||
{:else}
|
||||
<div class="flex items-center justify-between">
|
||||
<span class="text-sm font-medium {session.status === 'past' ? 'text-f1-text-muted' : 'text-white'}">{session.name}</span>
|
||||
<span class="text-[10px] font-bold uppercase px-1.5 py-0.5 rounded {statusClasses(session.status)}">
|
||||
{session.status}
|
||||
</span>
|
||||
</div>
|
||||
<p class="text-xs text-f1-text-muted mt-0.5">
|
||||
{formatTime(session.start_utc)}
|
||||
{#if session.status === 'upcoming'}
|
||||
· {countdown(session.start_utc)}
|
||||
{/if}
|
||||
</p>
|
||||
{/if}
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
435
stacks/f1-stream/files/frontend/src/routes/watch/+page.svelte
Normal file
435
stacks/f1-stream/files/frontend/src/routes/watch/+page.svelte
Normal file
|
|
@ -0,0 +1,435 @@
|
|||
<script>
|
||||
import { fetchStreams, fetchSchedule, getProxyUrl, activateStream, deactivateStream } from '$lib/api.js';
|
||||
import { onMount, onDestroy } from 'svelte';
|
||||
import { page } from '$app/state';
|
||||
|
||||
// Lazy-load hls.js to code-split it into a separate chunk
|
||||
let Hls = $state(null);
|
||||
|
||||
// Query params
|
||||
let sessionType = $derived(page.url?.searchParams?.get('session') || '');
|
||||
let roundNumber = $derived(page.url?.searchParams?.get('round') || '');
|
||||
|
||||
// State
|
||||
let streamsData = $state(null);
|
||||
let scheduleData = $state(null);
|
||||
let loading = $state(true);
|
||||
let errorMsg = $state(null);
|
||||
|
||||
// Multi-stream player state: array of active player slots
|
||||
let players = $state([]);
|
||||
const MAX_PLAYERS = 4;
|
||||
|
||||
// Current session info from schedule
|
||||
let currentRace = $derived.by(() => {
|
||||
if (!scheduleData?.races || !roundNumber) return null;
|
||||
return scheduleData.races.find(r => r.round === parseInt(roundNumber));
|
||||
});
|
||||
|
||||
let currentSession = $derived.by(() => {
|
||||
if (!currentRace || !sessionType) return null;
|
||||
return currentRace.sessions.find(s => s.type === sessionType);
|
||||
});
|
||||
|
||||
// Layout class based on player count
|
||||
let layoutClass = $derived.by(() => {
|
||||
const count = players.length;
|
||||
if (count <= 1) return 'grid-cols-1';
|
||||
if (count === 2) return 'grid-cols-2';
|
||||
return 'grid-cols-2'; // 3-4 players: 2x2 grid
|
||||
});
|
||||
|
||||
onMount(async () => {
|
||||
const hlsModule = await import('hls.js');
|
||||
Hls = hlsModule.default;
|
||||
loadData();
|
||||
document.addEventListener('fullscreenchange', onFullscreenChange);
|
||||
});
|
||||
|
||||
onDestroy(() => {
|
||||
// Clean up all players
|
||||
for (const player of players) {
|
||||
cleanupPlayer(player);
|
||||
}
|
||||
if (typeof document !== 'undefined') {
|
||||
document.removeEventListener('fullscreenchange', onFullscreenChange);
|
||||
}
|
||||
});
|
||||
|
||||
async function loadData() {
|
||||
loading = true;
|
||||
errorMsg = null;
|
||||
try {
|
||||
const [streamsResult, scheduleResult] = await Promise.all([
|
||||
fetchStreams(),
|
||||
fetchSchedule()
|
||||
]);
|
||||
streamsData = streamsResult;
|
||||
scheduleData = scheduleResult;
|
||||
} catch (e) {
|
||||
errorMsg = e.message;
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
}
|
||||
|
||||
function cleanupPlayer(player) {
|
||||
if (player.hls) {
|
||||
player.hls.destroy();
|
||||
player.hls = null;
|
||||
}
|
||||
if (player.originalUrl) {
|
||||
deactivateStream(player.originalUrl).catch(() => {});
|
||||
}
|
||||
if (player.controlsTimer) {
|
||||
clearTimeout(player.controlsTimer);
|
||||
}
|
||||
}
|
||||
|
||||
function removePlayer(index) {
|
||||
const player = players[index];
|
||||
cleanupPlayer(player);
|
||||
players = players.filter((_, i) => i !== index);
|
||||
}
|
||||
|
||||
function isStreamActive(url) {
|
||||
return players.some(p => p.originalUrl === url);
|
||||
}
|
||||
|
||||
function playStream(stream) {
|
||||
if (!Hls) return;
|
||||
|
||||
// If already playing this stream, don't add a duplicate
|
||||
if (isStreamActive(stream.url)) return;
|
||||
|
||||
// If at max players, replace the last one
|
||||
if (players.length >= MAX_PLAYERS) {
|
||||
removePlayer(players.length - 1);
|
||||
}
|
||||
|
||||
const proxyUrl = getProxyUrl(stream.url);
|
||||
const newPlayer = {
|
||||
id: Date.now(),
|
||||
proxyUrl,
|
||||
originalUrl: stream.url,
|
||||
siteKey: stream.site_key || '',
|
||||
siteName: stream.site_name || stream.site_key || 'Unknown',
|
||||
quality: stream.quality || '',
|
||||
isPlaying: false,
|
||||
isMuted: false,
|
||||
volume: 1,
|
||||
showControls: true,
|
||||
error: null,
|
||||
videoEl: null,
|
||||
containerEl: null,
|
||||
hls: null,
|
||||
controlsTimer: null,
|
||||
};
|
||||
|
||||
players = [...players, newPlayer];
|
||||
|
||||
// Activate stream for token refresh
|
||||
activateStream(stream.url, stream.site_key || '').catch(() => {});
|
||||
|
||||
// Wait for DOM to update then initialize player
|
||||
requestAnimationFrame(() => {
|
||||
requestAnimationFrame(() => {
|
||||
initPlayer(players.length - 1);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function initPlayer(index) {
|
||||
const player = players[index];
|
||||
if (!player || !player.videoEl) return;
|
||||
|
||||
if (Hls.isSupported()) {
|
||||
const hlsInstance = new Hls({
|
||||
enableWorker: true,
|
||||
lowLatencyMode: true,
|
||||
backBufferLength: 90
|
||||
});
|
||||
|
||||
hlsInstance.loadSource(player.proxyUrl);
|
||||
hlsInstance.attachMedia(player.videoEl);
|
||||
|
||||
hlsInstance.on(Hls.Events.MANIFEST_PARSED, () => {
|
||||
player.videoEl.play().catch(() => {});
|
||||
players[index] = { ...player, isPlaying: true, hls: hlsInstance };
|
||||
});
|
||||
|
||||
hlsInstance.on(Hls.Events.ERROR, (event, data) => {
|
||||
if (data.fatal) {
|
||||
switch (data.type) {
|
||||
case Hls.ErrorTypes.NETWORK_ERROR:
|
||||
players[index] = { ...players[index], error: `Network error: ${data.details}` };
|
||||
hlsInstance.startLoad();
|
||||
break;
|
||||
case Hls.ErrorTypes.MEDIA_ERROR:
|
||||
players[index] = { ...players[index], error: `Media error: ${data.details}` };
|
||||
hlsInstance.recoverMediaError();
|
||||
break;
|
||||
default:
|
||||
players[index] = { ...players[index], error: `Fatal error: ${data.details}` };
|
||||
removePlayer(index);
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
player.hls = hlsInstance;
|
||||
} else if (player.videoEl.canPlayType('application/vnd.apple.mpegurl')) {
|
||||
// Native HLS (Safari)
|
||||
player.videoEl.src = player.proxyUrl;
|
||||
player.videoEl.addEventListener('loadedmetadata', () => {
|
||||
player.videoEl.play().catch(() => {});
|
||||
players[index] = { ...player, isPlaying: true };
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function togglePlay(index) {
|
||||
const player = players[index];
|
||||
if (!player?.videoEl) return;
|
||||
if (player.videoEl.paused) {
|
||||
player.videoEl.play().catch(() => {});
|
||||
players[index] = { ...player, isPlaying: true };
|
||||
} else {
|
||||
player.videoEl.pause();
|
||||
players[index] = { ...player, isPlaying: false };
|
||||
}
|
||||
}
|
||||
|
||||
function toggleMute(index) {
|
||||
const player = players[index];
|
||||
if (!player?.videoEl) return;
|
||||
const newMuted = !player.isMuted;
|
||||
player.videoEl.muted = newMuted;
|
||||
players[index] = { ...player, isMuted: newMuted };
|
||||
}
|
||||
|
||||
function setVolume(index, e) {
|
||||
const player = players[index];
|
||||
if (!player?.videoEl) return;
|
||||
const vol = parseFloat(e.target.value);
|
||||
player.videoEl.volume = vol;
|
||||
const muted = vol === 0;
|
||||
player.videoEl.muted = muted;
|
||||
players[index] = { ...player, volume: vol, isMuted: muted };
|
||||
}
|
||||
|
||||
function toggleFullscreen(index) {
|
||||
const player = players[index];
|
||||
if (!player?.containerEl) return;
|
||||
if (!document.fullscreenElement) {
|
||||
player.containerEl.requestFullscreen().catch(() => {});
|
||||
} else {
|
||||
document.exitFullscreen().catch(() => {});
|
||||
}
|
||||
}
|
||||
|
||||
let isFullscreen = $state(false);
|
||||
function onFullscreenChange() {
|
||||
isFullscreen = !!document.fullscreenElement;
|
||||
}
|
||||
|
||||
function onPlayerMouseMove(index) {
|
||||
const player = players[index];
|
||||
if (!player) return;
|
||||
if (player.controlsTimer) clearTimeout(player.controlsTimer);
|
||||
players[index] = { ...player, showControls: true };
|
||||
const timer = setTimeout(() => {
|
||||
if (players[index]?.isPlaying) {
|
||||
players[index] = { ...players[index], showControls: false };
|
||||
}
|
||||
}, 3000);
|
||||
players[index] = { ...players[index], controlsTimer: timer };
|
||||
}
|
||||
|
||||
/**
 * Latency badge color: green below 500ms, yellow below 1500ms, red otherwise.
 */
function responseTimeColor(ms) {
  if (ms >= 1500) return 'text-red-400';
  return ms >= 500 ? 'text-yellow-400' : 'text-green-400';
}
|
||||
</script>
|
||||
|
||||
<svelte:head>
|
||||
<title>F1 Stream - Watch{currentRace ? ` - ${currentRace.race_name}` : ''}</title>
|
||||
</svelte:head>
|
||||
|
||||
<div class="max-w-7xl mx-auto px-4 py-6">
|
||||
<!-- Session Info Header -->
|
||||
{#if currentRace && currentSession}
|
||||
<div class="mb-6">
|
||||
<p class="text-f1-text-muted text-sm uppercase tracking-wider">
|
||||
Round {currentRace.round} · {currentSession.name}
|
||||
</p>
|
||||
<h1 class="text-2xl font-bold text-white">{currentRace.race_name}</h1>
|
||||
<p class="text-f1-text-muted text-sm">{currentRace.circuit} · {currentRace.country}</p>
|
||||
</div>
|
||||
{:else}
|
||||
<h1 class="text-2xl font-bold text-white mb-6">Watch</h1>
|
||||
{/if}
|
||||
|
||||
<!-- Multi-Stream Players Grid -->
|
||||
{#if players.length > 0}
|
||||
<div class="grid {layoutClass} gap-2 mb-6">
|
||||
{#each players as player, i (player.id)}
|
||||
<div
|
||||
class="bg-black rounded-lg overflow-hidden relative group"
|
||||
bind:this={player.containerEl}
|
||||
onmousemove={() => onPlayerMouseMove(i)}
|
||||
role="region"
|
||||
aria-label="Video player {i + 1}"
|
||||
>
|
||||
<!-- Stream label -->
|
||||
<div class="absolute top-2 left-2 z-10 bg-black/60 rounded px-2 py-0.5 text-xs text-white">
|
||||
{player.siteName}{#if player.quality} · {player.quality}{/if}
|
||||
</div>
|
||||
|
||||
<!-- Close button -->
|
||||
<button
|
||||
onclick={() => removePlayer(i)}
|
||||
class="absolute top-2 right-2 z-10 bg-black/60 rounded-full w-6 h-6 flex items-center justify-center text-white hover:text-f1-red hover:bg-black/80 transition-colors"
|
||||
aria-label="Close stream"
|
||||
>
|
||||
<svg class="w-3.5 h-3.5" fill="currentColor" viewBox="0 0 24 24"><path d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"/></svg>
|
||||
</button>
|
||||
|
||||
<!-- Video -->
|
||||
<video
|
||||
bind:this={player.videoEl}
|
||||
class="w-full aspect-video bg-black"
|
||||
playsinline
|
||||
></video>
|
||||
|
||||
<!-- Controls Overlay -->
|
||||
<div class="absolute bottom-0 left-0 right-0 bg-gradient-to-t from-black/80 to-transparent px-3 py-2 transition-opacity duration-300 {player.showControls ? 'opacity-100' : 'opacity-0'}">
|
||||
<div class="flex items-center gap-2">
|
||||
<button onclick={() => togglePlay(i)} class="text-white hover:text-f1-red transition-colors" aria-label={player.isPlaying ? 'Pause' : 'Play'}>
|
||||
{#if player.isPlaying}
|
||||
<svg class="w-5 h-5" fill="currentColor" viewBox="0 0 24 24"><path d="M6 4h4v16H6V4zm8 0h4v16h-4V4z"/></svg>
|
||||
{:else}
|
||||
<svg class="w-5 h-5" fill="currentColor" viewBox="0 0 24 24"><path d="M8 5v14l11-7z"/></svg>
|
||||
{/if}
|
||||
</button>
|
||||
|
||||
<button onclick={() => toggleMute(i)} class="text-white hover:text-f1-red transition-colors" aria-label={player.isMuted ? 'Unmute' : 'Mute'}>
|
||||
{#if player.isMuted || player.volume === 0}
|
||||
<svg class="w-4 h-4" fill="currentColor" viewBox="0 0 24 24"><path d="M16.5 12c0-1.77-1.02-3.29-2.5-4.03v2.21l2.45 2.45c.03-.2.05-.41.05-.63zm2.5 0c0 .94-.2 1.82-.54 2.64l1.51 1.51C20.63 14.91 21 13.5 21 12c0-4.28-2.99-7.86-7-8.77v2.06c2.89.86 5 3.54 5 6.71zM4.27 3L3 4.27 7.73 9H3v6h4l5 5v-6.73l4.25 4.25c-.67.52-1.42.93-2.25 1.18v2.06c1.38-.31 2.63-.95 3.69-1.81L19.73 21 21 19.73l-9-9L4.27 3zM12 4L9.91 6.09 12 8.18V4z"/></svg>
|
||||
{:else}
|
||||
<svg class="w-4 h-4" fill="currentColor" viewBox="0 0 24 24"><path d="M3 9v6h4l5 5V4L7 9H3zm13.5 3c0-1.77-1.02-3.29-2.5-4.03v8.05c1.48-.73 2.5-2.25 2.5-4.02z"/></svg>
|
||||
{/if}
|
||||
</button>
|
||||
<input
|
||||
type="range" min="0" max="1" step="0.05"
|
||||
value={player.volume}
|
||||
oninput={(e) => setVolume(i, e)}
|
||||
class="w-16 h-1 accent-f1-red"
|
||||
aria-label="Volume"
|
||||
/>
|
||||
|
||||
<div class="flex-1"></div>
|
||||
|
||||
<button onclick={() => toggleFullscreen(i)} class="text-white hover:text-f1-red transition-colors" aria-label="Fullscreen">
|
||||
<svg class="w-4 h-4" fill="currentColor" viewBox="0 0 24 24"><path d="M7 14H5v5h5v-2H7v-3zm-2-4h2V7h3V5H5v5zm12 7h-3v2h5v-5h-2v3zM14 5v2h3v3h2V5h-5z"/></svg>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Error overlay -->
|
||||
{#if player.error}
|
||||
<div class="absolute bottom-12 left-2 right-2 bg-red-900/80 rounded px-2 py-1 text-xs text-red-300">
|
||||
{player.error}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- Stream List -->
|
||||
{#if loading}
|
||||
<div class="flex items-center justify-center py-20">
|
||||
<div class="w-8 h-8 border-2 border-f1-red border-t-transparent rounded-full animate-spin"></div>
|
||||
<span class="ml-3 text-f1-text-muted">Loading streams...</span>
|
||||
</div>
|
||||
{:else if errorMsg}
|
||||
<div class="bg-red-900/30 border border-red-700 rounded-lg p-4 text-center">
|
||||
<p class="text-red-300">Failed to load streams: {errorMsg}</p>
|
||||
<button onclick={loadData} class="mt-2 px-4 py-1 bg-f1-red text-white rounded text-sm hover:bg-f1-red-dark transition-colors">
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
{:else if streamsData}
|
||||
<div class="flex items-center justify-between mb-4">
|
||||
<h2 class="text-lg font-semibold text-white">
|
||||
Available Streams
|
||||
<span class="text-f1-text-muted font-normal text-sm ml-2">({streamsData.count})</span>
|
||||
</h2>
|
||||
<div class="flex items-center gap-4">
|
||||
{#if players.length > 0}
|
||||
<span class="text-xs text-f1-text-muted">{players.length}/{MAX_PLAYERS} streams active</span>
|
||||
{/if}
|
||||
<button onclick={loadData} class="text-xs text-f1-text-muted hover:text-white transition-colors uppercase tracking-wider">
|
||||
Refresh
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{#if streamsData.streams.length === 0}
|
||||
<div class="bg-f1-surface border border-f1-border rounded-lg p-8 text-center">
|
||||
<p class="text-f1-text-muted">No streams available right now.</p>
|
||||
<p class="text-f1-text-muted text-sm mt-2">Streams appear when a session is live. Check the schedule for upcoming sessions.</p>
|
||||
<a href="/" class="inline-block mt-4 px-4 py-2 bg-f1-surface-hover border border-f1-border rounded text-sm text-white hover:border-f1-red transition-colors">
|
||||
View Schedule
|
||||
</a>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="space-y-2">
|
||||
{#each streamsData.streams as stream, i}
|
||||
{@const active = isStreamActive(stream.url)}
|
||||
<div class="bg-f1-surface border rounded-lg px-4 py-3 flex items-center gap-4 {active ? 'border-f1-red' : 'border-f1-border hover:border-f1-border'}">
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="flex items-center gap-2">
|
||||
<span class="text-sm font-medium text-white truncate">{stream.site_name || stream.site_key || 'Unknown'}</span>
|
||||
{#if stream.is_live}
|
||||
<span class="text-[10px] font-bold uppercase px-1.5 py-0.5 rounded bg-f1-red text-white">Live</span>
|
||||
{/if}
|
||||
{#if active}
|
||||
<span class="text-[10px] font-bold uppercase px-1.5 py-0.5 rounded bg-green-600 text-white">Playing</span>
|
||||
{/if}
|
||||
</div>
|
||||
<div class="flex items-center gap-3 mt-1 text-xs text-f1-text-muted">
|
||||
{#if stream.title}
|
||||
<span class="truncate">{stream.title}</span>
|
||||
{/if}
|
||||
{#if stream.quality}
|
||||
<span>{stream.quality}</span>
|
||||
{/if}
|
||||
{#if stream.response_time_ms != null}
|
||||
<span class={responseTimeColor(stream.response_time_ms)}>
|
||||
{stream.response_time_ms}ms
|
||||
</span>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center gap-2">
|
||||
{#if !active}
|
||||
<button
|
||||
onclick={() => playStream(stream)}
|
||||
class="px-4 py-1.5 rounded text-sm font-medium bg-f1-red text-white hover:bg-f1-red-dark transition-colors"
|
||||
>
|
||||
{players.length > 0 ? 'Add' : 'Watch'}
|
||||
</button>
|
||||
{:else}
|
||||
<span class="text-xs text-green-400">Active</span>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
{/if}
|
||||
</div>
|
||||
19
stacks/f1-stream/files/frontend/svelte.config.js
Normal file
19
stacks/f1-stream/files/frontend/svelte.config.js
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
import adapter from '@sveltejs/adapter-static';

/** @type {import('@sveltejs/kit').Config} */
const config = {
	kit: {
		adapter: adapter({
			// Emit pages and assets into ./build (the Dockerfile copies this
			// directory into the backend image).
			pages: 'build',
			assets: 'build',
			// SPA fallback: unknown paths serve index.html so client-side
			// routing can resolve them.
			fallback: 'index.html',
			precompress: false,
			// Fail the build rather than silently skipping unprerenderable routes.
			strict: true
		}),
		paths: {
			// Served from the site root — no base path prefix.
			base: ''
		}
	}
};

export default config;
|
||||
10
stacks/f1-stream/files/frontend/vite.config.js
Normal file
10
stacks/f1-stream/files/frontend/vite.config.js
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
import { sveltekit } from '@sveltejs/kit/vite';
import tailwindcss from '@tailwindcss/vite';
import { defineConfig } from 'vite';

export default defineConfig({
	plugins: [
		// Tailwind v4 integrates via its dedicated Vite plugin.
		tailwindcss(),
		sveltekit()
	]
});
|
||||
|
|
@ -36,8 +36,9 @@ resource "kubernetes_deployment" "f1-stream" {
|
|||
}
|
||||
spec {
|
||||
container {
|
||||
image = "viktorbarzin/f1-stream:v3.0.0"
|
||||
name = "f1-stream"
|
||||
image = "viktorbarzin/f1-stream:latest"
|
||||
image_pull_policy = "Always"
|
||||
name = "f1-stream"
|
||||
resources {
|
||||
limits = {
|
||||
cpu = "500m"
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue