[ci skip] Add native HLS playback for VIPLeague/DaddyLive streams (v1.3.1)

- Add HLS proxy (hlsproxy) for rewriting m3u8 playlists and proxying
  segments with correct Referer/Origin headers (uses ?domain= param)
- Add playerconfig service for detecting stream types (VIPLeague,
  DaddyLive, HLS) and extracting auth params from ksohls pages
- Add VIPLeague URL resolution: extract slug from URL path, match
  against DaddyLive 24/7 channel index with token-based scoring
- Replace Clappr with direct HLS.js player for better compatibility
- Add CryptoJS CDN for DaddyLive auth module support
- Disable CrowdSec on f1-stream ingress to prevent false positives
- Bump image to v1.3.1
This commit is contained in:
Viktor Barzin 2026-02-22 01:30:06 +00:00
parent a5f9c1595f
commit 0ff2aaec60
10 changed files with 1049 additions and 51 deletions

View file

@ -0,0 +1,209 @@
package hlsproxy
import (
	"bufio"
	"encoding/base64"
	"io"
	"log"
	"net/http"
	"net/url"
	"strings"
	"time"
)
// NewHandler returns an http.Handler for /hls/{base64url_encoded_full_url}.
// It proxies HLS playlists and segments, rewriting m3u8 URLs to route
// through the proxy and forwarding X-Hls-Forward-* headers upstream.
//
// Referer/Origin spoofing: the upstream URL's own origin is used by default;
// if the upstream URL carries a ?domain= query parameter (added for CDN
// segment hosts), that domain is used instead so the CDN accepts the request.
func NewHandler() http.Handler {
	client := &http.Client{
		Timeout: 30 * time.Second, // was a raw 30_000_000_000 ns literal
		// Follow at most 5 redirects; beyond that, serve whatever the
		// last hop returned instead of failing the request.
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			if len(via) >= 5 {
				return http.ErrUseLastResponse
			}
			return nil
		},
	}
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// CORS preflight for browser video players.
		if r.Method == http.MethodOptions {
			setCORS(w)
			w.WriteHeader(http.StatusNoContent)
			return
		}
		// Parse: /hls/{base64url_encoded_full_url}
		trimmed := strings.TrimPrefix(r.URL.Path, "/hls/")
		if trimmed == "" || trimmed == r.URL.Path {
			http.Error(w, "bad hls proxy URL", http.StatusBadRequest)
			return
		}
		// Decode the full upstream URL from base64url.
		upstreamURL, err := base64.RawURLEncoding.DecodeString(trimmed)
		if err != nil {
			http.Error(w, "invalid base64url", http.StatusBadRequest)
			return
		}
		target := string(upstreamURL)
		parsed, err := url.Parse(target)
		if err != nil || (parsed.Scheme != "http" && parsed.Scheme != "https") {
			// Only http(s) upstreams are allowed, so the endpoint cannot be
			// abused as a scheme-agnostic open proxy.
			http.Error(w, "invalid upstream URL", http.StatusBadRequest)
			return
		}
		log.Printf("hlsproxy: %s -> %s", r.URL.Path, target)
		upReq, err := http.NewRequestWithContext(r.Context(), http.MethodGet, target, nil)
		if err != nil {
			http.Error(w, "failed to create request", http.StatusInternalServerError)
			return
		}
		// Set Referer and Origin. If the URL has a ?domain= param (CDN segments),
		// use that domain as the origin so the CDN accepts the request.
		refererOrigin := parsed.Scheme + "://" + parsed.Host
		if domainParam := parsed.Query().Get("domain"); domainParam != "" {
			refererOrigin = "https://" + domainParam
		}
		upReq.Header.Set("Referer", refererOrigin+"/")
		upReq.Header.Set("Origin", refererOrigin)
		upReq.Header.Set("User-Agent", r.Header.Get("User-Agent"))
		// Forward X-Hls-Forward-* headers upstream with the prefix stripped
		// (the player's auth shim smuggles auth headers through this way).
		for key, vals := range r.Header {
			if strings.HasPrefix(key, "X-Hls-Forward-") {
				realKey := strings.TrimPrefix(key, "X-Hls-Forward-")
				for _, v := range vals {
					upReq.Header.Set(realKey, v)
				}
			}
		}
		resp, err := client.Do(upReq)
		if err != nil {
			log.Printf("hlsproxy: upstream fetch failed: %v", err)
			http.Error(w, "upstream fetch failed", http.StatusBadGateway)
			return
		}
		defer resp.Body.Close()
		log.Printf("hlsproxy: %s <- %d (%s)", truncPath(r.URL.Path, 60), resp.StatusCode, resp.Header.Get("Content-Type"))
		setCORS(w)
		// Playlists must be rewritten so every URL routes back through /hls/;
		// detect them by Content-Type or by the .m3u8 path suffix.
		ct := resp.Header.Get("Content-Type")
		isM3U8 := strings.Contains(ct, "mpegurl") ||
			strings.Contains(ct, "x-mpegURL") ||
			strings.HasSuffix(parsed.Path, ".m3u8")
		if isM3U8 {
			w.Header().Set("Content-Type", "application/vnd.apple.mpegurl")
			w.WriteHeader(resp.StatusCode)
			rewriteM3U8(w, resp.Body, target)
			return
		}
		// Stream segment or other content directly, copying only a safe
		// whitelist of response headers.
		for key, vals := range resp.Header {
			lk := strings.ToLower(key)
			if lk == "content-type" || lk == "content-length" || lk == "cache-control" || lk == "accept-ranges" {
				for _, v := range vals {
					w.Header().Add(key, v)
				}
			}
		}
		w.WriteHeader(resp.StatusCode)
		// Best-effort copy: client disconnects mid-stream are expected, but a
		// previously-ignored error is now at least visible in the logs.
		if _, err := io.Copy(w, resp.Body); err != nil {
			log.Printf("hlsproxy: segment copy interrupted: %v", err)
		}
	})
}
// rewriteM3U8 reads an m3u8 playlist from r, rewrites segment/playlist URLs
// to route through /hls/{b64}, and writes the result to w.
//
// Directive lines (#EXT-...) keep their text but get any URI="..." attribute
// rewritten (e.g. #EXT-X-KEY, #EXT-X-MAP); every other non-blank line is
// treated as a segment/variant URL, resolved against the playlist URL, and
// re-encoded as a proxy path.
func rewriteM3U8(w io.Writer, r io.Reader, playlistURL string) {
	base, err := url.Parse(playlistURL)
	if err != nil {
		// Without a valid base URL we cannot resolve relative entries;
		// pass the playlist through untouched.
		io.Copy(w, r)
		return
	}
	scanner := bufio.NewScanner(r)
	// Allow playlist lines up to 1MB (long signed CDN URLs).
	scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, "#") {
			// Rewrite URI="..." in directives like #EXT-X-KEY, #EXT-X-MAP
			rewritten := rewriteURIAttribute(line, base)
			w.Write([]byte(rewritten))
			w.Write([]byte("\n"))
			continue
		}
		// Non-comment, non-empty lines are URLs
		trimmed := strings.TrimSpace(line)
		if trimmed == "" {
			w.Write([]byte("\n"))
			continue
		}
		resolved := resolveURL(base, trimmed)
		encoded := encodeHLSURL(resolved)
		w.Write([]byte(encoded))
		w.Write([]byte("\n"))
	}
	// Previously a scan failure (a line over the 1MB cap, or an upstream
	// read error) silently truncated the playlist with no trace; log it.
	if err := scanner.Err(); err != nil {
		log.Printf("hlsproxy: error scanning playlist %s: %v", playlistURL, err)
	}
}
// rewriteURIAttribute rewrites URI="..." attributes in HLS directives.
func rewriteURIAttribute(line string, base *url.URL) string {
// Look for URI="..." (case insensitive)
uriIdx := strings.Index(strings.ToUpper(line), "URI=\"")
if uriIdx == -1 {
return line
}
// Find the actual position (preserving original case)
prefix := line[:uriIdx+5] // everything up to and including URI="
rest := line[uriIdx+5:]
endQuote := strings.Index(rest, "\"")
if endQuote == -1 {
return line
}
uri := rest[:endQuote]
suffix := rest[endQuote:] // closing quote and anything after
resolved := resolveURL(base, uri)
encoded := encodeHLSURL(resolved)
return prefix + encoded + suffix
}
// resolveURL resolves a potentially relative URL against a base URL.
func resolveURL(base *url.URL, ref string) string {
refURL, err := url.Parse(ref)
if err != nil {
return ref
}
return base.ResolveReference(refURL).String()
}
// encodeHLSURL encodes a full URL into the /hls/{base64url} proxy path
// understood by NewHandler (unpadded base64url of the complete URL).
func encodeHLSURL(fullURL string) string {
	return "/hls/" + base64.RawURLEncoding.EncodeToString([]byte(fullURL))
}
func setCORS(w http.ResponseWriter) {
w.Header().Set("Access-Control-Allow-Origin", "*")
w.Header().Set("Access-Control-Allow-Methods", "GET, OPTIONS")
w.Header().Set("Access-Control-Allow-Headers", "*")
}
// truncPath shortens s to at most n bytes for log output, appending "..."
// when truncation occurs. n counts bytes, so a multi-byte rune may be split
// at the cut point — acceptable for logging purposes.
func truncPath(s string, n int) string {
	if len(s) > n {
		return s[:n] + "..."
	}
	return s
}

View file

@ -0,0 +1,512 @@
package playerconfig
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"regexp"
"strings"
"sync"
"time"
)
// PlayerConfig is returned by the /api/streams/{id}/player-config endpoint.
// Type selects the frontend playback strategy ("hls", "daddylive", or
// "proxy" as the generic fallback). The auth/channel/server fields are only
// populated for DaddyLive streams and mirror values scraped from the ksohls
// embed page and the server_lookup endpoint.
type PlayerConfig struct {
	Type        string `json:"type"`                   // "hls", "daddylive", or "proxy"
	HLSURL      string `json:"hls_url,omitempty"`      // proxied /hls/{b64} playlist path
	AuthToken   string `json:"auth_token,omitempty"`   // scraped from the ksohls page
	ChannelKey  string `json:"channel_key,omitempty"`  // upstream channel identifier
	ChannelSalt string `json:"channel_salt,omitempty"` // scraped auth salt
	Timestamp   string `json:"timestamp,omitempty"`    // scraped auth timestamp
	AuthModURL  string `json:"auth_mod_url,omitempty"` // external auth-module script URL
	ServerKey   string `json:"server_key,omitempty"`   // from the server_lookup endpoint
	Error       string `json:"error,omitempty"`        // human-readable failure reason
}
// cacheEntry pairs a cached PlayerConfig with its expiry instant.
type cacheEntry struct {
	config    *PlayerConfig
	expiresAt time.Time // entries past this instant are refetched
}

// Service handles stream type detection and DaddyLive config extraction.
// Results (including failures) are cached in memory to avoid re-scraping
// upstream pages on every request; cache is guarded by mu.
type Service struct {
	client *http.Client
	mu     sync.RWMutex
	cache  map[string]*cacheEntry // keyed by stream URL, plus one sentinel key for the channel index
}
// New creates a new playerconfig Service with a 15-second HTTP timeout and
// an empty in-memory config cache.
func New() *Service {
	c := &http.Client{Timeout: 15 * time.Second}
	return &Service{
		client: c,
		cache:  map[string]*cacheEntry{},
	}
}
// DetectStreamType returns "hls", "daddylive", "vipleague", or "proxy"
// based on the URL. Matching is case-insensitive: the .m3u8 check ignores
// any query string, and the site checks are plain substring matches.
func DetectStreamType(rawURL string) string {
	lower := strings.ToLower(rawURL)
	pathOnly, _, _ := strings.Cut(lower, "?")
	if strings.HasSuffix(pathOnly, ".m3u8") {
		return "hls"
	}
	matchesAny := func(needles ...string) bool {
		for _, n := range needles {
			if strings.Contains(lower, n) {
				return true
			}
		}
		return false
	}
	if matchesAny("dlhd.link", "dlhd.sx", "dlhd.dad", "daddylive", "ksohls.ru") {
		return "daddylive"
	}
	if matchesAny("vipleague.io", "vipleague.im", "vipleague.cc", "casthill.net") {
		return "vipleague"
	}
	return "proxy"
}
// GetConfig returns a PlayerConfig for the given stream URL, dispatching on
// the detected stream type. Plain HLS URLs are wrapped into a proxied
// /hls/{b64} path directly; DaddyLive and VIPLeague URLs go through their
// respective (cached) resolution flows; everything else falls back to the
// generic iframe proxy.
func (s *Service) GetConfig(ctx context.Context, rawURL string) *PlayerConfig {
	switch DetectStreamType(rawURL) {
	case "hls":
		return &PlayerConfig{
			Type:   "hls",
			HLSURL: "/hls/" + base64.RawURLEncoding.EncodeToString([]byte(rawURL)),
		}
	case "daddylive":
		return s.getDaddyLiveConfig(ctx, rawURL)
	case "vipleague":
		return s.getVIPLeagueConfig(ctx, rawURL)
	}
	return &PlayerConfig{Type: "proxy"}
}
// Channel ID extraction patterns, tried in order against a DaddyLive URL.
var channelIDPatterns = []*regexp.Regexp{
	regexp.MustCompile(`stream-(\d+)\.php`), // e.g. /cast/stream-123.php
	regexp.MustCompile(`[?&]id=(\d+)`),      // e.g. watch.php?id=123
	regexp.MustCompile(`/(\d+)\.php`),       // e.g. /123.php
}

// Page content extraction patterns for the DaddyLive cast page and the
// embedded ksohls player page.
var (
	// iframeRe finds the ksohls.ru player iframe inside the cast page.
	iframeRe = regexp.MustCompile(`<iframe[^>]*src=["'](https?://[^"']*ksohls\.ru[^"']*)["']`)
	// These pull auth parameters out of inline JS on the ksohls page.
	authTokenRe   = regexp.MustCompile(`authToken\s*[:=]\s*['"]([^'"]+)['"]`)
	channelKeyRe  = regexp.MustCompile(`channelKey\s*[:=]\s*['"]([^'"]+)['"]`)
	channelSaltRe = regexp.MustCompile(`channelSalt\s*[:=]\s*['"]([^'"]+)['"]`)
	timestampRe   = regexp.MustCompile(`timestamp\s*[:=]\s*['"]?(\d+)['"]?`)
	// authModRe finds the external obfuscated auth-module <script> tag.
	authModRe = regexp.MustCompile(`<script[^>]*src=["'](https?://[^"']*aiaged\.fun[^"']*obfuscated[^"']*)["']`)
)
// getDaddyLiveConfig returns the (possibly cached) DaddyLive config for
// rawURL. Both successes and failures are cached for one hour so a broken
// upstream is not re-scraped on every request.
func (s *Service) getDaddyLiveConfig(ctx context.Context, rawURL string) *PlayerConfig {
	// Serve from cache when a fresh entry exists.
	s.mu.RLock()
	entry, hit := s.cache[rawURL]
	s.mu.RUnlock()
	if hit && time.Now().Before(entry.expiresAt) {
		return entry.config
	}
	cfg := s.fetchDaddyLiveConfig(ctx, rawURL)
	// Cache the result (even errors, to avoid hammering).
	s.mu.Lock()
	s.cache[rawURL] = &cacheEntry{config: cfg, expiresAt: time.Now().Add(time.Hour)}
	s.mu.Unlock()
	return cfg
}
// fetchDaddyLiveConfig extracts the numeric channel ID from rawURL and
// delegates to fetchDaddyLiveConfigByID. Falls back to the generic proxy
// config when no known URL pattern matches.
func (s *Service) fetchDaddyLiveConfig(ctx context.Context, rawURL string) *PlayerConfig {
	var channelID string
	for _, re := range channelIDPatterns {
		if m := re.FindStringSubmatch(rawURL); len(m) > 1 {
			channelID = m[1]
			break
		}
	}
	if channelID == "" {
		return &PlayerConfig{Type: "proxy", Error: "could not extract channel ID"}
	}
	log.Printf("playerconfig: DaddyLive channel=%s from %s", channelID, rawURL)
	return s.fetchDaddyLiveConfigByID(ctx, channelID)
}
// fetchDaddyLiveConfigByID runs the full DaddyLive resolution chain for a
// numeric channel ID:
//
//	cast page -> ksohls iframe -> auth params -> server lookup -> m3u8 URL
//
// On any failure it degrades to a {Type: "proxy"} config carrying a short
// Error description so the frontend can fall back to the iframe proxy.
func (s *Service) fetchDaddyLiveConfigByID(ctx context.Context, channelID string) *PlayerConfig {
	// Single source of truth for the lookup/CDN host (previously duplicated
	// inline in two format strings).
	const cdnBase = "https://chevy.soyspace.cyou"
	// Step 2: Fetch the cast page to find the ksohls iframe
	castURL := fmt.Sprintf("https://dlhd.link/cast/stream-%s.php", channelID)
	castBody, err := s.fetchPage(ctx, castURL, "https://dlhd.link/")
	if err != nil {
		log.Printf("playerconfig: failed to fetch cast page: %v", err)
		return &PlayerConfig{Type: "proxy", Error: "failed to fetch cast page"}
	}
	// Step 3: Extract ksohls iframe URL
	iframeMatch := iframeRe.FindStringSubmatch(castBody)
	if iframeMatch == nil {
		log.Printf("playerconfig: no ksohls iframe found in cast page")
		return &PlayerConfig{Type: "proxy", Error: "no ksohls iframe found"}
	}
	ksohURL := iframeMatch[1]
	// Step 4: Fetch the ksohls page, sending the stream page as Referer.
	referer := fmt.Sprintf("https://dlhd.link/stream/stream-%s.php", channelID)
	ksohBody, err := s.fetchPage(ctx, ksohURL, referer)
	if err != nil {
		log.Printf("playerconfig: failed to fetch ksohls page: %v", err)
		return &PlayerConfig{Type: "proxy", Error: "failed to fetch ksohls page"}
	}
	// Step 5: Extract auth params from ksohls page. Missing optional params
	// are tolerated; only channelKey is mandatory.
	config := &PlayerConfig{Type: "daddylive"}
	if m := authTokenRe.FindStringSubmatch(ksohBody); len(m) > 1 {
		config.AuthToken = m[1]
	}
	if m := channelKeyRe.FindStringSubmatch(ksohBody); len(m) > 1 {
		config.ChannelKey = m[1]
	}
	if m := channelSaltRe.FindStringSubmatch(ksohBody); len(m) > 1 {
		config.ChannelSalt = m[1]
	}
	if m := timestampRe.FindStringSubmatch(ksohBody); len(m) > 1 {
		config.Timestamp = m[1]
	}
	if m := authModRe.FindStringSubmatch(ksohBody); len(m) > 1 {
		config.AuthModURL = m[1]
	}
	if config.ChannelKey == "" {
		log.Printf("playerconfig: no channelKey found in ksohls page")
		return &PlayerConfig{Type: "proxy", Error: "no channelKey found"}
	}
	// Step 6: Server lookup maps the channel key to a CDN server key.
	lookupURL := fmt.Sprintf("%s/server_lookup?channel_id=%s", cdnBase, config.ChannelKey)
	lookupBody, err := s.fetchPage(ctx, lookupURL, "")
	if err != nil {
		log.Printf("playerconfig: server lookup failed: %v", err)
		return &PlayerConfig{Type: "proxy", Error: "server lookup failed"}
	}
	var lookupResp struct {
		ServerKey string `json:"server_key"`
	}
	if err := json.Unmarshal([]byte(lookupBody), &lookupResp); err != nil || lookupResp.ServerKey == "" {
		log.Printf("playerconfig: failed to parse server lookup: %v body=%s", err, lookupBody)
		return &PlayerConfig{Type: "proxy", Error: "server lookup parse failed"}
	}
	config.ServerKey = lookupResp.ServerKey
	// Step 7: Build the final mono.m3u8 URL and wrap it in the /hls/ proxy.
	m3u8URL := fmt.Sprintf("%s/proxy/%s/%s/mono.m3u8", cdnBase, config.ServerKey, config.ChannelKey)
	config.HLSURL = "/hls/" + base64.RawURLEncoding.EncodeToString([]byte(m3u8URL))
	log.Printf("playerconfig: DaddyLive config ready channel=%s server=%s", config.ChannelKey, config.ServerKey)
	return config
}
// VIPLeague/casthill resolution

// zmidRe extracts the `zmid` stream identifier from inline JS on a
// VIPLeague page (declared via const/var/let).
var zmidRe = regexp.MustCompile(`(?:const|var|let)\s+zmid\s*=\s*["']([^"']+)["']`)

// casthillVRe extracts the ?v= query parameter from a casthill embed URL,
// which carries the same identifier.
var casthillVRe = regexp.MustCompile(`[?&]v=([^&]+)`)
// getVIPLeagueConfig returns the (possibly cached) config for a VIPLeague
// stream URL. Results — including failures — are cached for one hour.
func (s *Service) getVIPLeagueConfig(ctx context.Context, rawURL string) *PlayerConfig {
	s.mu.RLock()
	entry, hit := s.cache[rawURL]
	s.mu.RUnlock()
	if hit && time.Now().Before(entry.expiresAt) {
		return entry.config
	}
	cfg := s.fetchVIPLeagueConfig(ctx, rawURL)
	s.mu.Lock()
	s.cache[rawURL] = &cacheEntry{config: cfg, expiresAt: time.Now().Add(time.Hour)}
	s.mu.Unlock()
	return cfg
}
// fetchVIPLeagueConfig resolves a VIPLeague/casthill URL to a DaddyLive
// config. The zmid identifier is looked for, in order: in a casthill ?v=
// query param, in the page's inline JS, and finally as a slug derived from
// the URL path; it is then matched against the DaddyLive channel index.
func (s *Service) fetchVIPLeagueConfig(ctx context.Context, rawURL string) *PlayerConfig {
	var zmid string
	// 1) casthill embed URLs carry the identifier as ?v=...
	if strings.Contains(strings.ToLower(rawURL), "casthill.net") {
		if m := casthillVRe.FindStringSubmatch(rawURL); len(m) > 1 {
			zmid = m[1]
		}
	}
	// 2) Otherwise scrape the VIPLeague page for a zmid JS variable.
	if zmid == "" {
		if body, err := s.fetchPage(ctx, rawURL, ""); err != nil {
			log.Printf("playerconfig: failed to fetch VIPLeague page: %v, trying URL-based extraction", err)
		} else if m := zmidRe.FindStringSubmatch(body); len(m) > 1 {
			zmid = m[1]
		}
	}
	// 3) Fall back to a slug derived from the URL path,
	// e.g. /f-1/sky-sports-f1-streaming → "sky sports f1".
	if zmid == "" {
		if zmid = extractSlugFromURL(rawURL); zmid != "" {
			log.Printf("playerconfig: extracted slug %q from URL path", zmid)
		}
	}
	if zmid == "" {
		log.Printf("playerconfig: no zmid found for VIPLeague URL %s", rawURL)
		return &PlayerConfig{Type: "proxy", Error: "no zmid found in VIPLeague page"}
	}
	log.Printf("playerconfig: VIPLeague zmid=%q from %s", zmid, rawURL)
	channelID, err := s.resolveChannelID(ctx, zmid)
	if err != nil {
		log.Printf("playerconfig: failed to resolve zmid %q: %v", zmid, err)
		return &PlayerConfig{Type: "proxy", Error: fmt.Sprintf("failed to resolve zmid: %v", err)}
	}
	log.Printf("playerconfig: resolved zmid=%q to DaddyLive channel=%s", zmid, channelID)
	return s.fetchDaddyLiveConfigByID(ctx, channelID)
}
// extractSlugFromURL extracts a channel-matching slug from a VIPLeague URL path.
// e.g. "https://vipleague.io/f-1/sky-sports-f1-streaming" → "sky sports f1"
// It strips the query string AND fragment, drops common suffixes like
// "-streaming", "-live-stream", "-live", etc., and converts hyphens to
// spaces. Returns "" when no usable slug (3+ characters) can be derived.
func extractSlugFromURL(rawURL string) string {
	// Drop query string and fragment. (The fragment was previously kept,
	// which polluted the slug for URLs like ".../foo-streaming#player".)
	path := rawURL
	if idx := strings.IndexAny(path, "?#"); idx != -1 {
		path = path[:idx]
	}
	// Get the last path segment
	path = strings.TrimRight(path, "/")
	lastSlash := strings.LastIndex(path, "/")
	if lastSlash == -1 {
		return ""
	}
	slug := path[lastSlash+1:]
	// Strip common suffixes
	for _, suffix := range []string{"-streaming", "-live-stream", "-stream", "-live", "-online", "-free"} {
		slug = strings.TrimSuffix(slug, suffix)
	}
	// Replace hyphens with spaces for matching against channel names
	slug = strings.ReplaceAll(slug, "-", " ")
	slug = strings.TrimSpace(slug)
	if len(slug) < 3 {
		// Too short to match anything meaningfully (also covers "").
		return ""
	}
	return slug
}
// channelLinkRe and channelLinkRe2 match 24/7 channel links in the DaddyLive
// index page, covering both attribute orderings (href before data-title and
// data-title before href). Capture groups: one is the numeric channel ID,
// the other the display title.
var channelLinkRe = regexp.MustCompile(`<a[^>]*href=["'][^"']*watch\.php\?id=(\d+)["'][^>]*data-title=["']([^"']+)["']`)
var channelLinkRe2 = regexp.MustCompile(`<a[^>]*data-title=["']([^"']+)["'][^>]*href=["'][^"']*watch\.php\?id=(\d+)["']`)
// resolveChannelID matches a zmid slug against the DaddyLive 24/7 channel
// index and returns the best-scoring channel ID. Scoring counts how many
// slug tokens appear in a channel name; ties are broken in favour of the
// shorter (more specific) name.
func (s *Service) resolveChannelID(ctx context.Context, zmid string) (string, error) {
	channels, err := s.getChannelIndex(ctx)
	if err != nil {
		return "", err
	}
	lowered := strings.ToLower(zmid)
	// Space-separated slugs split on whitespace (keeping words of 2+ chars);
	// compact slugs like "skyf1" go through the known-token tokenizer.
	var tokens []string
	if strings.Contains(lowered, " ") {
		for _, word := range strings.Fields(lowered) {
			if len(word) >= 2 {
				tokens = append(tokens, word)
			}
		}
	} else {
		tokens = tokenize(lowered)
	}
	var (
		bestID      string
		bestScore   int
		bestNameLen int
	)
	for id, name := range channels {
		score := 0
		for _, tok := range tokens {
			if strings.Contains(name, tok) {
				score++
			}
		}
		betterScore := score > bestScore
		betterTie := score == bestScore && score > 0 && len(name) < bestNameLen
		if betterScore || betterTie {
			bestID, bestScore, bestNameLen = id, score, len(name)
		}
	}
	if bestID == "" || bestScore == 0 {
		return "", fmt.Errorf("no channel matched zmid %q (tried %d channels)", zmid, len(channels))
	}
	log.Printf("playerconfig: zmid=%q matched channel %s (%s) with score %d/%d",
		zmid, bestID, channels[bestID], bestScore, len(tokens))
	return bestID, nil
}
// getChannelIndex fetches (and caches for 6 hours) the DaddyLive 24/7
// channel index as a map of channel ID -> lowercased channel name.
//
// The index is stored in the shared config cache under a sentinel key,
// serialized as JSON into the Error field of a fake PlayerConfig entry.
func (s *Service) getChannelIndex(ctx context.Context) (map[string]string, error) {
	const cacheKey = "__channel_index__"
	// Copy the entry out under the read lock and release it exactly once.
	// (The previous version called RUnlock a second time after a cache hit
	// whose JSON failed to unmarshal, which panics with
	// "sync: RUnlock of unlocked RWMutex".)
	s.mu.RLock()
	entry, ok := s.cache[cacheKey]
	s.mu.RUnlock()
	if ok && time.Now().Before(entry.expiresAt) {
		var idx map[string]string
		if err := json.Unmarshal([]byte(entry.config.Error), &idx); err == nil {
			return idx, nil
		}
		// Corrupt cache entry: fall through and refetch.
	}
	body, err := s.fetchPage(ctx, "https://dlhd.link/24-7-channels.php", "https://dlhd.link/")
	if err != nil {
		return nil, fmt.Errorf("failed to fetch channel index: %w", err)
	}
	channels := make(map[string]string)
	// Try both attribute orderings
	for _, m := range channelLinkRe.FindAllStringSubmatch(body, -1) {
		channels[m[1]] = strings.ToLower(strings.TrimSpace(m[2]))
	}
	for _, m := range channelLinkRe2.FindAllStringSubmatch(body, -1) {
		channels[m[2]] = strings.ToLower(strings.TrimSpace(m[1]))
	}
	if len(channels) == 0 {
		return nil, fmt.Errorf("no channels found in 24/7 page (%d bytes)", len(body))
	}
	log.Printf("playerconfig: loaded %d channels from DaddyLive 24/7 page", len(channels))
	// Cache as JSON in a fake PlayerConfig entry
	encoded, _ := json.Marshal(channels)
	s.mu.Lock()
	s.cache[cacheKey] = &cacheEntry{
		config:    &PlayerConfig{Error: string(encoded)},
		expiresAt: time.Now().Add(6 * time.Hour),
	}
	s.mu.Unlock()
	return channels, nil
}
// knownSlugTokens lists prefixes/suffixes commonly found in sports streaming
// slugs. Hoisted to package scope so tokenize does not rebuild the slice on
// every call. Order matters: candidates are tried first to last and the
// first prefix match wins.
var knownSlugTokens = []string{
	"sky", "sports", "f1", "dazn", "espn", "fox", "bein", "bt",
	"star", "nbc", "cbs", "tnt", "abc", "tsn", "supersport",
	"canal", "rtl", "viaplay", "premier", "main", "event",
	"arena", "action", "cricket", "football", "tennis", "golf",
	"racing", "news", "extra", "max", "hd", "uhd",
}

// tokenize splits a zmid slug into meaningful tokens.
// e.g. "skyf1" -> ["sky", "f1"], "daznf1" -> ["dazn", "f1"]
// Runs of digits become their own tokens (channel numbers); unrecognized
// characters are skipped one at a time. If nothing matches at all, the
// whole slug is returned as a single token.
func tokenize(zmid string) []string {
	var tokens []string
	remaining := zmid
	for len(remaining) > 0 {
		matched := false
		for _, tok := range knownSlugTokens {
			if strings.HasPrefix(remaining, tok) {
				tokens = append(tokens, tok)
				remaining = remaining[len(tok):]
				matched = true
				break
			}
		}
		if !matched {
			// Try numeric suffix (like channel numbers)
			i := 0
			for i < len(remaining) && remaining[i] >= '0' && remaining[i] <= '9' {
				i++
			}
			if i > 0 {
				tokens = append(tokens, remaining[:i])
				remaining = remaining[i:]
			} else {
				// Skip single character and try again
				remaining = remaining[1:]
			}
		}
	}
	// If tokenization produced nothing useful, use the whole zmid as a single token
	if len(tokens) == 0 {
		tokens = []string{zmid}
	}
	return tokens
}
// fetchPage performs a GET with a desktop-browser User-Agent (and an
// optional Referer), returning the body as a string. Any status other than
// 200 OK is an error, and bodies are capped at 2MB.
func (s *Service) fetchPage(ctx context.Context, pageURL, referer string) (string, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, pageURL, nil)
	if err != nil {
		return "", err
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36")
	if referer != "" {
		req.Header.Set("Referer", referer)
	}
	resp, err := s.client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("status %d from %s", resp.StatusCode, pageURL)
	}
	const maxBody = 2 * 1024 * 1024 // 2MB cap on scraped pages
	data, err := io.ReadAll(io.LimitReader(resp.Body, maxBody))
	if err != nil {
		return "", err
	}
	return string(data), nil
}

View file

@ -85,7 +85,7 @@ var _ce=document.createElement.bind(document);
document.createElement=function(t){
var el=_ce(t);
var tag=t.toLowerCase();
if(tag==='script'||tag==='iframe'||tag==='img'||tag==='link'||tag==='source'||tag==='video'||tag==='audio'){
if(tag==='script'||tag==='img'||tag==='link'||tag==='source'||tag==='video'||tag==='audio'){
var _ss=Object.getOwnPropertyDescriptor(HTMLElement.prototype,'src')||Object.getOwnPropertyDescriptor(el.__proto__,'src');
if(_ss&&_ss.set){Object.defineProperty(el,'src',{get:function(){return _ss.get?_ss.get.call(this):'';},set:function(v){_ss.set.call(this,rw(v));},configurable:true});}
}
@ -316,9 +316,6 @@ var rootRelativeAttrRe = regexp.MustCompile(`((?:src|href|action|poster|data)\s*
// Matches url("/...") or url('/...') or url(/...) in inline styles — but NOT url("//...")
var rootRelativeCSSRe = regexp.MustCompile(`(url\(\s*["']?)/([^/"')[^"')]*)(["']?\s*\))`)
// crossOriginIframeSrcRe matches <iframe src="https://..."> to proxy cross-origin embeds.
var crossOriginIframeSrcRe = regexp.MustCompile(`(<iframe[^>]*\ssrc\s*=\s*["'])(https?://[^"']+)(["'])`)
// disableDevtoolRe matches <script> tags that load disable-devtool or similar anti-debug libraries.
var disableDevtoolRe = regexp.MustCompile(`(?i)<script[^>]*(?:disable-devtool|devtools-detect)[^>]*>(?:</script>)?`)
@ -376,34 +373,18 @@ func rewriteHTML(body, origin, b64Origin string) string {
return m[1] + proxyPrefix + "/" + m[2] + m[3]
})
// 4. Rewrite cross-origin iframe src attributes to route through proxy
body = crossOriginIframeSrcRe.ReplaceAllStringFunc(body, func(match string) string {
m := crossOriginIframeSrcRe.FindStringSubmatch(match)
if len(m) < 4 {
return match
}
prefix, iframeURL, quote := m[1], m[2], m[3]
parsed, err := url.Parse(iframeURL)
if err != nil {
return match
}
iframeOrigin := parsed.Scheme + "://" + parsed.Host
iframeB64 := base64.RawURLEncoding.EncodeToString([]byte(iframeOrigin))
return prefix + "/proxy/" + iframeB64 + parsed.RequestURI() + quote
})
// 5. Strip anti-debugging scripts (disable-devtool, devtools-detect)
// 4. Strip anti-debugging scripts (disable-devtool, devtools-detect)
body = disableDevtoolRe.ReplaceAllString(body, "")
// 5b. Strip ad/popup scripts and context menu blockers
// 4b. Strip ad/popup scripts and context menu blockers
body = adScriptRe.ReplaceAllString(body, "")
body = adInlineRe.ReplaceAllString(body, "")
body = contextMenuBlockRe.ReplaceAllString(body, "")
// 5c. Strip debugger statements from inline scripts
// 4c. Strip debugger statements from inline scripts
body = debuggerStmtRe.ReplaceAllString(body, "/* */")
// 6. Inject JS shim right after <head> to intercept fetch/XHR/WebSocket
// 5. Inject JS shim right after <head> to intercept fetch/XHR/WebSocket
shim := fmt.Sprintf(jsShimTemplate, b64Origin, origin)
headIdx := strings.Index(strings.ToLower(body), "<head>")
if headIdx != -1 {

View file

@ -9,6 +9,8 @@ import (
"f1-stream/internal/auth"
"f1-stream/internal/extractor"
"f1-stream/internal/hlsproxy"
"f1-stream/internal/playerconfig"
"f1-stream/internal/proxy"
"f1-stream/internal/scraper"
"f1-stream/internal/store"
@ -18,15 +20,17 @@ type Server struct {
store *store.Store
auth *auth.Auth
scraper *scraper.Scraper
playerConfig *playerconfig.Service
mux *http.ServeMux
headlessEnabled bool
}
func New(s *store.Store, a *auth.Auth, sc *scraper.Scraper, origins []string, headlessEnabled bool) *Server {
func New(s *store.Store, a *auth.Auth, sc *scraper.Scraper, pc *playerconfig.Service, origins []string, headlessEnabled bool) *Server {
srv := &Server{
store: s,
auth: a,
scraper: sc,
playerConfig: pc,
mux: http.NewServeMux(),
headlessEnabled: headlessEnabled,
}
@ -67,6 +71,11 @@ func (s *Server) registerRoutes(origins []string) {
s.mux.Handle("HEAD /proxy/", proxyHandler)
s.mux.Handle("OPTIONS /proxy/", proxyHandler)
// HLS proxy for native video playback
hlsHandler := hlsproxy.NewHandler()
s.mux.Handle("GET /hls/", hlsHandler)
s.mux.Handle("OPTIONS /hls/", hlsHandler)
// Public API - wrap with middleware
wrapAll := func(h http.HandlerFunc) http.Handler {
return RecoveryMiddleware(LoggingMiddleware(originMw(authMw(h))))
@ -83,6 +92,7 @@ func (s *Server) registerRoutes(origins []string) {
// Public streams
s.mux.Handle("GET /api/streams/public", wrapAll(s.handlePublicStreams))
s.mux.Handle("GET /api/streams/{id}/browse", wrapAll(s.handleBrowseStream))
s.mux.Handle("GET /api/streams/{id}/player-config", wrapAll(s.handlePlayerConfig))
// Scraped links
s.mux.Handle("GET /api/scraped", wrapAll(s.handleScrapedLinks))
@ -291,3 +301,38 @@ func (s *Server) handleBrowseStream(w http.ResponseWriter, r *http.Request) {
extractor.HandleBrowserSession(w, r, streamURL)
}
// handlePlayerConfig serves GET /api/streams/{id}/player-config.
// Any failure (store error, unknown ID, unpublished stream) degrades to a
// generic {"type":"proxy"} response so the frontend falls back to the
// iframe proxy instead of surfacing an error. The JSON write was previously
// duplicated in four places; it is factored into a single helper.
func (s *Server) handlePlayerConfig(w http.ResponseWriter, r *http.Request) {
	writeConfig := func(cfg *playerconfig.PlayerConfig) {
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(cfg)
	}
	proxyFallback := &playerconfig.PlayerConfig{Type: "proxy"}
	id := r.PathValue("id")
	streams, err := s.store.LoadStreams()
	if err != nil {
		log.Printf("server: player-config: failed to load streams: %v", err)
		writeConfig(proxyFallback)
		return
	}
	for _, st := range streams {
		if st.ID != id {
			continue
		}
		if !st.Published {
			// Unpublished streams are indistinguishable from unknown ones
			// to the client on purpose.
			writeConfig(proxyFallback)
			return
		}
		writeConfig(s.playerConfig.GetConfig(r.Context(), st.URL))
		return
	}
	writeConfig(proxyFallback)
}

View file

@ -15,6 +15,7 @@ import (
"f1-stream/internal/extractor"
"f1-stream/internal/healthcheck"
"f1-stream/internal/models"
"f1-stream/internal/playerconfig"
"f1-stream/internal/scraper"
"f1-stream/internal/server"
"f1-stream/internal/store"
@ -59,7 +60,8 @@ func main() {
hc := healthcheck.New(st, healthInterval, healthTimeout)
// Initialize server
srv := server.New(st, a, sc, origins, headlessEnabled)
pc := playerconfig.New()
srv := server.New(st, a, sc, pc, origins, headlessEnabled)
// Start scraper in background
ctx, cancel := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)

View file

@ -1340,6 +1340,15 @@ dialog .dialog-cancel:hover {
border: none;
}
/* Native player container: fills the browser-viewer content area and shows
   a black background while the stream loads. The id is retained from the
   previous Clappr-based player (now HLS.js — see commit notes) so existing
   JS selectors keep working. */
#clappr-player {
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background: #000;
}
.browser-viewer-content .loading-overlay {
position: absolute;
inset: 0;

View file

@ -194,6 +194,9 @@
<!-- Browser Session Viewer (inline, inside main via JS) -->
<script src="https://cdn.jsdelivr.net/npm/crypto-js@4/crypto-js.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/hls.js@latest/dist/hls.min.js"></script>
<script src="/static/js/player.js"></script>
<script src="/static/js/utils.js"></script>
<script src="/static/js/auth.js"></script>
<script src="/static/js/streams.js"></script>

View file

@ -0,0 +1,216 @@
// player.js — Native HLS player management using HLS.js directly

// Module-level state: at most one native player exists at a time.
var _hlsInstance = null;
var _videoElement = null;

/**
 * Fetch player config for a stream from the backend.
 * Returns {type: "hls"|"daddylive"|"proxy", hls_url, auth_token, ...}
 * Any network/HTTP failure resolves to {type: 'proxy'} so callers can
 * always fall back to the iframe proxy.
 */
function getPlayerConfig(streamId) {
  return fetch('/api/streams/' + streamId + '/player-config')
    .then(function (resp) {
      if (!resp.ok) return { type: 'proxy' };
      return resp.json();
    })
    .catch(function (e) {
      console.error('Failed to fetch player config:', e);
      return { type: 'proxy' };
    });
}
/**
 * Decode a /hls/{b64} proxy URL back to the original upstream URL.
 * Non-matching or undecodable inputs are returned unchanged.
 */
function decodeHLSURL(proxyURL) {
  if (typeof proxyURL !== 'string' || !proxyURL) return proxyURL;
  var match = /\/hls\/([A-Za-z0-9_-]+)/.exec(proxyURL);
  if (!match) return proxyURL;
  try {
    // Convert base64url to standard base64, then pad to a multiple of 4.
    var std = match[1].replace(/-/g, '+').replace(/_/g, '/');
    var padding = (4 - (std.length % 4)) % 4;
    return atob(std + '===='.slice(0, padding));
  } catch (e) {
    return proxyURL;
  }
}
/**
 * Create an HLS.js player for a plain HLS stream (no auth integration).
 * Any previously active native player is torn down first.
 */
function createHLSPlayer(containerSelector, hlsURL) {
  destroyNativePlayer();
  _buildPlayer(containerSelector, hlsURL, {});
}
/**
 * Create an HLS.js player for DaddyLive streams with auth module integration.
 * When the backend supplied an auth-module URL the module is loaded first;
 * otherwise the stream plays as plain HLS.
 */
function createDaddyLivePlayer(containerSelector, config) {
  destroyNativePlayer();
  if (!config.auth_mod_url) {
    _buildPlayer(containerSelector, config.hls_url, {});
    return;
  }
  _loadAuthModAndPlay(containerSelector, config);
}
// Inject the external auth-module <script>; on load, build the player with
// auth wiring, and on failure fall back to plain HLS playback.
function _loadAuthModAndPlay(containerSelector, config) {
  var tag = document.createElement('script');
  tag.onload = function () {
    _createDaddyLivePlayerWithAuth(containerSelector, config);
  };
  tag.onerror = function () {
    console.warn('Failed to load auth module, falling back to direct HLS');
    _buildPlayer(containerSelector, config.hls_url, {});
  };
  tag.src = config.auth_mod_url;
  document.head.appendChild(tag);
}
/**
 * Build a DaddyLive player, wiring the external EPlayerAuth module (when it
 * loaded successfully) into HLS.js via xhrSetup.
 *
 * The auth module expects to set request headers against the real upstream
 * URL, but our requests target the /hls/{b64} proxy path instead. So for
 * each request we: decode the proxy path back to the upstream URL, run the
 * module's xhrSetup against a fake XHR to capture the headers it would set,
 * then re-apply them as X-Hls-Forward-* headers (the Go proxy strips the
 * prefix and replays them upstream).
 *
 * NOTE(review): EPlayerAuth's API shape (init/getXhrSetup) is assumed from
 * this integration code — confirm against the upstream auth module.
 */
function _createDaddyLivePlayerWithAuth(containerSelector, config) {
  var hlsConfig = {};
  // If EPlayerAuth is available, set up xhr wrapping
  if (typeof EPlayerAuth !== 'undefined' && typeof EPlayerAuth.init === 'function') {
    try {
      // Seed the module with the auth params scraped server-side.
      EPlayerAuth.init({
        authToken: config.auth_token,
        channelKey: config.channel_key,
        channelSalt: config.channel_salt,
        timestamp: config.timestamp,
        serverKey: config.server_key
      });
      if (typeof EPlayerAuth.getXhrSetup === 'function') {
        var origSetup = EPlayerAuth.getXhrSetup();
        hlsConfig.xhrSetup = function (xhr, url) {
          // Decode the real upstream URL from our /hls/{b64} proxy path
          var realURL = decodeHLSURL(url);
          // Create interceptor to capture headers the auth module sets
          var captured = {};
          var fakeXHR = {
            setRequestHeader: function (k, v) { captured[k] = v; }
          };
          try {
            origSetup(fakeXHR, realURL);
          } catch (e) {
            // Auth failures are non-fatal: the request goes out unauthenticated.
            console.warn('Auth xhrSetup error:', e);
          }
          // Re-set captured headers with forwarding prefix
          for (var k in captured) {
            if (captured.hasOwnProperty(k)) {
              xhr.setRequestHeader('X-Hls-Forward-' + k, captured[k]);
            }
          }
        };
      }
    } catch (e) {
      console.warn('EPlayerAuth init failed:', e);
    }
  }
  _buildPlayer(containerSelector, config.hls_url, hlsConfig);
}
/**
 * Build an HLS.js player inside the container: creates a <video> element,
 * prefers HLS.js (MSE) playback, falls back to Safari's native HLS support,
 * and shows a plain-text message when neither is available.
 */
function _buildPlayer(containerSelector, hlsURL, extraConfig) {
  var host = document.querySelector(containerSelector);
  if (!host) return;

  var video = document.createElement('video');
  video.controls = true;
  video.autoplay = true;
  video.style.width = '100%';
  video.style.height = '100%';
  video.style.backgroundColor = '#000';
  host.appendChild(video);
  _videoElement = video;

  if (Hls.isSupported()) {
    var hlsConfig = {
      enableWorker: true,
      lowLatencyMode: false,
      maxBufferLength: 30,
      maxMaxBufferLength: 60
    };
    // Merge caller-supplied options (e.g. xhrSetup for auth headers).
    Object.keys(extraConfig || {}).forEach(function (key) {
      hlsConfig[key] = extraConfig[key];
    });
    var hls = new Hls(hlsConfig);
    hls.loadSource(hlsURL);
    hls.attachMedia(video);
    hls.on(Hls.Events.MANIFEST_PARSED, function () {
      video.play().catch(function (e) {
        console.warn('Autoplay blocked:', e);
      });
    });
    hls.on(Hls.Events.ERROR, function (event, data) {
      console.error('HLS.js error:', data.type, data.details, data);
      if (!data.fatal) return;
      if (data.type === Hls.ErrorTypes.NETWORK_ERROR) {
        console.warn('HLS network error, attempting recovery...');
        hls.startLoad();
      } else if (data.type === Hls.ErrorTypes.MEDIA_ERROR) {
        console.warn('HLS media error, attempting recovery...');
        hls.recoverMediaError();
      } else {
        console.error('HLS fatal error, cannot recover');
        hls.destroy();
      }
    });
    _hlsInstance = hls;
  } else if (video.canPlayType('application/vnd.apple.mpegurl')) {
    // Safari: the <video> element speaks HLS natively.
    video.src = hlsURL;
    video.addEventListener('loadedmetadata', function () {
      video.play().catch(function (e) {
        console.warn('Autoplay blocked:', e);
      });
    });
  } else {
    host.textContent = 'HLS playback is not supported in this browser.';
  }
}
/**
 * Tear down the active native player: destroy the HLS.js instance
 * (if any) and stop/detach/remove the <video> element, resetting the
 * module-level handles to null.
 */
function destroyNativePlayer() {
  // Take ownership of the handles up front so the module state is
  // clean even if teardown below throws.
  var hls = _hlsInstance;
  var video = _videoElement;
  _hlsInstance = null;
  _videoElement = null;
  if (hls) {
    try {
      hls.destroy();
    } catch (e) {
      console.warn('Error destroying HLS instance:', e);
    }
  }
  if (video) {
    try {
      // Stop playback, release the media resource, then drop the node.
      video.pause();
      video.removeAttribute('src');
      video.load();
      video.remove();
    } catch (e) {
      console.warn('Error removing video element:', e);
    }
  }
}

View file

@ -317,9 +317,9 @@ function closeRedditViewer() {
contentEl.querySelectorAll(':scope > :not(#reddit-viewer-loader)').forEach(el => el.remove());
}
// --- Browser Session Viewer (Iframe Proxy) ---
// --- Browser Session Viewer (Iframe Proxy + Native Player) ---
function openBrowserSession(streamId, streamTitle, streamURL) {
async function openBrowserSession(streamId, streamTitle, streamURL) {
const viewer = document.getElementById('browser-viewer');
const statusEl = viewer.querySelector('.browser-viewer-status');
const contentEl = viewer.querySelector('.browser-viewer-content');
@ -331,7 +331,41 @@ function openBrowserSession(streamId, streamTitle, streamURL) {
statusEl.classList.remove('connected');
loader.classList.remove('hidden');
// Parse the stream URL to extract origin and path
if (urlText) urlText.textContent = streamURL;
if (openOriginal) openOriginal.href = streamURL;
// Hide all tab content sections and show the viewer
document.querySelectorAll('.tab-content').forEach(s => s.classList.remove('active'));
viewer.classList.remove('hidden');
viewer.classList.add('active');
// Remove any existing iframe or player
contentEl.querySelectorAll('.browser-iframe').forEach(el => el.remove());
contentEl.querySelectorAll('#clappr-player').forEach(el => el.remove());
destroyNativePlayer();
// Fetch player config to determine stream type
const config = await getPlayerConfig(streamId);
if (config.type === 'hls' || config.type === 'daddylive') {
// Native player mode
const playerDiv = document.createElement('div');
playerDiv.id = 'clappr-player';
contentEl.appendChild(playerDiv);
loader.classList.add('hidden');
statusEl.textContent = 'Playing';
statusEl.classList.add('connected');
if (config.type === 'daddylive') {
createDaddyLivePlayer('#clappr-player', config);
} else {
createHLSPlayer('#clappr-player', config.hls_url);
}
return;
}
// Fallback: iframe proxy mode
let parsed;
try {
parsed = new URL(streamURL);
@ -344,25 +378,9 @@ function openBrowserSession(streamId, streamTitle, streamURL) {
const origin = parsed.origin;
const pathAndSearch = parsed.pathname + parsed.search + parsed.hash;
// Base64-encode the origin (URL-safe, no padding)
const b64Origin = btoa(origin).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
// Build proxy URL
const proxyURL = '/proxy/' + b64Origin + pathAndSearch;
if (urlText) urlText.textContent = streamURL;
if (openOriginal) openOriginal.href = streamURL;
// Hide all tab content sections and show the viewer
document.querySelectorAll('.tab-content').forEach(s => s.classList.remove('active'));
viewer.classList.remove('hidden');
viewer.classList.add('active');
// Remove any existing iframe
contentEl.querySelectorAll('.browser-iframe').forEach(el => el.remove());
// Create iframe with sandbox to prevent frame-busting and top-navigation
const iframe = document.createElement('iframe');
iframe.src = proxyURL;
iframe.className = 'browser-iframe';
@ -378,11 +396,13 @@ function openBrowserSession(streamId, streamTitle, streamURL) {
}
function closeBrowserSession() {
destroyNativePlayer();
const viewer = document.getElementById('browser-viewer');
viewer.classList.add('hidden');
viewer.classList.remove('active');
const contentEl = viewer.querySelector('.browser-viewer-content');
contentEl.querySelectorAll('.browser-iframe').forEach(el => el.remove());
contentEl.querySelectorAll('#clappr-player').forEach(el => el.remove());
const statusEl = viewer.querySelector('.browser-viewer-status');
statusEl.textContent = '';
statusEl.classList.remove('connected');

View file

@ -37,7 +37,7 @@ resource "kubernetes_deployment" "f1-stream" {
}
spec {
container {
image = "viktorbarzin/f1-stream:v1.2.8"
image = "viktorbarzin/f1-stream:v1.3.1"
name = "f1-stream"
resources {
limits = {
@ -126,9 +126,10 @@ module "tls_secret" {
module "ingress" {
source = "../ingress_factory"
namespace = kubernetes_namespace.f1-stream.metadata[0].name
name = "f1"
tls_secret_name = var.tls_secret_name
rybbit_site_id = "7e69786f66d5"
source = "../ingress_factory"
namespace = kubernetes_namespace.f1-stream.metadata[0].name
name = "f1"
tls_secret_name = var.tls_secret_name
rybbit_site_id = "7e69786f66d5"
exclude_crowdsec = true
}