jellytau/src/lib/components/player/VideoPlayer.svelte
Duncan Tourolle 3a9c126dfe
Some checks failed
🏗️ Build and Test JellyTau / Run Tests (push) Failing after 14s
🏗️ Build and Test JellyTau / Build Android APK (push) Has been skipped
Traceability Validation / Check Requirement Traces (push) Failing after 1s
Fix warnings and update tracability
2026-02-28 20:54:25 +01:00

1601 lines
61 KiB
Svelte

<!-- TRACES: UR-003, UR-005, UR-020, UR-021, UR-026 | DR-010, DR-023, DR-024 -->
<script lang="ts">
import { onMount, onDestroy } from "svelte";
import { invoke } from "@tauri-apps/api/core";
import { listen } from "@tauri-apps/api/event";
import Hls from "hls.js";
import type { MediaItem } from "$lib/api/types";
import { auth } from "$lib/stores/auth";
import VolumeControl from "./VolumeControl.svelte";
import SleepTimerModal from "./SleepTimerModal.svelte";
import SleepTimerIndicator from "./SleepTimerIndicator.svelte";
import CachedImage from "../common/CachedImage.svelte";
import { sleepTimerActive } from "$lib/stores/sleepTimer";
// Props contract for the full-screen video player overlay. All callbacks
// except onClose are optional; the report* callbacks forward playback
// telemetry to the parent, which relays it to the Jellyfin server.
interface Props {
  media: MediaItem | null;
  streamUrl: string;
  mediaSourceId?: string; // Media source ID for subtitle URLs
  initialPosition?: number; // Position in seconds to seek to after load (for resume)
  needsTranscoding?: boolean; // Whether content needs transcoding (HEVC/10-bit) - affects seeking behavior
  onClose: () => void;
  onSeek?: (positionSeconds: number, audioStreamIndex?: number) => Promise<string>; // Returns new stream URL for transcoded seeking
  onReportProgress?: (positionSeconds: number, isPaused: boolean) => void;
  onReportStart?: (positionSeconds: number) => void;
  onReportStop?: (positionSeconds: number) => void;
  onEnded?: () => void; // Called when video playback ends naturally
  onNext?: () => void; // Called when user clicks next episode button
  hasNext?: boolean; // Whether there is a next episode available
}
// Destructured props; defaults keep optional flags well-defined.
let { media, streamUrl, mediaSourceId, initialPosition, needsTranscoding = false, onClose, onSeek, onReportProgress, onReportStart, onReportStop, onEnded, onNext, hasNext = false }: Props = $props();

// --- Core playback / UI state ---
let videoElement: HTMLVideoElement | null = $state(null);
let isPlaying = $state(false);
// Displayed position in seconds; for reloaded transcoded streams this is
// seekOffset + the element's own currentTime.
let currentTime = $state(0);
let isFullscreen = $state(false);
let showControls = $state(true);
let showSleepTimerModal = $state(false);
let isBuffering = $state(false);
// Auto-hide timer for the controls overlay (armed in handleMouseMove)
let controlsTimeout: ReturnType<typeof setTimeout> | null = null;
let seekOffset = $state(0); // Track offset when seeking in transcoded streams
let isSeeking = $state(false);
// Active stream URL. Derived from the prop but also reassigned internally
// after transcoded seeks (Svelte 5 allows overriding a $derived).
let currentStreamUrl = $derived(streamUrl);
let hasReportedStart = $state(false);
let progressInterval: ReturnType<typeof setInterval> | null = null;
let isMediaReady = $state(false); // Track if media is ready to play (implements Loading state from DR-001)
let hasPerformedInitialSeek = $state(false); // Track if we've seeked to initialPosition
let canplayFallbackTimeout: ReturnType<typeof setTimeout> | null = null; // Fallback timeout for canplay event
let isDraggingSeekBar = $state(false); // Track if user is dragging the seek bar
let debugLogInterval: ReturnType<typeof setInterval> | null = null; // Debug logging interval
let rafId: number | null = null; // RequestAnimationFrame ID for smooth time updates

// --- Touch gesture state ---
let touchStartX = $state(0);
let touchStartY = $state(0);
let touchStartTime = $state(0);
let lastTapTime = $state(0); // Timestamp of previous tap, for double-tap detection
let tapTimeout: ReturnType<typeof setTimeout> | null = null;
let brightness = $state(1); // 0-2, default 1 (applied as a CSS filter)
let showDoubleTapFeedback = $state<"left" | "right" | null>(null);
let doubleTapFeedbackTimeout: ReturnType<typeof setTimeout> | null = null;
let swipeGestureActive = $state(false);

// --- Backend selection (Rust decides which backend to use per platform) ---
let useHtml5Element = $state(true); // Default to HTML5, Rust will override if using native backend
let didStartNativePlayback = $state(false); // Track if we started playback (to know if we should stop on unmount)
let didStopBackendEarly = $state(false); // Track if we stopped backend early for non-transcoded content
let swipeType = $state<"brightness" | null>(null);
let hls: Hls | null = null; // HLS.js instance for streaming HLS content

// --- Track selection menus ---
let showAudioTrackMenu = $state(false);
let selectedAudioTrackIndex = $state<number | null>(null);
let showSubtitleMenu = $state(false);
let selectedSubtitleIndex = $state<number | null>(null);

// Track duration from video element (for when media item doesn't have runTimeTicks)
let videoDuration = $state(0);

// Use known duration from media item (runTimeTicks is in 10M ticks/second).
// Fallback to the video element's own duration for direct streams.
const duration = $derived.by(() => {
  // Explicitly check if runTimeTicks exists and is a valid number
  if (media && media.runTimeTicks && media.runTimeTicks > 0) {
    return media.runTimeTicks / 10_000_000;
  }
  // Otherwise use the video element's duration
  return videoDuration;
});
// Get available audio tracks from the current item's media streams.
// FIX: this was declared as `const audioTracks = $derived(() => {...})`,
// which makes the *arrow function itself* the derived value (the derivation
// performs no reactive reads), so the rune added nothing over a plain
// function. Declaring it as an ordinary function makes that explicit; every
// call site already invokes it as `audioTracks()`, and reads of `media`
// inside still register reactively at the call site (template or effect).
function audioTracks() {
  if (!media || !media.mediaStreams) {
    console.log("[VideoPlayer] No media or mediaStreams available");
    return [];
  }
  const tracks = media.mediaStreams.filter(stream => stream.type === "Audio");
  console.log("[VideoPlayer] Found audio tracks:", tracks.length, tracks);
  return tracks;
}
/**
 * Resolve a stored audio preference to a concrete stream index on the
 * current item. Matching priority: exact display title, then language,
 * then the stream flagged as default (or simply the first track).
 * Returns null only when the item has no audio streams at all.
 */
function findBestAudioTrack(preference: { audioTrackDisplayTitle?: string | null, audioTrackLanguage?: string | null }) {
  const tracks = audioTracks();
  if (!tracks.length) return null;

  const { audioTrackDisplayTitle, audioTrackLanguage } = preference;

  // 1) Exact display-title match is the most specific signal
  if (audioTrackDisplayTitle) {
    const byTitle = tracks.find(t => t.displayTitle === audioTrackDisplayTitle);
    if (byTitle) {
      console.log("[VideoPlayer] Matched audio track by display title:", byTitle.displayTitle);
      return byTitle.index;
    }
  }

  // 2) Fall back to a language match
  if (audioTrackLanguage) {
    const byLanguage = tracks.find(t => t.language === audioTrackLanguage);
    if (byLanguage) {
      console.log("[VideoPlayer] Matched audio track by language:", byLanguage.language);
      return byLanguage.index;
    }
  }

  // 3) Last resort: the default-flagged track, or the first one
  const fallback = tracks.find(t => t.isDefault) || tracks[0];
  console.log("[VideoPlayer] Using default/first audio track:", fallback.displayTitle || fallback.language);
  return fallback.index;
}
/**
 * For episodes of a TV series, look up the user's stored per-series audio
 * preference from local storage and apply the best-matching track index to
 * the current item. Failures are logged and otherwise ignored — playback
 * simply proceeds with the default track.
 */
async function loadSeriesAudioPreference() {
  const seriesId = media?.seriesId;
  if (!seriesId) return;
  try {
    const userId = auth.getUserId();
    if (!userId) return;
    const preference = await invoke<{ seriesId: string, audioTrackDisplayTitle?: string | null, audioTrackLanguage?: string | null, audioTrackIndex?: number | null } | null>(
      "storage_get_series_audio_preference",
      { userId, seriesId }
    );
    if (!preference) return;
    console.log("[VideoPlayer] Loaded series audio preference:", preference);
    const matchedIndex = findBestAudioTrack(preference);
    if (matchedIndex !== null) {
      selectedAudioTrackIndex = matchedIndex;
      console.log("[VideoPlayer] Applied series audio preference, track index:", matchedIndex);
    }
  } catch (err) {
    console.warn("[VideoPlayer] Failed to load series audio preference:", err);
  }
}
// Get available subtitle tracks from the current item's media streams.
// FIX: like audioTracks, this was `$derived(() => {...})`, which just stores
// the arrow function as the derived value — an ordinary function is the
// honest form. Call sites already use `subtitleTracks()`. (The local
// `const subtitleTracks = []` inside onMount shadows this name and is
// unaffected.)
function subtitleTracks() {
  if (!media || !media.mediaStreams) {
    console.log("[VideoPlayer] No media or mediaStreams available for subtitles");
    return [];
  }
  const tracks = media.mediaStreams.filter(stream => stream.type === "Subtitle");
  console.log("[VideoPlayer] Found subtitle tracks:", tracks.length, tracks);
  return tracks;
}
// Track the last prop value to detect when parent changes the URL (vs internal seeks)
let lastStreamUrlProp = $state("");
// Update stream URL when prop changes (from parent component, not from internal seeks).
// Internal transcoded seeks reassign currentStreamUrl directly without touching
// streamUrl, so this comparison distinguishes the two sources of change.
$effect(() => {
  // Only reset when the streamUrl prop actually changes from parent
  if (streamUrl !== lastStreamUrlProp) {
    lastStreamUrlProp = streamUrl;
    currentStreamUrl = streamUrl;
    seekOffset = 0; // New stream starts at its own time origin
    isMediaReady = false; // Reset to loading state when stream URL changes
    hasPerformedInitialSeek = false; // Reset so new video can seek to initial position
  }
});
// Set up HLS.js for HLS streams.
// Re-runs whenever the backend choice, video element, or stream URL changes.
// The aggressive teardown order (detachMedia -> stopLoad -> destroy, plus
// clearing the element's src) exists specifically to prevent two decoders
// producing audio at once after a stream swap.
$effect(() => {
  if (!useHtml5Element || !videoElement || !currentStreamUrl) {
    return;
  }
  const isHlsStream = currentStreamUrl.includes('.m3u8');
  if (isHlsStream && Hls.isSupported()) {
    // Clean up existing HLS instance if any - CRITICAL for preventing dual audio
    if (hls) {
      console.log('[VideoPlayer] Cleaning up existing HLS instance');
      // Detach from media element first to stop all audio/video
      hls.detachMedia();
      // Stop loading and flush buffers
      hls.stopLoad();
      // Destroy the instance
      hls.destroy();
      hls = null;
    }
    // Clear video element completely to stop any residual playback
    // This is critical to prevent dual audio streams
    if (videoElement.src) {
      videoElement.pause(); // Ensure playback is stopped
      videoElement.removeAttribute('src');
      videoElement.load(); // Reset the media element and clear all buffers
      videoElement.currentTime = 0;
    }
    // Small delay to ensure cleanup completes before creating new instance
    // This prevents race conditions with dual audio.
    // NOTE(review): if the effect re-runs within this 50ms window, the
    // cleanup closure below can run before this timer fires; the
    // `!videoElement` guard only partially protects against creating an
    // instance for a stale URL — worth confirming with rapid URL changes.
    setTimeout(() => {
      if (!videoElement) return;
      console.log('[VideoPlayer] Creating new HLS instance for:', currentStreamUrl);
      // Create new HLS instance
      hls = new Hls({
        debug: true, // Enable debug logging to diagnose loading issues
        enableWorker: true,
        lowLatencyMode: false,
        // Buffer configuration for smooth playback without gaps
        maxBufferLength: 60, // Maximum buffer length in seconds (increased for smoother playback)
        maxMaxBufferLength: 120, // Maximum max buffer length in seconds
        backBufferLength: 60, // Keep 60 seconds of back buffer to prevent gaps
        maxBufferSize: 100 * 1000 * 1000, // 100MB max buffer size
        maxBufferHole: 0.5, // Maximum buffer hole tolerance before seeking over it
        maxFragLookUpTolerance: 0.25, // Fragment lookup tolerance
        // Improve stall recovery
        abrEwmaDefaultEstimate: 500000, // Initial bandwidth estimate
        abrBandWidthFactor: 0.95, // Conservative bandwidth estimation
        abrBandWidthUpFactor: 0.7, // Slower quality upgrades to reduce buffering
        // Prevent aggressive buffer eviction
        liveSyncDurationCount: 3, // Only for live streams
        liveMaxLatencyDurationCount: 10, // Only for live streams
      });
      // Attach media element
      hls.attachMedia(videoElement);
      // Listen for media attached event; only then load the source
      hls.on(Hls.Events.MEDIA_ATTACHED, () => {
        console.log('[VideoPlayer] HLS.js attached to video element');
        // Load the HLS stream
        hls!.loadSource(currentStreamUrl);
      });
      // Listen for manifest parsed event
      hls.on(Hls.Events.MANIFEST_PARSED, () => {
        console.log('[VideoPlayer] HLS manifest parsed, ready to play');
      });
      // Handle errors: network errors retry loading, media errors attempt
      // hls.js's built-in recovery, anything else tears the instance down.
      hls.on(Hls.Events.ERROR, (event, data) => {
        console.error('[VideoPlayer] HLS error:', data);
        if (data.fatal) {
          switch (data.type) {
            case Hls.ErrorTypes.NETWORK_ERROR:
              console.error('[VideoPlayer] Fatal network error, trying to recover');
              hls!.startLoad();
              break;
            case Hls.ErrorTypes.MEDIA_ERROR:
              console.error('[VideoPlayer] Fatal media error, trying to recover');
              hls!.recoverMediaError();
              break;
            default:
              console.error('[VideoPlayer] Unrecoverable HLS error');
              hls!.destroy();
              break;
          }
        }
      });
    }, 50); // 50ms delay to ensure cleanup completes
    // Cleanup on effect re-run.
    // NOTE(review): a teardown is only returned on this branch — the native
    // Safari HLS branch below gets no cleanup; confirm that is intentional.
    return () => {
      console.log('[VideoPlayer] Effect cleanup: destroying HLS instance');
      if (hls) {
        hls.detachMedia();
        hls.stopLoad();
        hls.destroy();
        hls = null;
      }
      if (videoElement) {
        videoElement.pause();
      }
    };
  } else if (isHlsStream && videoElement.canPlayType('application/vnd.apple.mpegurl')) {
    // Native HLS support (Safari)
    console.log('[VideoPlayer] Using native HLS support');
    videoElement.src = currentStreamUrl;
  } else {
    // Not an HLS stream, use regular video element
    // (the element's src is presumably bound in the template — not set here)
    console.log('[VideoPlayer] Using regular video element for non-HLS stream');
  }
});
// Ensure video element is unmuted and has max volume when it's bound
// (critical for Android, where WebView media can start muted). Also picks
// an initial audio track and dumps non-standard audio diagnostics.
$effect(() => {
  if (videoElement) {
    videoElement.muted = false;
    videoElement.volume = 1.0;
    console.log("[VideoPlayer] Video element configured: muted=", videoElement.muted, "volume=", videoElement.volume);
    // DIAGNOSTIC: Check if video has audio tracks (audioTracks on the element
    // is non-standard, hence the `any` cast)
    if ((videoElement as any).audioTracks) {
      console.log("[VideoPlayer] Audio tracks count:", (videoElement as any).audioTracks.length);
      // Set initial audio track (prefer the stream flagged default)
      if (selectedAudioTrackIndex === null && audioTracks().length > 0) {
        const defaultTrack = audioTracks().find(t => t.isDefault);
        selectedAudioTrackIndex = defaultTrack ? defaultTrack.index : audioTracks()[0].index;
        console.log("[VideoPlayer] Selected default audio track:", selectedAudioTrackIndex);
      }
    }
    // Firefox-only diagnostic
    if ((videoElement as any).mozHasAudio !== undefined) {
      console.log("[VideoPlayer] mozHasAudio:", (videoElement as any).mozHasAudio);
    }
    // WebKit-only diagnostic
    if ((videoElement as any).webkitAudioDecodedByteCount !== undefined) {
      console.log("[VideoPlayer] webkitAudioDecodedByteCount:", (videoElement as any).webkitAudioDecodedByteCount);
    }
  }
});
// Handle initial position changes (for resuming the *same* video from a
// different position). The first seek for a newly loaded video happens in
// handleCanPlay; this effect only covers the case where initialPosition
// changes after that first seek already ran (canplay won't fire again).
$effect(() => {
  // Track initialPosition changes - if it changes and video is ready, seek to new position
  const pos = initialPosition;
  if (pos && pos > 0 && isMediaReady && videoElement && hasPerformedInitialSeek) {
    // Position changed after initial seek was done - seek to new position
    console.log("[VideoPlayer] Initial position changed, seeking to:", pos);
    hasPerformedInitialSeek = false; // Reset flag to allow handleCanPlay to seek
    // Trigger seek directly since canplay won't fire for same video
    videoElement.currentTime = pos;
    currentTime = pos;
  }
});
// Set up progress reporting interval
onMount(async () => {
// Initialize player via Rust - Rust will decide which backend to use based on platform
if (media && currentStreamUrl) {
try {
console.log("[VideoPlayer] Initializing player for:", media.name);
console.log("[VideoPlayer] Stream URL:", currentStreamUrl);
// Build subtitle tracks for native player
const subtitleTracks = [];
if (media.mediaStreams && mediaSourceId) {
const subtitles = media.mediaStreams.filter(s => s.type === "Subtitle");
for (const sub of subtitles) {
try {
const url = await getSubtitleUrl(sub.index);
if (url) {
subtitleTracks.push({
index: sub.index,
url: url,
language: sub.language || null,
label: sub.displayTitle || sub.language || `Track ${sub.index}`,
mime_type: "text/vtt" // Jellyfin converts to WebVTT
});
}
} catch (err) {
console.warn(`[VideoPlayer] Failed to build subtitle URL for track ${sub.index}:`, err);
}
}
console.log(`[VideoPlayer] Built ${subtitleTracks.length} subtitle tracks for native player`);
}
// Call Rust backend to start playback
// Rust will choose ExoPlayer (Android), libmpv (Linux), or tell us to use HTML5
// Send minimal video data - no complex serialization to avoid Tauri Android issues
const response: any = await invoke("player_play_item", {
item: {
streamUrl: currentStreamUrl,
title: media.name,
id: media.id,
videoCodec: needsTranscoding ? "hevc" : "h264",
needsTranscoding: needsTranscoding,
},
});
// Rust tells us which backend it's using
useHtml5Element = response.useHtml5Element;
console.log(`[VideoPlayer] Backend: ${response.backend}, useHtml5Element: ${useHtml5Element}`);
// If using HTML5 element for non-transcoded content, stop the backend player
// For transcoded content, we need to keep the backend running to handle seeking/audio track switching
if (useHtml5Element && !needsTranscoding) {
try {
console.log("[VideoPlayer] Using HTML5 for direct stream - stopping backend player to prevent dual audio");
await invoke("player_stop");
didStopBackendEarly = true; // Track that we stopped the backend
} catch (err) {
console.warn("[VideoPlayer] Failed to stop backend player:", err);
}
} else if (useHtml5Element && needsTranscoding) {
console.log("[VideoPlayer] Using HTML5 for transcoded stream - keeping backend for seeking/transcoding decisions");
// Backend is kept running but should not play audio since HTML5 element handles playback
didStartNativePlayback = true; // Track that we need to stop backend on unmount
}
if (!useHtml5Element) {
// Using native backend, subscribe to player events
didStartNativePlayback = true; // Track that we started native playback
const unlisten1 = await listen("player://position-update", (event: any) => {
if (!isDraggingSeekBar) {
currentTime = event.payload.position;
}
});
const unlisten2 = await listen("player://state-changed", (event: any) => {
isPlaying = event.payload.state === "playing";
});
// Clean up listeners on destroy
onDestroy(() => {
unlisten1();
unlisten2();
});
}
} catch (err) {
console.error("[VideoPlayer] Failed to initialize player:", err);
// Fallback to HTML5 on error
useHtml5Element = true;
// For non-transcoded content, try to stop any backend player that might have started
if (!needsTranscoding) {
try {
await invoke("player_stop");
didStopBackendEarly = true;
} catch (stopErr) {
// Ignore errors when stopping
}
} else {
// For transcoded content, keep backend for seeking
didStartNativePlayback = true;
}
}
}
// Load series audio preference (for TV shows)
await loadSeriesAudioPreference();
// Report progress every 10 seconds while playing
progressInterval = setInterval(() => {
if (isPlaying && !isSeeking && onReportProgress) {
onReportProgress(currentTime, false);
}
}, 10000);
// Debug logging every second
debugLogInterval = setInterval(() => {
if (videoElement && isPlaying && !isSeeking) {
const buffered = videoElement.buffered;
const bufferedRanges = [];
for (let i = 0; i < buffered.length; i++) {
bufferedRanges.push(`[${buffered.start(i).toFixed(1)} - ${buffered.end(i).toFixed(1)}]`);
}
console.log("[VideoPlayer Debug]", {
currentTime: videoElement.currentTime.toFixed(2),
displayTime: currentTime.toFixed(2),
buffered: bufferedRanges.join(", "),
readyState: videoElement.readyState,
paused: videoElement.paused,
seeking: videoElement.seeking,
playbackRate: videoElement.playbackRate,
});
}
}, 1000);
});
onDestroy(async () => {
// Stop RAF loop
stopTimeUpdates();
if (progressInterval) {
clearInterval(progressInterval);
}
if (debugLogInterval) {
clearInterval(debugLogInterval);
}
// Clean up HLS.js instance - prevent dual audio on unmount
if (hls) {
console.log("[VideoPlayer] Destroying HLS.js instance on unmount");
hls.detachMedia(); // Detach from video element first
hls.stopLoad(); // Stop loading and flush buffers
hls.destroy();
hls = null;
}
// Stop video element playback
if (videoElement) {
videoElement.pause();
videoElement.src = '';
videoElement.load();
}
// Stop the player when component is destroyed
// Skip if we already stopped the backend early (non-transcoded + HTML5)
if (didStartNativePlayback && !didStopBackendEarly) {
try {
console.log("[VideoPlayer] Stopping backend player on component unmount");
await invoke("player_stop");
} catch (err) {
console.error("[VideoPlayer] Failed to stop backend player:", err);
}
}
// Report stop when component is destroyed
if (onReportStop && currentTime > 0) {
onReportStop(currentTime);
}
});
// Smooth time updates using requestAnimationFrame (~60fps while playing)
function updateTimeLoop() {
  if (videoElement && !isSeeking && !isDraggingSeekBar && isPlaying) {
    // Add seek offset to get actual position in the full video
    const newCurrentTime = seekOffset + videoElement.currentTime;
    // Safety check: only update if the video element actually has data.
    // This prevents stale data from overwriting currentTime after failed seeks
    if (videoElement.readyState >= 2) { // HAVE_CURRENT_DATA or better
      currentTime = newCurrentTime;
    }
    // Continue the loop while playing
    rafId = requestAnimationFrame(updateTimeLoop);
  } else {
    // BUGFIX: the loop is exiting without a pause (e.g. isSeeking or
    // isDraggingSeekBar became true). Previously rafId kept its stale frame
    // id, so the `rafId === null` check in startTimeUpdates() turned every
    // later restart into a silent no-op and the time display froze.
    rafId = null;
  }
}
// Start the RAF loop if it isn't already running
function startTimeUpdates() {
  if (rafId === null) {
    rafId = requestAnimationFrame(updateTimeLoop);
  }
}
// Cancel the RAF loop and mark it stopped
function stopTimeUpdates() {
  if (rafId !== null) {
    cancelAnimationFrame(rafId);
    rafId = null;
  }
}
// Fallback: Update time on timeupdate event (for when RAF isn't running)
function handleTimeUpdate() {
if (videoElement && !isSeeking && !isDraggingSeekBar && !isPlaying) {
const newCurrentTime = seekOffset + videoElement.currentTime;
if (videoElement.readyState >= 2) {
currentTime = newCurrentTime;
}
}
}
// Capture the element-reported duration once metadata arrives; it serves as
// the fallback when the media item carries no runTimeTicks (direct streams).
function handleLoadedMetadata() {
  console.log("[VideoPlayer] loadedmetadata event");
  console.log("[VideoPlayer] Video element duration:", videoElement?.duration);
  console.log("[VideoPlayer] Media item runTimeTicks:", media?.runTimeTicks);
  console.log("[VideoPlayer] Needs transcoding:", needsTranscoding);
  // For direct streams without runTimeTicks, use video element's duration
  const reported = videoElement?.duration;
  if (reported && !isNaN(reported) && reported !== Infinity) {
    console.log("[VideoPlayer] Setting videoDuration to:", reported);
    videoDuration = reported;
    console.log("[VideoPlayer] videoDuration state is now:", videoDuration);
  }
  // Defer one tick so the derived duration has re-evaluated before logging
  setTimeout(() => {
    console.log("[VideoPlayer] Derived duration value:", duration);
    console.log("[VideoPlayer] Duration source:", media?.runTimeTicks ? "runTimeTicks" : "video element");
  }, 0);
}
// Media is ready to play - transition from Loading to Playing state (DR-001).
// Also performs the one-time resume seek to initialPosition: the element is
// paused first so autoplay cannot flash frame 0, the seek is awaited via the
// `seeked` event (with a 2s fallback), then playback resumes.
async function handleCanPlay() {
  console.log("[VideoPlayer] canplay event fired - media is ready");
  isMediaReady = true;
  // Ensure video is unmuted and at max volume (critical for Android)
  if (videoElement) {
    videoElement.muted = false;
    videoElement.volume = 1.0;
    console.log("[VideoPlayer] Video unmuted on canplay, volume: 1.0");
  }
  // Seek to initial position if resuming playback (runs at most once per load)
  if (initialPosition && initialPosition > 0 && !hasPerformedInitialSeek && videoElement) {
    console.log("[VideoPlayer] Seeking to initial position:", initialPosition);
    hasPerformedInitialSeek = true;
    // Pause video to prevent autoplay from starting at position 0
    const wasPlaying = !videoElement.paused;
    videoElement.pause();
    try {
      videoElement.currentTime = initialPosition;
      currentTime = initialPosition;
      // Wait for the seek to complete before resuming playback
      await new Promise<void>((resolve) => {
        const onSeeked = () => {
          videoElement?.removeEventListener("seeked", onSeeked);
          resolve();
        };
        videoElement!.addEventListener("seeked", onSeeked);
        // Fallback timeout in case seeked event doesn't fire
        // (extra resolve() after the event path is a harmless no-op)
        setTimeout(() => {
          videoElement?.removeEventListener("seeked", onSeeked);
          resolve();
        }, 2000);
      });
      // Resume playback after seek completes
      if (wasPlaying || videoElement.autoplay) {
        await videoElement.play();
      }
    } catch (err) {
      console.error("[VideoPlayer] Failed to seek to initial position:", err);
    }
  }
}
// Diagnostic handler for the <video> element's `error` event: logs the
// MediaError plus network/ready state in human-readable form. Purely
// informational — no recovery is attempted here.
function handleError(e: Event) {
  const video = e.target as HTMLVideoElement;
  const error = video.error;
  // Full raw snapshot first
  console.error("[VideoPlayer] Video error event:", {
    code: error?.code,
    message: error?.message,
    networkState: video.networkState,
    readyState: video.readyState,
    currentSrc: video.currentSrc,
    src: video.src,
  });
  // MediaError codes: 1=ABORTED, 2=NETWORK, 3=DECODE, 4=SRC_NOT_SUPPORTED
  const errorMessages: Record<number, string> = {
    1: "Playback aborted",
    2: "Network error while loading video - check server connectivity and CORS headers",
    3: "Video decoding failed - codec may not be supported by browser",
    4: "Video format not supported - may need transcoding",
  };
  const errorCode = error?.code || 0;
  const msg = errorMessages[errorCode] || `Unknown error (code ${errorCode})`;
  console.error("[VideoPlayer] Error interpretation:", msg);
  // Context that usually explains codes 2-4
  console.error("[VideoPlayer] Stream URL:", currentStreamUrl);
  console.error("[VideoPlayer] Needs transcoding:", needsTranscoding);
  // networkState: 0=EMPTY, 1=IDLE, 2=LOADING, 3=NO_SOURCE
  const networkStates = ["NETWORK_EMPTY", "NETWORK_IDLE", "NETWORK_LOADING", "NETWORK_NO_SOURCE"];
  console.error("[VideoPlayer] Network state:", networkStates[video.networkState] || video.networkState);
  // readyState: 0=NOTHING, 1=METADATA, 2=CURRENT_DATA, 3=FUTURE_DATA, 4=ENOUGH_DATA
  const readyStates = ["HAVE_NOTHING", "HAVE_METADATA", "HAVE_CURRENT_DATA", "HAVE_FUTURE_DATA", "HAVE_ENOUGH_DATA"];
  console.error("[VideoPlayer] Ready state:", readyStates[video.readyState] || video.readyState);
}
// `waiting` fires when playback stalls to buffer: show the spinner.
function handleWaiting() {
  isBuffering = true;
  console.log("[VideoPlayer] waiting event - buffering");
}
// `playing` fires when playback (re)starts after a stall: hide the spinner.
function handlePlaying() {
  isBuffering = false;
  console.log("[VideoPlayer] playing event - playback resumed");
}
// `loadstart` fires when the element begins fetching a new source. Arms a
// 5-second watchdog for the case where `canplay` never fires: if the element
// nevertheless reports enough buffered data, force the ready state so the
// loading overlay doesn't hang forever.
function handleLoadStart() {
  console.log("[VideoPlayer] loadstart event - starting to load:", currentStreamUrl);
  console.log("[VideoPlayer] Video element readyState:", videoElement?.readyState);
  console.log("[VideoPlayer] Video element networkState:", videoElement?.networkState);
  // Re-arm the watchdog for this load
  if (canplayFallbackTimeout) {
    clearTimeout(canplayFallbackTimeout);
  }
  canplayFallbackTimeout = setTimeout(() => {
    if (isMediaReady || !videoElement) return;
    console.warn("[VideoPlayer] canplay event did not fire within 5 seconds");
    console.log("[VideoPlayer] Fallback check - readyState:", videoElement.readyState, "networkState:", videoElement.networkState);
    // readyState >= 3 means HAVE_FUTURE_DATA / HAVE_ENOUGH_DATA
    if (videoElement.readyState >= 3) {
      console.log("[VideoPlayer] Video appears ready (readyState >= 3), forcing media ready state");
      isMediaReady = true;
    }
  }, 5000);
}
// `play` event: flip state, kick off the RAF clock, and report playback
// start to the server exactly once per session.
function handlePlay() {
  isPlaying = true;
  startTimeUpdates();
  if (!hasReportedStart && onReportStart) {
    onReportStart(currentTime);
    hasReportedStart = true;
  }
}
// `pause` event: stop the RAF clock and push an immediate progress report
// instead of waiting for the 10-second interval tick.
function handlePause() {
  isPlaying = false;
  stopTimeUpdates();
  onReportProgress?.(currentTime, true);
}
// `ended` event: final stop report, then let the parent react
// (e.g. show the next-episode prompt).
function handleEnded() {
  isPlaying = false;
  stopTimeUpdates();
  onReportStop?.(currentTime);
  onEnded?.();
}
/**
 * Toggle play/pause on whichever backend is active. The native backend
 * round-trips through Rust; the HTML5 path drives the <video> element.
 * FIX: `HTMLMediaElement.play()` returns a promise that rejects (autoplay
 * policy, aborted load) — the original call was fire-and-forget, producing
 * unhandled promise rejections; it is now awaited and logged.
 */
async function togglePlayPause() {
  if (!useHtml5Element) {
    try {
      const response = await invoke<{ state: string }>("player_toggle");
      isPlaying = response.state === "playing";
    } catch (err) {
      console.error("[VideoPlayer] Failed to toggle native player:", err);
    }
  } else if (videoElement) {
    if (videoElement.paused) {
      try {
        await videoElement.play();
      } catch (err) {
        console.error("[VideoPlayer] play() was rejected:", err);
      }
    } else {
      videoElement.pause();
    }
  }
}
// Live `input` events while the user scrubs: mirror the slider value into
// currentTime for immediate visual feedback. No actual seek happens until
// the `change` event (handleSeekBarChange).
function handleSeekBarInput(e: Event) {
  const slider = e.target as HTMLInputElement;
  currentTime = Number.parseFloat(slider.value);
}
// Commit a seek when the user releases the seek bar. The Rust backend picks
// the strategy: "reloadStream" (transcoded content — a fresh stream URL that
// starts at the target position, tracked via seekOffset) or native in-element
// seeking. The teardown order in the reload path (pause -> destroy HLS ->
// clear src -> delay -> swap URL) is deliberate to avoid two decoders
// playing audio simultaneously.
async function handleSeekBarChange(e: Event) {
  const input = e.target as HTMLInputElement;
  const targetTime = parseFloat(input.value);
  // Set isSeeking immediately to prevent timeupdate from interfering
  isSeeking = true;
  isDraggingSeekBar = false;
  try {
    console.log("[VideoPlayer] Seeking to:", targetTime.toFixed(2), "useHtml5Element:", useHtml5Element);
    const repo = auth.getRepository();
    if (!repo) {
      console.error("[VideoPlayer] No repository available");
      return;
    }
    // Backend smart seeking handles both native and HTML5
    const response = await invoke<{strategy: string, position?: number, newUrl?: string, seekOffset?: number}>("player_seek_video", {
      repositoryHandle: repo.getHandle(),
      position: targetTime,
      mediaSourceId: mediaSourceId ?? null,
      audioStreamIndex: selectedAudioTrackIndex ?? null,
      useHtml5: useHtml5Element,
    });
    console.log("[VideoPlayer] Backend seek response:", response);
    // For native backend, the backend handles everything internally
    if (!useHtml5Element) {
      // Backend already stopped, reloaded, and seeked if needed
      currentTime = response.position ?? targetTime;
      if (response.strategy === "reloadStream") {
        seekOffset = response.seekOffset ?? targetTime;
        currentStreamUrl = response.newUrl ?? currentStreamUrl;
      } else {
        seekOffset = 0;
      }
      console.log("[VideoPlayer] Native backend seek completed at position:", currentTime);
      return;
    }
    // HTML5 backend - handle video element management
    if (!videoElement) {
      console.warn("[VideoPlayer] Cannot seek - video element not available");
      return;
    }
    if (response.strategy === "reloadStream") {
      // Transcoded stream - reload with new URL
      console.log("[VideoPlayer] Reloading HTML5 stream from position:", targetTime);
      const wasPlaying = !videoElement.paused;
      // CRITICAL: Stop playback completely to prevent dual audio
      videoElement.pause();
      stopTimeUpdates(); // Stop RAF updates
      // CRITICAL: Destroy old HLS instance completely to prevent dual audio
      if (hls) {
        console.log("[VideoPlayer] Destroying old HLS instance for seek");
        hls.detachMedia(); // Detach from video element
        hls.stopLoad(); // Stop loading fragments
        hls.destroy(); // Completely destroy the instance
        hls = null; // Clear reference
      }
      // CRITICAL: Clear video element buffers completely
      if (videoElement.src) {
        videoElement.removeAttribute('src');
        videoElement.load(); // Reset and flush all buffers
      }
      // Small delay to ensure cleanup completes before creating new HLS instance
      await new Promise(resolve => setTimeout(resolve, 100));
      // Update stream URL (this will trigger $effect to create new HLS instance)
      seekOffset = response.seekOffset ?? targetTime;
      currentStreamUrl = response.newUrl ?? currentStreamUrl;
      currentTime = targetTime;
      // Wait for video to be ready (10s ceiling for the transcoder to spin up)
      await new Promise<void>((resolve) => {
        const onCanPlay = () => {
          console.log("[VideoPlayer] Transcoded video loaded after seek");
          videoElement?.removeEventListener("canplay", onCanPlay);
          resolve();
        };
        if (videoElement) {
          videoElement.addEventListener("canplay", onCanPlay);
        }
        setTimeout(() => {
          console.warn("[VideoPlayer] Transcoded seek timeout");
          videoElement?.removeEventListener("canplay", onCanPlay);
          resolve();
        }, 10000);
      });
      if (wasPlaying && videoElement) {
        await videoElement.play();
        startTimeUpdates(); // Restart RAF updates
      }
    } else {
      // Native browser seeking (directly seekable stream)
      console.log("[VideoPlayer] Using native HTML5 seek to:", targetTime.toFixed(2));
      videoElement.currentTime = targetTime;
      currentTime = targetTime;
      seekOffset = 0;
      // Wait for seek to complete (2s fallback if `seeked` never fires)
      await new Promise<void>((resolve) => {
        const onSeeked = () => {
          console.log("[VideoPlayer] Native seek completed");
          videoElement?.removeEventListener("seeked", onSeeked);
          resolve();
        };
        if (videoElement) {
          videoElement.addEventListener("seeked", onSeeked);
        }
        setTimeout(() => {
          videoElement?.removeEventListener("seeked", onSeeked);
          resolve();
        }, 2000);
      });
    }
    console.log("[VideoPlayer] HTML5 seek completed:", {
      strategy: response.strategy,
      targetTime: targetTime.toFixed(2),
      actualTime: videoElement.currentTime.toFixed(2),
      seekOffset,
    });
  } catch (err) {
    console.error("[VideoPlayer] Seek failed:", err);
  } finally {
    // Always release the seek lock so time updates resume
    isSeeking = false;
    isDraggingSeekBar = false;
  }
}
/**
 * Toggle browser fullscreen for the whole document.
 * FIX: requestFullscreen()/exitFullscreen() return promises that can reject
 * (e.g. not triggered by a user gesture, or permission denied). The original
 * set isFullscreen optimistically and leaked unhandled rejections; the flag
 * is now updated only once the transition actually succeeds.
 */
function toggleFullscreen() {
  if (!document.fullscreenElement) {
    document.documentElement.requestFullscreen()
      .then(() => { isFullscreen = true; })
      .catch(err => console.error("[VideoPlayer] Failed to enter fullscreen:", err));
  } else {
    document.exitFullscreen()
      .then(() => { isFullscreen = false; })
      .catch(err => console.error("[VideoPlayer] Failed to exit fullscreen:", err));
  }
}
/**
 * Format a position in seconds as "M:SS", or "H:MM:SS" for content an hour
 * or longer (the original rendered 1h30m as "90:05").
 * Non-finite or negative input (e.g. NaN duration before metadata loads,
 * which previously rendered "NaN:NaN") falls back to "0:00".
 */
function formatTime(seconds: number): string {
  if (!Number.isFinite(seconds) || seconds < 0) {
    return "0:00";
  }
  const total = Math.floor(seconds);
  const hours = Math.floor(total / 3600);
  const mins = Math.floor((total % 3600) / 60);
  const secs = (total % 60).toString().padStart(2, "0");
  if (hours > 0) {
    // Zero-pad minutes only when an hours field is shown
    return `${hours}:${mins.toString().padStart(2, "0")}:${secs}`;
  }
  return `${mins}:${secs}`;
}
// Reveal the control overlay on mouse activity and re-arm the 3-second
// auto-hide timer. Controls remain visible while paused.
function handleMouseMove() {
  showControls = true;
  if (controlsTimeout) clearTimeout(controlsTimeout);
  controlsTimeout = setTimeout(() => {
    if (isPlaying) showControls = false;
  }, 3000);
}
/**
 * Seek by a signed offset (e.g. ±10s) relative to the displayed position,
 * clamped to [0, duration]. Feeds a synthetic event into
 * handleSeekBarChange so both paths share the backend seek logic.
 */
async function seekRelative(seconds: number) {
  isSeeking = true;
  const target = Math.min(duration, Math.max(0, currentTime + seconds));
  console.log("[VideoPlayer] Relative seek:", {
    offset: `${seconds > 0 ? "+" : ""}${seconds}s`,
    from: currentTime.toFixed(2),
    to: target.toFixed(2),
  });
  // Minimal stand-in for the input-element event the handler expects
  const syntheticEvent = {
    target: { value: target.toString() }
  } as unknown as Event;
  await handleSeekBarChange(syntheticEvent);
}
/**
 * Global keyboard shortcuts: space/k toggles playback, f toggles
 * fullscreen, Escape leaves fullscreen (or closes the player), and the
 * arrow keys seek ±10 seconds.
 */
function handleKeydown(e: KeyboardEvent) {
  switch (e.key) {
    case " ":
    case "k":
      e.preventDefault(); // stop the page from scrolling on space
      togglePlayPause();
      break;
    case "f":
      toggleFullscreen();
      break;
    case "Escape":
      // In fullscreen, Escape backs out of fullscreen first; otherwise it
      // closes the player entirely.
      if (isFullscreen) {
        document.exitFullscreen();
      } else {
        onClose();
      }
      break;
    case "ArrowLeft":
      e.preventDefault();
      seekRelative(-10);
      break;
    case "ArrowRight":
      e.preventDefault();
      seekRelative(10);
      break;
  }
}
// Touch gesture handlers
/**
 * Record the touch origin for swipe tracking and detect double taps
 * (two taps within 300ms trigger a 10s seek via handleDoubleTap).
 */
function handleTouchStart(e: TouchEvent) {
  const touch = e.touches[0];
  touchStartX = touch.clientX;
  touchStartY = touch.clientY;
  touchStartTime = Date.now();
  const now = Date.now();
  const sinceLastTap = now - lastTapTime;
  const isDoubleTap = sinceLastTap > 0 && sinceLastTap < 300;
  if (!isDoubleTap) {
    // First tap: remember when it happened, and forget it again if no
    // second tap lands inside the 300ms window.
    lastTapTime = now;
    tapTimeout = setTimeout(() => {
      lastTapTime = 0;
    }, 300);
    return;
  }
  // Second tap inside the window: treat as a double tap.
  e.preventDefault();
  handleDoubleTap(touch.clientX);
  lastTapTime = 0; // Reset to prevent triple-tap
  if (tapTimeout) {
    clearTimeout(tapTimeout);
    tapTimeout = null;
  }
}
/**
 * Vertical swipes adjust screen brightness while the finger is down.
 * (The unused horizontal delta from the original has been removed — it
 * was never read and only produced a lint warning.)
 */
function handleTouchMove(e: TouchEvent) {
  const touch = e.touches[0];
  if (!touch) return;
  const deltaY = touch.clientY - touchStartY;
  const timeDelta = Date.now() - touchStartTime;
  // Minimum movement to register as swipe (50px) and ignore near-instant jitter
  if (Math.abs(deltaY) <= 50 || timeDelta <= 50) return;
  swipeGestureActive = true;
  swipeType = "brightness";
  // Map vertical swipe to brightness; swipe up = brighter. Clamp to
  // 0.3–1.7 so the picture never goes fully dark or blown out.
  const brightnessChange = -deltaY / 300;
  brightness = Math.max(0.3, Math.min(1.7, 1 + brightnessChange));
  // Re-anchor the start point so the adjustment is continuous across
  // successive move events instead of accelerating.
  touchStartY = touch.clientY;
}
// End of a touch gesture: clear swipe state so the brightness overlay
// hides. (The unused TouchEvent parameter was removed — it triggered an
// unused-variable warning and a zero-arg handler is still assignable to
// the ontouchend attribute.)
function handleTouchEnd() {
  swipeGestureActive = false;
  swipeType = null;
}
/**
 * Double-tap seek: left half of the screen rewinds 10s, right half skips
 * forward 10s, with a brief on-screen feedback icon on the tapped side.
 */
function handleDoubleTap(x: number) {
  const tappedLeftHalf = x < window.innerWidth / 2;
  seekRelative(tappedLeftHalf ? -10 : 10);
  showDoubleTapFeedback = tappedLeftHalf ? "left" : "right";
  // Restart the feedback timer so rapid double-taps keep the icon visible
  if (doubleTapFeedbackTimeout) {
    clearTimeout(doubleTapFeedbackTimeout);
  }
  doubleTapFeedbackTimeout = setTimeout(() => {
    showDoubleTapFeedback = null;
  }, 800);
}
// Show/hide the audio-track picker dropdown in the control bar.
function toggleAudioTrackMenu() {
showAudioTrackMenu = !showAudioTrackMenu;
}
/**
 * Switch the active audio track.
 *
 * @param streamIndex Jellyfin MediaStream index of the chosen track
 * @param arrayIndex  0-based position of the track in the audioTracks() list
 *                    (forwarded to the backend for the native player path)
 *
 * Delegates the decision to the `player_switch_audio_track` Tauri command.
 * When the backend answers with the "reloadStream" strategy (HTML5/HLS
 * path) the current HLS instance is fully torn down and the stream is
 * reloaded at the reported position with the new audio selection. On
 * success the choice is persisted as a per-series preference; on failure
 * the previous selection is restored.
 */
async function selectAudioTrack(streamIndex: number, arrayIndex: number) {
console.log("[VideoPlayer] Selecting audio track - streamIndex:", streamIndex, "arrayIndex:", arrayIndex);
// Optimistically select the new track; reverted in the catch on failure.
const previousTrackIndex = selectedAudioTrackIndex;
selectedAudioTrackIndex = streamIndex;
showAudioTrackMenu = false;
try {
const repo = auth.getRepository();
if (!repo) throw new Error("Not authenticated");
// Call unified backend command
const response = await invoke<{strategy: string, success?: boolean, newUrl?: string, position?: number}>("player_switch_audio_track", {
repositoryHandle: repo.getHandle(),
streamIndex,
arrayIndex,
useHtml5: useHtml5Element,
// Include seekOffset so the backend gets the absolute media position,
// not just the position within the current (possibly offset) stream.
currentPosition: useHtml5Element && videoElement ? videoElement.currentTime + seekOffset : null,
mediaSourceId: mediaSourceId ?? null,
});
// Handle response based on strategy
if (response.strategy === "reloadStream" && useHtml5Element && videoElement) {
console.log("[VideoPlayer] Switching audio track - reloading stream");
// Save state before reload
const wasPlaying = !videoElement.paused;
// CRITICAL: Stop playback completely to prevent dual audio
videoElement.pause();
stopTimeUpdates(); // Stop RAF updates
// CRITICAL: Destroy old HLS instance completely to prevent dual audio
if (hls) {
console.log("[VideoPlayer] Destroying old HLS instance for audio track switch");
hls.detachMedia(); // Detach from video element
hls.stopLoad(); // Stop loading fragments
hls.destroy(); // Completely destroy the instance
hls = null; // Clear reference
}
// CRITICAL: Clear video element buffers completely
if (videoElement.src) {
videoElement.removeAttribute('src');
videoElement.load(); // Reset and flush all buffers
}
// Small delay to ensure cleanup completes before creating new HLS instance
await new Promise(resolve => setTimeout(resolve, 100));
// Update stream URL (this will trigger $effect to create new HLS instance)
currentStreamUrl = response.newUrl!;
seekOffset = response.position!;
// Wait for video to be ready (canplay), with a 5s fallback so a stalled
// load cannot hang this function forever.
await new Promise<void>((resolve) => {
const onCanPlay = () => {
console.log("[VideoPlayer] Video reloaded with new audio track");
videoElement?.removeEventListener("canplay", onCanPlay);
resolve();
};
videoElement!.addEventListener("canplay", onCanPlay);
// Timeout fallback
setTimeout(() => {
videoElement?.removeEventListener("canplay", onCanPlay);
resolve();
}, 5000);
});
// Resume playback if it was playing
if (wasPlaying) {
await videoElement.play();
startTimeUpdates(); // Restart RAF updates
}
}
console.log("[VideoPlayer] Successfully changed audio track");
// Save series audio preference for future episodes (best-effort: a
// storage failure only logs a warning and does not undo the switch)
if (media && media.seriesId) {
try {
const userId = auth.getUserId();
if (!userId) return;
// Find the selected track info
const selectedTrack = audioTracks().find(t => t.index === streamIndex);
if (selectedTrack) {
await invoke("storage_save_series_audio_preference", {
userId,
seriesId: media.seriesId,
serverId: media.serverId,
audioTrackDisplayTitle: selectedTrack.displayTitle || null,
audioTrackLanguage: selectedTrack.language || null,
audioTrackIndex: streamIndex,
});
console.log("[VideoPlayer] Saved series audio preference:", selectedTrack.displayTitle || selectedTrack.language);
}
} catch (err) {
console.warn("[VideoPlayer] Failed to save series audio preference:", err);
}
}
} catch (err) {
console.error("[VideoPlayer] Failed to change audio track:", err);
// Revert to previous track on error
selectedAudioTrackIndex = previousTrackIndex;
}
}
// Show/hide the subtitle picker dropdown in the control bar.
function toggleSubtitleMenu() {
showSubtitleMenu = !showSubtitleMenu;
}
/**
 * Select (or disable) a subtitle track.
 *
 * @param streamIndex Jellyfin MediaStream index, or null to turn subtitles off
 * @param arrayIndex  0-based position within the subtitleTracks() list;
 *                    used by the native (ExoPlayer) backend, which addresses
 *                    tracks by array position rather than stream index
 *
 * HTML5 path: toggles the video element's TextTrack modes, matching track
 * elements via their data-stream-index attribute. Native path: forwards the
 * selection to the backend via the player_set_subtitle_track command.
 */
async function selectSubtitle(streamIndex: number | null, arrayIndex?: number) {
console.log("[VideoPlayer] Selecting subtitle - streamIndex:", streamIndex, "arrayIndex:", arrayIndex);
selectedSubtitleIndex = streamIndex;
showSubtitleMenu = false;
// For HTML5 video element, update the text tracks
if (useHtml5Element && videoElement && videoElement.textTracks) {
// Disable all text tracks first
for (let i = 0; i < videoElement.textTracks.length; i++) {
videoElement.textTracks[i].mode = "disabled";
}
// Enable the selected track if not null
if (streamIndex !== null) {
// Find the corresponding track element by stream index
const tracks = videoElement.querySelectorAll("track");
tracks.forEach((track) => {
const trackStreamIndex = parseInt(track.getAttribute("data-stream-index") || "-1");
if (trackStreamIndex === streamIndex) {
const textTrack = track.track;
if (textTrack) {
textTrack.mode = "showing";
console.log("[VideoPlayer] Enabled subtitle track:", streamIndex);
}
}
});
}
} else if (!useHtml5Element) {
// For native backend (Android), send command to change subtitle track
try {
// Use array index for ExoPlayer (0-based position in subtitle tracks array)
// If streamIndex is null (disable), pass null; otherwise use arrayIndex
const indexToUse = streamIndex === null ? null : (arrayIndex !== undefined ? arrayIndex : streamIndex);
await invoke("player_set_subtitle_track", { streamIndex: indexToUse });
console.log("[VideoPlayer] Native backend subtitle track changed - arrayIndex:", arrayIndex, "used:", indexToUse);
} catch (error) {
console.error("[VideoPlayer] Failed to set subtitle track:", error);
}
}
}
// Get subtitle URL for a given stream index
/**
 * Resolve a WebVTT subtitle URL for the given Jellyfin stream index.
 * Returns "" when the player has no media/source context, when there is no
 * authenticated repository, or when the repository call fails — callers may
 * treat an empty string as "no subtitle available".
 */
async function getSubtitleUrl(streamIndex: number): Promise<string> {
  if (!media || !mediaSourceId) return "";
  try {
    const repo = auth.getRepository();
    // Guard against a logged-out state explicitly (matching the `if (!repo)`
    // checks used elsewhere in this component) instead of relying on the
    // catch below to swallow the null dereference.
    if (!repo) return "";
    // mediaSourceId is known truthy here, so the previous `?? ""` fallback
    // was dead code and has been dropped.
    return await repo.getSubtitleUrl(media.id, mediaSourceId, streamIndex, "vtt");
  } catch {
    return "";
  }
}
</script>
<!-- Keyboard shortcuts are handled globally while the player is mounted -->
<svelte:window onkeydown={handleKeydown} />
<div
class="fixed inset-0 flex flex-col z-50"
class:bg-black={useHtml5Element}
style:background-color={!useHtml5Element ? 'transparent' : ''}
onmousemove={handleMouseMove}
ontouchstart={handleTouchStart}
ontouchmove={handleTouchMove}
ontouchend={handleTouchEnd}
role="application"
aria-label="Video player"
>
<!-- Video -->
<div class="flex-1 flex items-center justify-center relative">
{#if useHtml5Element}
<!-- HTML5 video for desktop/non-Android platforms -->
<video
bind:this={videoElement}
src={currentStreamUrl.includes('.m3u8') && Hls.isSupported() ? '' : currentStreamUrl}
class="max-w-full max-h-full"
class:invisible={!isMediaReady}
style="filter: brightness({brightness})"
playsinline
autoplay
muted={false}
ontimeupdate={handleTimeUpdate}
onloadedmetadata={handleLoadedMetadata}
oncanplay={handleCanPlay}
onplay={handlePlay}
onpause={handlePause}
onended={handleEnded}
onerror={handleError}
onwaiting={handleWaiting}
onplaying={handlePlaying}
onloadstart={handleLoadStart}
onclick={togglePlayPause}
>
<!-- Temporarily disabled to debug playback issues
{#each subtitleTracks() as track}
<track
kind="subtitles"
src={getSubtitleUrl(track.index)}
srclang={track.language || "unknown"}
label={track.displayTitle || track.language || `Track ${track.index}`}
data-stream-index={track.index}
default={track.isDefault}
/>
{/each}
-->
</video>
{:else}
<!-- Android ExoPlayer - video rendered natively in SurfaceView behind WebView -->
<!-- Leave this area transparent so video shows through -->
<div class="flex-1"></div>
{/if}
<!-- Title card with loading spinner (Loading state from DR-001) -->
{#if !isMediaReady}
<div class="absolute inset-0 flex items-center justify-center bg-black">
<!-- Poster/Title Card -->
{#if media?.primaryImageTag}
<CachedImage
itemId={media.id}
imageType="Primary"
tag={media.primaryImageTag}
maxHeight={1080}
alt={media?.name || "Video"}
class="max-w-full max-h-full object-contain"
/>
{:else}
<div class="text-white text-2xl font-semibold px-8 text-center">
{media?.name || "Loading..."}
</div>
{/if}
<!-- Loading spinner overlay -->
<div class="absolute inset-0 flex items-center justify-center bg-black/50">
<div class="w-16 h-16 border-4 border-white border-t-transparent rounded-full animate-spin"></div>
</div>
</div>
{/if}
<!-- Double-tap feedback overlays -->
{#if showDoubleTapFeedback === "left"}
<div class="absolute left-8 top-1/2 -translate-y-1/2 pointer-events-none animate-fade-out">
<div class="bg-white/20 rounded-full p-6 backdrop-blur-sm">
<svg class="w-12 h-12 text-white" fill="currentColor" viewBox="0 0 24 24">
<path d="M11.99 2C6.47 2 2 6.48 2 12s4.47 10 9.99 10C17.52 22 22 17.52 22 12S17.52 2 11.99 2zM12 20c-4.42 0-8-3.58-8-8s3.58-8 8-8 8 3.58 8 8-3.58 8-8 8zm1-13H11v6l5.25 3.15.75-1.23-4-2.42z" />
<text x="12" y="14" text-anchor="middle" font-size="6" fill="white" font-weight="bold">-10</text>
</svg>
</div>
</div>
{/if}
{#if showDoubleTapFeedback === "right"}
<div class="absolute right-8 top-1/2 -translate-y-1/2 pointer-events-none animate-fade-out">
<div class="bg-white/20 rounded-full p-6 backdrop-blur-sm">
<svg class="w-12 h-12 text-white" fill="currentColor" viewBox="0 0 24 24">
<path d="M11.99 2C6.47 2 2 6.48 2 12s4.47 10 9.99 10C17.52 22 22 17.52 22 12S17.52 2 11.99 2zM12 20c-4.42 0-8-3.58-8-8s3.58-8 8-8 8 3.58 8 8-3.58 8-8 8zm1-13H11v6l5.25 3.15.75-1.23-4-2.42z" />
<text x="12" y="14" text-anchor="middle" font-size="6" fill="white" font-weight="bold">+10</text>
</svg>
</div>
</div>
{/if}
<!-- Swipe gesture feedback -->
{#if swipeGestureActive && swipeType === "brightness"}
<div class="absolute left-8 top-1/2 -translate-y-1/2 pointer-events-none">
<div class="bg-black/60 rounded-lg px-4 py-3 backdrop-blur-sm flex items-center gap-3">
<svg class="w-6 h-6 text-white" fill="currentColor" viewBox="0 0 24 24">
<path d="M20 8.69V4h-4.69L12 .69 8.69 4H4v4.69L.69 12 4 15.31V20h4.69L12 23.31 15.31 20H20v-4.69L23.31 12 20 8.69zM12 18c-3.31 0-6-2.69-6-6s2.69-6 6-6 6 2.69 6 6-2.69 6-6 6zm0-10c-2.21 0-4 1.79-4 4s1.79 4 4 4 4-1.79 4-4-1.79-4-4-4z" />
</svg>
<div class="flex flex-col">
<span class="text-white text-xs font-medium">Brightness</span>
<div class="w-24 h-1 bg-white/30 rounded-full mt-1">
<!-- Map brightness (0.3–1.7) onto a 0–100% bar width -->
<div class="h-full bg-white rounded-full" style="width: {((brightness - 0.3) / 1.4) * 100}%"></div>
</div>
</div>
</div>
</div>
{/if}
<!-- Loading overlay for seeking -->
{#if isSeeking}
<div class="absolute inset-0 flex items-center justify-center bg-black/50">
<div class="w-12 h-12 border-4 border-white border-t-transparent rounded-full animate-spin"></div>
</div>
{:else if !isPlaying}
<!-- Play/Pause overlay -->
<button
class="absolute inset-0 flex items-center justify-center bg-black/30"
onclick={togglePlayPause}
aria-label="Play"
>
<svg class="w-20 h-20 text-white" fill="currentColor" viewBox="0 0 24 24">
<path d="M8 5v14l11-7z" />
</svg>
</button>
{/if}
</div>
<!-- Controls -->
<div
class="absolute bottom-0 left-0 right-0 bg-gradient-to-t from-black/80 to-transparent p-4 transition-opacity duration-300"
class:opacity-0={!showControls}
class:pointer-events-none={!showControls}
>
<!-- Title -->
<div class="mb-2">
<h2 class="text-white text-lg font-semibold">{media?.name || "Video"}</h2>
</div>
<!-- Progress bar -->
<div class="flex items-center gap-2 mb-2">
<span class="text-white text-sm w-12">{formatTime(currentTime)}</span>
<input
type="range"
min="0"
max={duration || 100}
value={currentTime}
oninput={handleSeekBarInput}
onchange={handleSeekBarChange}
onmousedown={() => isDraggingSeekBar = true}
onmouseup={() => isDraggingSeekBar = false}
ontouchstart={() => isDraggingSeekBar = true}
ontouchend={() => isDraggingSeekBar = false}
class="flex-1 h-1 bg-white/30 rounded-full appearance-none cursor-pointer
[&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:w-3 [&::-webkit-slider-thumb]:h-3
[&::-webkit-slider-thumb]:bg-white [&::-webkit-slider-thumb]:rounded-full"
/>
<span class="text-white text-sm w-12 text-right">{formatTime(duration)}</span>
</div>
<!-- Control buttons -->
<div class="flex items-center justify-between">
<div class="flex items-center gap-4">
<!-- Play/Pause -->
<button onclick={togglePlayPause} class="text-white hover:text-gray-300" aria-label={isPlaying ? "Pause" : "Play"}>
{#if isPlaying}
<svg class="w-8 h-8" fill="currentColor" viewBox="0 0 24 24">
<path d="M6 19h4V5H6v14zm8-14v14h4V5h-4z" />
</svg>
{:else}
<svg class="w-8 h-8" fill="currentColor" viewBox="0 0 24 24">
<path d="M8 5v14l11-7z" />
</svg>
{/if}
</button>
<!-- Next Episode -->
{#if hasNext}
<button onclick={onNext} class="text-white hover:text-gray-300" aria-label="Next episode">
<svg class="w-7 h-7" fill="currentColor" viewBox="0 0 24 24">
<path d="M6 18l8.5-6L6 6v12zM16 6v12h2V6h-2z" />
</svg>
</button>
{/if}
</div>
<div class="flex items-center gap-4">
<!-- Audio Track Selection (only shown when there is a choice to make) -->
{#if audioTracks().length > 1}
<div class="relative">
<button
onclick={toggleAudioTrackMenu}
class="text-white hover:text-gray-300"
aria-label="Select audio track"
>
<svg class="w-6 h-6" fill="currentColor" viewBox="0 0 24 24">
<path d="M3 9v6h4l5 5V4L7 9H3zm13.5 3c0-1.77-1.02-3.29-2.5-4.03v8.05c1.48-.73 2.5-2.25 2.5-4.02zM14 3.23v2.06c2.89.86 5 3.54 5 6.71s-2.11 5.85-5 6.71v2.06c4.01-.91 7-4.49 7-8.77s-2.99-7.86-7-8.77z"/>
</svg>
</button>
<!-- Audio Track Menu -->
{#if showAudioTrackMenu}
<div class="absolute bottom-full right-0 mb-2 bg-black/90 backdrop-blur-sm rounded-lg shadow-xl min-w-[200px] max-h-[300px] overflow-y-auto">
<div class="p-2">
<div class="text-white text-sm font-semibold px-3 py-2 border-b border-white/20">
Audio Track
</div>
{#each audioTracks() as track, i}
<button
onclick={() => selectAudioTrack(track.index, i)}
class="w-full text-left px-3 py-2 text-white hover:bg-white/10 rounded transition-colors flex items-center justify-between {selectedAudioTrackIndex === track.index ? 'bg-white/20' : ''}"
>
<span class="text-sm">
{track.displayTitle || track.language || `Track ${i + 1}`}
{#if track.isDefault}
<span class="text-xs text-gray-400 ml-1">(Default)</span>
{/if}
</span>
{#if selectedAudioTrackIndex === track.index}
<svg class="w-4 h-4 text-[var(--color-jellyfin)]" fill="currentColor" viewBox="0 0 24 24">
<path d="M9 16.17L4.83 12l-1.42 1.41L9 19 21 7l-1.41-1.41z"/>
</svg>
{/if}
</button>
{/each}
</div>
</div>
{/if}
</div>
{/if}
<!-- Subtitle Selection -->
{#if subtitleTracks().length > 0}
<div class="relative">
<button
onclick={toggleSubtitleMenu}
class="text-white hover:text-gray-300"
aria-label="Select subtitles"
>
<svg class="w-6 h-6" fill="currentColor" viewBox="0 0 24 24">
<path d="M20 4H4c-1.1 0-2 .9-2 2v12c0 1.1.9 2 2 2h16c1.1 0 2-.9 2-2V6c0-1.1-.9-2-2-2zM4 12h4v2H4v-2zm10 6H4v-2h10v2zm6 0h-4v-2h4v2zm0-4H10v-2h10v2z"/>
</svg>
</button>
<!-- Subtitle Menu -->
{#if showSubtitleMenu}
<div class="absolute bottom-full right-0 mb-2 bg-black/90 backdrop-blur-sm rounded-lg shadow-xl min-w-[200px] max-h-[300px] overflow-y-auto">
<div class="p-2">
<div class="text-white text-sm font-semibold px-3 py-2 border-b border-white/20">
Subtitles
</div>
<!-- Off option -->
<button
onclick={() => selectSubtitle(null)}
class="w-full text-left px-3 py-2 text-white hover:bg-white/10 rounded transition-colors flex items-center justify-between {selectedSubtitleIndex === null ? 'bg-white/20' : ''}"
>
<span class="text-sm">Off</span>
{#if selectedSubtitleIndex === null}
<svg class="w-4 h-4 text-[var(--color-jellyfin)]" fill="currentColor" viewBox="0 0 24 24">
<path d="M9 16.17L4.83 12l-1.42 1.41L9 19 21 7l-1.41-1.41z"/>
</svg>
{/if}
</button>
<!-- Subtitle tracks -->
{#each subtitleTracks() as track, i}
<button
onclick={() => selectSubtitle(track.index, i)}
class="w-full text-left px-3 py-2 text-white hover:bg-white/10 rounded transition-colors flex items-center justify-between {selectedSubtitleIndex === track.index ? 'bg-white/20' : ''}"
>
<div class="flex flex-col">
<span class="text-sm">
{track.displayTitle || track.language || `Track ${track.index}`}
{#if track.isDefault}
<span class="text-xs text-gray-400 ml-1">(Default)</span>
{/if}
{#if track.isForced}
<span class="text-xs text-gray-400 ml-1">(Forced)</span>
{/if}
</span>
{#if track.codec}
<span class="text-xs text-gray-500">{track.codec.toUpperCase()}</span>
{/if}
</div>
{#if selectedSubtitleIndex === track.index}
<svg class="w-4 h-4 text-[var(--color-jellyfin)]" fill="currentColor" viewBox="0 0 24 24">
<path d="M9 16.17L4.83 12l-1.42 1.41L9 19 21 7l-1.41-1.41z"/>
</svg>
{/if}
</button>
{/each}
</div>
</div>
{/if}
</div>
{/if}
<!-- Sleep Timer -->
{#if $sleepTimerActive}
<SleepTimerIndicator onClick={() => { showSleepTimerModal = true; }} />
{:else}
<button
onclick={() => { showSleepTimerModal = true; }}
class="text-white hover:text-gray-300"
aria-label="Sleep timer"
>
<svg class="w-6 h-6" fill="currentColor" viewBox="0 0 24 24">
<path d="M11.99 2C6.47 2 2 6.48 2 12s4.47 10 9.99 10C17.52 22 22 17.52 22 12S17.52 2 11.99 2zM12 20c-4.42 0-8-3.58-8-8s3.58-8 8-8 8 3.58 8 8-3.58 8-8 8zm.5-13H11v6l5.25 3.15.75-1.23-4.5-2.67z" />
</svg>
</button>
{/if}
<!-- Volume Control -->
<VolumeControl size="md" />
<!-- Fullscreen -->
<button onclick={toggleFullscreen} class="text-white hover:text-gray-300" aria-label="Toggle fullscreen">
<svg class="w-6 h-6" fill="currentColor" viewBox="0 0 24 24">
{#if isFullscreen}
<path d="M5 16h3v3h2v-5H5v2zm3-8H5v2h5V5H8v3zm6 11h2v-3h3v-2h-5v5zm2-11V5h-2v5h5V8h-3z" />
{:else}
<path d="M7 14H5v5h5v-2H7v-3zm-2-4h2V7h3V5H5v5zm12 7h-3v2h5v-5h-2v3zM14 5v2h3v3h2V5h-5z" />
{/if}
</svg>
</button>
<!-- Close -->
<button onclick={onClose} class="text-white hover:text-gray-300" aria-label="Close">
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12" />
</svg>
</button>
</div>
</div>
</div>
</div>
<SleepTimerModal isOpen={showSleepTimerModal} onClose={() => { showSleepTimerModal = false; }} mediaType={media?.type} />
<style>
/* Double-tap seek feedback: hold at full opacity while briefly enlarging,
   then shrink and fade away. translateY(-50%) is repeated in every
   keyframe so the overlay stays vertically centered while it scales. */
@keyframes fade-out {
0% {
opacity: 1;
transform: translateY(-50%) scale(1);
}
50% {
opacity: 1;
transform: translateY(-50%) scale(1.1);
}
100% {
opacity: 0;
transform: translateY(-50%) scale(0.9);
}
}
/* Applied to the ±10s double-tap overlays; 'forwards' keeps the final
   faded-out state until the element is removed. */
.animate-fade-out {
animation: fade-out 0.8s ease-out forwards;
}
</style>