working subs

Alex Kim
2025-12-07 01:19:21 +11:00
parent bc78346760
commit 2648877eb8
23 changed files with 922 additions and 585 deletions

View File

@@ -1,4 +1,3 @@
import { ItemFields } from "@jellyfin/sdk/lib/generated-client/models";
import { useLocalSearchParams } from "expo-router";
import type React from "react";
import { useEffect } from "react";
@@ -21,14 +20,8 @@ const Page: React.FC = () => {
const { offline } = useLocalSearchParams() as { offline?: string };
const isOffline = offline === "true";
const { data: item, isError } = useItemQuery(id, false, undefined, [
ItemFields.MediaSources,
ItemFields.MediaSourceCount,
ItemFields.MediaStreams,
]);
// preload media sources
const { data: itemWithSources } = useItemQuery(id, false, undefined, []);
// Fetch item with all fields including MediaSources
const { data: item, isError } = useItemQuery(id, isOffline, undefined, []);
const opacity = useSharedValue(1);
const animatedStyle = useAnimatedStyle(() => {
@@ -98,13 +91,7 @@ const Page: React.FC = () => {
<View className='h-12 bg-neutral-900 rounded-lg w-full mb-2' />
<View className='h-24 bg-neutral-900 rounded-lg mb-1 w-full' />
</Animated.View>
{item && (
<ItemContent
item={item}
isOffline={isOffline}
itemWithSources={itemWithSources}
/>
)}
{item && <ItemContent item={item} isOffline={isOffline} />}
</View>
);
};

View File

@@ -22,6 +22,8 @@ import { BITRATES } from "@/components/BitrateSelector";
import { Text } from "@/components/common/Text";
import { Loader } from "@/components/Loader";
import { Controls } from "@/components/video-player/controls/Controls";
import { PlayerProvider } from "@/components/video-player/controls/contexts/PlayerContext";
import { VideoProvider } from "@/components/video-player/controls/contexts/VideoContext";
import { useHaptic } from "@/hooks/useHaptic";
import { useOrientation } from "@/hooks/useOrientation";
import { usePlaybackManager } from "@/hooks/usePlaybackManager";
@@ -32,12 +34,17 @@ import {
type MpvPlayerViewRef,
type OnPlaybackStateChangePayload,
type OnProgressEventPayload,
type VideoSource,
} from "@/modules";
import { useDownload } from "@/providers/DownloadProvider";
import { DownloadedItem } from "@/providers/Downloads/types";
import { apiAtom, userAtom } from "@/providers/JellyfinProvider";
import { useSettings } from "@/utils/atoms/settings";
import { getStreamUrl } from "@/utils/jellyfin/media/getStreamUrl";
import {
getMpvAudioId,
getMpvSubtitleId,
} from "@/utils/jellyfin/subtitleUtils";
import { writeToLog } from "@/utils/log";
import { generateDeviceProfile } from "@/utils/profiles/native";
import { msToTicks, ticksToSeconds } from "@/utils/time";
@@ -62,6 +69,7 @@ export default function page() {
const [isMuted, setIsMuted] = useState(false);
const [isBuffering, setIsBuffering] = useState(true);
const [isVideoLoaded, setIsVideoLoaded] = useState(false);
const [trackCount, setTrackCount] = useState(0);
const progress = useSharedValue(0);
const isSeeking = useSharedValue(false);
@@ -223,8 +231,6 @@ export default function page() {
return;
}
const native = generateDeviceProfile();
const transcoding = generateDeviceProfile({ transcode: true });
const res = await getStreamUrl({
api,
item,
@@ -234,7 +240,7 @@ export default function page() {
maxStreamingBitrate: bitrateValue,
mediaSourceId: mediaSourceId,
subtitleStreamIndex: subtitleIndex,
deviceProfile: bitrateValue ? transcoding : native,
deviceProfile: generateDeviceProfile(),
});
if (!res) return;
const { mediaSource, sessionId, url } = res;
@@ -426,6 +432,46 @@ export default function page() {
return ticksToSeconds(getInitialPlaybackTicks());
}, [getInitialPlaybackTicks]);
/** Build video source config for the native player */
const videoSource = useMemo<VideoSource | undefined>(() => {
if (!stream?.url) return undefined;
const mediaSource = stream.mediaSource;
const isTranscoding = Boolean(mediaSource?.TranscodingUrl);
// Get external subtitle URLs
const externalSubs = mediaSource?.MediaStreams?.filter(
(s) =>
s.Type === "Subtitle" &&
s.DeliveryMethod === "External" &&
s.DeliveryUrl,
).map((s) => `${api?.basePath}${s.DeliveryUrl}`);
// Calculate MPV track IDs for initial selection
const initialSubtitleId = getMpvSubtitleId(
mediaSource,
subtitleIndex,
isTranscoding,
);
const initialAudioId = getMpvAudioId(mediaSource, audioIndex);
return {
url: stream.url,
startPosition,
autoplay: true,
externalSubtitles: externalSubs,
initialSubtitleId,
initialAudioId,
};
}, [
stream?.url,
stream?.mediaSource,
startPosition,
api?.basePath,
subtitleIndex,
audioIndex,
]);
const volumeUpCb = useCallback(async () => {
if (Platform.isTV) return;
@@ -565,26 +611,6 @@ export default function page() {
videoRef.current?.seekTo?.(position / 1000);
}, []);
const getSubtitleTracks = useCallback(async () => {
return videoRef.current?.getSubtitleTracks?.() || null;
}, []);
const setSubtitleTrack = useCallback((index: number) => {
videoRef.current?.setSubtitleTrack?.(index);
}, []);
const setSubtitleURL = useCallback((url: string, _customName?: string) => {
videoRef.current?.addSubtitleFile?.(url);
}, []);
const getAudioTracks = useCallback(async () => {
return videoRef.current?.getAudioTracks?.() || null;
}, []);
const setAudioTrack = useCallback((index: number) => {
videoRef.current?.setAudioTrack?.(index);
}, []);
// Apply MPV subtitle settings when video loads
useEffect(() => {
if (!isVideoLoaded || !videoRef.current) return;
@@ -643,81 +669,81 @@ export default function page() {
);
return (
<View
style={{
flex: 1,
backgroundColor: "black",
height: "100%",
width: "100%",
}}
<PlayerProvider
playerRef={videoRef}
item={item}
mediaSource={stream?.mediaSource}
isVideoLoaded={isVideoLoaded}
trackCount={trackCount}
>
<View
style={{
display: "flex",
width: "100%",
height: "100%",
position: "relative",
flexDirection: "column",
justifyContent: "center",
}}
>
<MpvPlayerView
ref={videoRef}
url={stream?.url || ""}
autoplay={true}
style={{ width: "100%", height: "100%" }}
onProgress={onProgress}
onPlaybackStateChange={onPlaybackStateChanged}
onLoad={() => {
setIsVideoLoaded(true);
// Seek to start position after load
if (startPosition > 0) {
videoRef.current?.seekTo(startPosition);
}
<VideoProvider>
<View
style={{
flex: 1,
backgroundColor: "black",
height: "100%",
width: "100%",
}}
onError={(e) => {
console.error("Video Error:", e.nativeEvent);
Alert.alert(
t("player.error"),
t("player.an_error_occured_while_playing_the_video"),
);
writeToLog("ERROR", "Video Error", e.nativeEvent);
}}
/>
</View>
{isMounted === true && item && !isPipMode && (
<Controls
mediaSource={stream?.mediaSource}
item={item}
videoRef={videoRef}
togglePlay={togglePlay}
isPlaying={isPlaying}
isSeeking={isSeeking}
progress={progress}
cacheProgress={cacheProgress}
isBuffering={isBuffering}
showControls={showControls}
setShowControls={setShowControls}
isVideoLoaded={isVideoLoaded}
startPictureInPicture={startPictureInPicture}
play={play}
pause={pause}
seek={seek}
enableTrickplay={true}
getSubtitleTracks={getSubtitleTracks}
getAudioTracks={getAudioTracks}
offline={offline}
setSubtitleTrack={setSubtitleTrack}
setAudioTrack={setAudioTrack}
setSubtitleURL={setSubtitleURL}
aspectRatio={aspectRatio}
scaleFactor={scaleFactor}
setAspectRatio={setAspectRatio}
setScaleFactor={setScaleFactor}
api={api}
downloadedFiles={downloadedFiles}
/>
)}
</View>
>
<View
style={{
display: "flex",
width: "100%",
height: "100%",
position: "relative",
flexDirection: "column",
justifyContent: "center",
}}
>
<MpvPlayerView
ref={videoRef}
source={videoSource}
style={{ width: "100%", height: "100%" }}
onProgress={onProgress}
onPlaybackStateChange={onPlaybackStateChanged}
onLoad={() => setIsVideoLoaded(true)}
onError={(e) => {
console.error("Video Error:", e.nativeEvent);
Alert.alert(
t("player.error"),
t("player.an_error_occured_while_playing_the_video"),
);
writeToLog("ERROR", "Video Error", e.nativeEvent);
}}
onTracksReady={(e) => {
console.log("[Player] Tracks ready:", e.nativeEvent.trackCount);
setTrackCount(e.nativeEvent.trackCount);
}}
/>
</View>
{isMounted === true && item && !isPipMode && (
<Controls
mediaSource={stream?.mediaSource}
item={item}
togglePlay={togglePlay}
isPlaying={isPlaying}
isSeeking={isSeeking}
progress={progress}
cacheProgress={cacheProgress}
isBuffering={isBuffering}
showControls={showControls}
setShowControls={setShowControls}
startPictureInPicture={startPictureInPicture}
play={play}
pause={pause}
seek={seek}
enableTrickplay={true}
offline={offline}
aspectRatio={aspectRatio}
scaleFactor={scaleFactor}
setAspectRatio={setAspectRatio}
setScaleFactor={setScaleFactor}
api={api}
downloadedFiles={downloadedFiles}
/>
)}
</View>
</VideoProvider>
</PlayerProvider>
);
}
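For reference, a minimal sketch of the object passed to the new source prop (values here are hypothetical; the shape follows the VideoSource type added in this commit):

import type { VideoSource } from "@/modules";

const exampleSource: VideoSource = {
  // Stream URL returned by getStreamUrl (hypothetical server/path)
  url: "https://jellyfin.example.com/videos/123/master.m3u8",
  startPosition: 125, // seconds, seeked to by the native renderer on load
  autoplay: true,
  // External text subtitles are added with sub-add after the file loads
  externalSubtitles: [
    "https://jellyfin.example.com/Videos/123/abc/Subtitles/4/0/Stream.srt",
  ],
  initialSubtitleId: 1, // MPV sid (1-based); -1 disables subtitles
  initialAudioId: 1, // MPV aid (1-based)
};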

View File

@@ -46,11 +46,10 @@ export type SelectedOptions = {
interface ItemContentProps {
item: BaseItemDto;
isOffline: boolean;
itemWithSources?: BaseItemDto | null;
}
export const ItemContent: React.FC<ItemContentProps> = React.memo(
({ item, isOffline, itemWithSources }) => {
({ item, isOffline }) => {
const [api] = useAtom(apiAtom);
const { settings } = useSettings();
const { orientation } = useOrientation();
@@ -72,7 +71,13 @@ export const ItemContent: React.FC<ItemContentProps> = React.memo(
defaultBitrate,
defaultMediaSource,
defaultSubtitleIndex,
} = useDefaultPlaySettings(item!, settings);
} = useDefaultPlaySettings(item, settings);
console.log("defaultMediaSource", {
defaultAudioIndex,
defaultBitrate,
defaultSubtitleIndex,
});
const logoUrl = useMemo(
() => (item ? getLogoImageUrlById({ api, item }) : null),
@@ -87,7 +92,7 @@ export const ItemContent: React.FC<ItemContentProps> = React.memo(
useEffect(() => {
setSelectedOptions(() => ({
bitrate: defaultBitrate,
mediaSource: defaultMediaSource,
mediaSource: defaultMediaSource ?? undefined,
subtitleIndex: defaultSubtitleIndex ?? -1,
audioIndex: defaultAudioIndex,
}));
@@ -99,7 +104,7 @@ export const ItemContent: React.FC<ItemContentProps> = React.memo(
]);
useEffect(() => {
if (!Platform.isTV && itemWithSources) {
if (!Platform.isTV && item) {
navigation.setOptions({
headerRight: () =>
item &&
@@ -109,7 +114,7 @@ export const ItemContent: React.FC<ItemContentProps> = React.memo(
{item.Type !== "Program" && (
<View className='flex flex-row items-center'>
{!Platform.isTV && (
<DownloadSingleItem item={itemWithSources} size='large' />
<DownloadSingleItem item={item} size='large' />
)}
{user?.Policy?.IsAdministrator && (
<PlayInRemoteSessionButton item={item} size='large' />
@@ -126,7 +131,7 @@ export const ItemContent: React.FC<ItemContentProps> = React.memo(
{item.Type !== "Program" && (
<View className='flex flex-row items-center space-x-2'>
{!Platform.isTV && (
<DownloadSingleItem item={itemWithSources} size='large' />
<DownloadSingleItem item={item} size='large' />
)}
{user?.Policy?.IsAdministrator && (
<PlayInRemoteSessionButton item={item} size='large' />
@@ -140,7 +145,7 @@ export const ItemContent: React.FC<ItemContentProps> = React.memo(
)),
});
}
}, [item, navigation, user, itemWithSources]);
}, [item, navigation, user]);
useEffect(() => {
if (item) {
@@ -213,7 +218,7 @@ export const ItemContent: React.FC<ItemContentProps> = React.memo(
<MediaSourceButton
selectedOptions={selectedOptions}
setSelectedOptions={setSelectedOptions}
item={itemWithSources}
item={item}
colors={itemColors}
/>
)}

View File

@@ -104,6 +104,11 @@ export const MediaSourceButton: React.FC<Props> = ({
// Audio track group
if (audioStreams.length > 0) {
console.log("Audio comparison:", {
selectedAudioIndex: selectedOptions.audioIndex,
streamIndices: audioStreams.map((s) => s.Index),
});
groups.push({
title: t("item_card.audio"),
options: audioStreams.map((stream) => ({

View File

@@ -7,7 +7,6 @@ import { useLocalSearchParams, useRouter } from "expo-router";
import {
type Dispatch,
type FC,
type MutableRefObject,
type SetStateAction,
useCallback,
useEffect,
@@ -28,7 +27,6 @@ import { useHaptic } from "@/hooks/useHaptic";
import { useIntroSkipper } from "@/hooks/useIntroSkipper";
import { usePlaybackManager } from "@/hooks/usePlaybackManager";
import { useTrickplay } from "@/hooks/useTrickplay";
import type { AudioTrack, MpvPlayerViewRef, SubtitleTrack } from "@/modules";
import { DownloadedItem } from "@/providers/Downloads/types";
import { useSettings } from "@/utils/atoms/settings";
import { getDefaultPlaySettings } from "@/utils/jellyfin/getDefaultPlaySettings";
@@ -36,7 +34,6 @@ import { ticksToMs } from "@/utils/time";
import { BottomControls } from "./BottomControls";
import { CenterControls } from "./CenterControls";
import { CONTROLS_CONSTANTS } from "./constants";
import { ControlProvider } from "./contexts/ControlContext";
import { EpisodeList } from "./EpisodeList";
import { GestureOverlay } from "./GestureOverlay";
import { HeaderControls } from "./HeaderControls";
@@ -50,31 +47,21 @@ import { type AspectRatio } from "./VideoScalingModeSelector";
interface Props {
item: BaseItemDto;
videoRef: MutableRefObject<MpvPlayerViewRef | null>;
isPlaying: boolean;
isSeeking: SharedValue<boolean>;
cacheProgress: SharedValue<number>;
progress: SharedValue<number>;
isBuffering: boolean;
showControls: boolean;
enableTrickplay?: boolean;
togglePlay: () => void;
setShowControls: (shown: boolean) => void;
offline?: boolean;
isVideoLoaded?: boolean;
mediaSource?: MediaSourceInfo | null;
seek: (ticks: number) => void;
startPictureInPicture?: () => Promise<void>;
play: () => void;
pause: () => void;
getSubtitleTracks?:
| (() => Promise<SubtitleTrack[] | null>)
| (() => SubtitleTrack[]);
getAudioTracks?: (() => Promise<AudioTrack[] | null>) | (() => AudioTrack[]);
setSubtitleURL?: (url: string, customName: string) => void;
setSubtitleTrack?: (index: number) => void;
setAudioTrack?: (index: number) => void;
setVideoAspectRatio?: (aspectRatio: string | null) => Promise<void>;
setVideoScaleFactor?: (scaleFactor: number) => Promise<void>;
aspectRatio?: AspectRatio;
@@ -100,12 +87,6 @@ export const Controls: FC<Props> = ({
showControls,
setShowControls,
mediaSource,
isVideoLoaded,
getSubtitleTracks,
getAudioTracks,
setSubtitleURL,
setSubtitleTrack,
setAudioTrack,
setVideoAspectRatio,
setVideoScaleFactor,
aspectRatio = "default",
@@ -348,12 +329,10 @@ export const Controls: FC<Props> = ({
mediaSource: newMediaSource,
audioIndex: defaultAudioIndex,
subtitleIndex: defaultSubtitleIndex,
} = getDefaultPlaySettings(
item,
settings,
previousIndexes,
mediaSource ?? undefined,
);
} = getDefaultPlaySettings(item, settings, {
indexes: previousIndexes,
source: mediaSource ?? undefined,
});
const queryParams = new URLSearchParams({
...(offline && { offline: "true" }),
@@ -468,11 +447,7 @@ export const Controls: FC<Props> = ({
}, [isPlaying, togglePlay]);
return (
<ControlProvider
item={item}
mediaSource={mediaSource}
isVideoLoaded={isVideoLoaded}
>
<>
{episodeView ? (
<EpisodeList
item={item}
@@ -504,11 +479,6 @@ export const Controls: FC<Props> = ({
goToNextItem={goToNextItem}
previousItem={previousItem}
nextItem={nextItem}
getSubtitleTracks={getSubtitleTracks}
getAudioTracks={getAudioTracks}
setSubtitleTrack={setSubtitleTrack}
setAudioTrack={setAudioTrack}
setSubtitleURL={setSubtitleURL}
aspectRatio={aspectRatio}
scaleFactor={scaleFactor}
setAspectRatio={setAspectRatio}
@@ -570,6 +540,6 @@ export const Controls: FC<Props> = ({
{settings.maxAutoPlayEpisodeCount.value !== -1 && (
<ContinueWatchingOverlay goToNextItem={handleContinueWatching} />
)}
</ControlProvider>
</>
);
};

View File

@@ -15,7 +15,6 @@ import { useSafeAreaInsets } from "react-native-safe-area-context";
import { useHaptic } from "@/hooks/useHaptic";
import { useSettings } from "@/utils/atoms/settings";
import { ICON_SIZES } from "./constants";
import { VideoProvider } from "./contexts/VideoContext";
import DropdownView from "./dropdown/DropdownView";
import { type ScaleFactor, ScaleFactorSelector } from "./ScaleFactorSelector";
import {
@@ -34,11 +33,6 @@ interface HeaderControlsProps {
goToNextItem: (options: { isAutoPlay?: boolean }) => void;
previousItem?: BaseItemDto | null;
nextItem?: BaseItemDto | null;
getSubtitleTracks?: (() => Promise<any[] | null>) | (() => any[]);
getAudioTracks?: (() => Promise<any[] | null>) | (() => any[]);
setSubtitleTrack?: (index: number) => void;
setAudioTrack?: (index: number) => void;
setSubtitleURL?: (url: string, customName: string) => void;
aspectRatio?: AspectRatio;
scaleFactor?: ScaleFactor;
setAspectRatio?: Dispatch<SetStateAction<AspectRatio>>;
@@ -58,11 +52,6 @@ export const HeaderControls: FC<HeaderControlsProps> = ({
goToNextItem,
previousItem,
nextItem,
getSubtitleTracks,
getAudioTracks,
setSubtitleTrack,
setAudioTrack,
setSubtitleURL,
aspectRatio = "default",
scaleFactor = 1.0,
setAspectRatio,
@@ -113,17 +102,9 @@ export const HeaderControls: FC<HeaderControlsProps> = ({
>
<View className='mr-auto' pointerEvents='box-none'>
{!Platform.isTV && (!offline || !mediaSource?.TranscodingUrl) && (
<VideoProvider
getSubtitleTracks={getSubtitleTracks}
getAudioTracks={getAudioTracks}
setSubtitleTrack={setSubtitleTrack}
setAudioTrack={setAudioTrack}
setSubtitleURL={setSubtitleURL}
>
<View pointerEvents='auto'>
<DropdownView />
</View>
</VideoProvider>
<View pointerEvents='auto'>
<DropdownView />
</View>
)}
</View>

View File

@@ -1,44 +0,0 @@
import type {
BaseItemDto,
MediaSourceInfo,
} from "@jellyfin/sdk/lib/generated-client";
import type React from "react";
import { createContext, type ReactNode, useContext } from "react";
interface ControlContextProps {
item: BaseItemDto;
mediaSource: MediaSourceInfo | null | undefined;
isVideoLoaded: boolean | undefined;
}
const ControlContext = createContext<ControlContextProps | undefined>(
undefined,
);
interface ControlProviderProps {
children: ReactNode;
item: BaseItemDto;
mediaSource: MediaSourceInfo | null | undefined;
isVideoLoaded: boolean | undefined;
}
export const ControlProvider: React.FC<ControlProviderProps> = ({
children,
item,
mediaSource,
isVideoLoaded,
}) => {
return (
<ControlContext.Provider value={{ item, mediaSource, isVideoLoaded }}>
{children}
</ControlContext.Provider>
);
};
export const useControlContext = () => {
const context = useContext(ControlContext);
if (context === undefined) {
throw new Error("useControlContext must be used within a ControlProvider");
}
return context;
};

View File

@@ -0,0 +1,107 @@
import type {
BaseItemDto,
MediaSourceInfo,
} from "@jellyfin/sdk/lib/generated-client";
import React, {
createContext,
type MutableRefObject,
type ReactNode,
useContext,
useMemo,
} from "react";
import type { MpvPlayerViewRef } from "@/modules";
interface PlayerContextProps {
playerRef: MutableRefObject<MpvPlayerViewRef | null>;
item: BaseItemDto;
mediaSource: MediaSourceInfo | null | undefined;
isVideoLoaded: boolean;
trackCount: number;
}
const PlayerContext = createContext<PlayerContextProps | undefined>(undefined);
interface PlayerProviderProps {
children: ReactNode;
playerRef: MutableRefObject<MpvPlayerViewRef | null>;
item: BaseItemDto;
mediaSource: MediaSourceInfo | null | undefined;
isVideoLoaded: boolean;
trackCount: number;
}
export const PlayerProvider: React.FC<PlayerProviderProps> = ({
children,
playerRef,
item,
mediaSource,
isVideoLoaded,
trackCount,
}) => {
const value = useMemo(
() => ({ playerRef, item, mediaSource, isVideoLoaded, trackCount }),
[playerRef, item, mediaSource, isVideoLoaded, trackCount],
);
return (
<PlayerContext.Provider value={value}>{children}</PlayerContext.Provider>
);
};
// Core context hook
export const usePlayerContext = () => {
const context = useContext(PlayerContext);
if (!context)
throw new Error("usePlayerContext must be used within PlayerProvider");
return context;
};
// Player controls hook
export const usePlayerControls = () => {
const { playerRef } = usePlayerContext();
return {
// Subtitle controls
getSubtitleTracks: async () => {
return playerRef.current?.getSubtitleTracks() ?? null;
},
setSubtitleTrack: (trackId: number) => {
playerRef.current?.setSubtitleTrack(trackId);
},
disableSubtitles: () => {
playerRef.current?.disableSubtitles();
},
addSubtitleFile: (url: string, select = true) => {
playerRef.current?.addSubtitleFile(url, select);
},
// Audio controls
getAudioTracks: async () => {
return playerRef.current?.getAudioTracks() ?? null;
},
setAudioTrack: (trackId: number) => {
playerRef.current?.setAudioTrack(trackId);
},
// Playback controls
play: () => playerRef.current?.play(),
pause: () => playerRef.current?.pause(),
seekTo: (position: number) => playerRef.current?.seekTo(position),
seekBy: (offset: number) => playerRef.current?.seekBy(offset),
setSpeed: (speed: number) => playerRef.current?.setSpeed(speed),
// Subtitle positioning
setSubtitleScale: (scale: number) =>
playerRef.current?.setSubtitleScale(scale),
setSubtitlePosition: (position: number) =>
playerRef.current?.setSubtitlePosition(position),
setSubtitleMarginY: (margin: number) =>
playerRef.current?.setSubtitleMarginY(margin),
setSubtitleFontSize: (size: number) =>
playerRef.current?.setSubtitleFontSize(size),
// PiP
startPictureInPicture: () => playerRef.current?.startPictureInPicture(),
stopPictureInPicture: () => playerRef.current?.stopPictureInPicture(),
};
};
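A minimal sketch of a control component consuming these hooks (the component name and chosen track ID are illustrative, not part of this commit):

import React from "react";
import { Button } from "react-native";
import { usePlayerContext, usePlayerControls } from "./PlayerContext";

const SubtitleQuickToggle: React.FC = () => {
  const { isVideoLoaded, trackCount } = usePlayerContext();
  const controls = usePlayerControls();

  // MPV track IDs are only meaningful once the track list has been reported
  if (!isVideoLoaded || trackCount === 0) return null;

  return (
    <Button
      title="First subtitle track"
      // setSubtitleTrack(-1) or disableSubtitles() would turn subtitles off
      onPress={() => controls.setSubtitleTrack(1)}
    />
  );
};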

View File

@@ -1,4 +1,73 @@
import { SubtitleDeliveryMethod } from "@jellyfin/sdk/lib/generated-client";
/**
* VideoContext.tsx
*
* Manages subtitle and audio track state for the video player UI.
*
* ============================================================================
* INDEX TYPES
* ============================================================================
*
* We track two different indices for each track:
*
* 1. SERVER INDEX (sub.Index / track.index)
* - Jellyfin's server-side stream index
* - Used to report playback state to Jellyfin server
* - Allows Jellyfin to remember user's last selected tracks
* - Passed via router params (subtitleIndex, audioIndex)
* - Value of -1 means disabled/none
*
* 2. MPV INDEX (track.mpvIndex)
* - MPV's internal track ID for the loaded track
* - Used to actually switch tracks in the player
* - Only assigned to tracks that are loaded into MPV
* - Value of -1 means track is not in MPV (e.g., burned-in image sub)
*
* ============================================================================
* SUBTITLE DELIVERY METHODS
* ============================================================================
*
* Jellyfin provides subtitles via different delivery methods:
* - Embed: Subtitle is embedded in the container (MKV, MP4, etc.)
* - Hls: Subtitle is delivered via HLS segments (during transcoding)
* - External: Subtitle is delivered as a separate file URL
* - Encode: Subtitle is burned into the video (image-based subs during transcode)
*
* Jellyfin also provides `IsTextSubtitleStream` boolean:
* - true: Text-based subtitle (SRT, ASS, VTT, etc.)
* - false: Image-based subtitle (PGS, VOBSUB, DVDSUB, etc.)
*
* ============================================================================
* SUBTITLE TYPES AND HOW THEY'RE HANDLED
* ============================================================================
*
* 1. TEXT-BASED SUBTITLES (IsTextSubtitleStream = true)
* - Direct Play: Loaded into MPV (embedded or via sub-add for external)
* - Transcoding: Delivered via HLS, loaded into MPV
* - Action: Use playerControls.setSubtitleTrack(mpvId)
*
* 2. IMAGE-BASED SUBTITLES (IsTextSubtitleStream = false)
* - Direct Play: Embedded ones are in MPV, external ones are filtered out
* - Transcoding: BURNED INTO VIDEO by Jellyfin (not in MPV track list)
* - Action: When transcoding, use replacePlayer() to request burn-in
*
* ============================================================================
* MPV INDEX CALCULATION
* ============================================================================
*
* We iterate through Jellyfin's subtitle list and assign MPV indices only to
* subtitles that are actually loaded into MPV:
*
* - isSubtitleInMpv = true: Subtitle is in MPV's track list, increment index
* - isSubtitleInMpv = false: Subtitle is NOT in MPV (e.g., image sub during
* transcode), do NOT increment index
*
* The order of subtitles in Jellyfin's MediaStreams matches the order in MPV.
*/
import {
type MediaStream,
SubtitleDeliveryMethod,
} from "@jellyfin/sdk/lib/generated-client";
import { router, useLocalSearchParams } from "expo-router";
import type React from "react";
import {
@@ -11,53 +80,23 @@ import {
} from "react";
import type { AudioTrack, SubtitleTrack } from "@/modules";
import type { Track } from "../types";
import { useControlContext } from "./ControlContext";
import { usePlayerContext, usePlayerControls } from "./PlayerContext";
interface VideoContextProps {
subtitleTracks: Track[] | null;
audioTracks: Track[] | null;
setSubtitleTrack: ((index: number) => void) | undefined;
setSubtitleURL: ((url: string, customName: string) => void) | undefined;
}
const VideoContext = createContext<VideoContextProps | undefined>(undefined);
interface VideoProviderProps {
children: ReactNode;
getSubtitleTracks:
| (() => Promise<SubtitleTrack[] | null>)
| (() => SubtitleTrack[])
| undefined;
getAudioTracks:
| (() => Promise<AudioTrack[] | null>)
| (() => AudioTrack[])
| undefined;
setSubtitleTrack: ((index: number) => void) | undefined;
setAudioTrack: ((index: number) => void) | undefined;
setSubtitleURL: ((url: string, customName: string) => void) | undefined;
}
/**
 * Video context provider for managing subtitle and audio tracks.
* MPV player is used for all playback.
*/
export const VideoProvider: React.FC<VideoProviderProps> = ({
export const VideoProvider: React.FC<{ children: ReactNode }> = ({
children,
getSubtitleTracks,
getAudioTracks,
setSubtitleTrack,
setAudioTrack,
setSubtitleURL,
}) => {
const [subtitleTracks, setSubtitleTracks] = useState<Track[] | null>(null);
const [audioTracks, setAudioTracks] = useState<Track[] | null>(null);
const ControlContext = useControlContext();
const isVideoLoaded = ControlContext?.isVideoLoaded;
const mediaSource = ControlContext?.mediaSource;
const allSubs =
mediaSource?.MediaStreams?.filter((s) => s.Type === "Subtitle") || [];
const { trackCount, mediaSource } = usePlayerContext();
const playerControls = usePlayerControls();
const { itemId, audioIndex, bitrateValue, subtitleIndex, playbackPosition } =
useLocalSearchParams<{
@@ -69,172 +108,167 @@ export const VideoProvider: React.FC<VideoProviderProps> = ({
playbackPosition: string;
}>();
const onTextBasedSubtitle = useMemo(() => {
return (
allSubs.find(
(s) =>
s.Index?.toString() === subtitleIndex &&
(s.DeliveryMethod === SubtitleDeliveryMethod.Embed ||
s.DeliveryMethod === SubtitleDeliveryMethod.Hls ||
s.DeliveryMethod === SubtitleDeliveryMethod.External),
) || subtitleIndex === "-1"
const allSubs =
mediaSource?.MediaStreams?.filter((s) => s.Type === "Subtitle") || [];
const allAudio =
mediaSource?.MediaStreams?.filter((s) => s.Type === "Audio") || [];
const isTranscoding = Boolean(mediaSource?.TranscodingUrl);
/** Check if subtitle is image-based (PGS, VOBSUB, etc.) */
const isImageBased = (sub: MediaStream): boolean =>
sub.IsTextSubtitleStream === false;
/**
* Check if the currently selected subtitle is image-based.
* Used to determine if we need to refresh the player when changing subs.
*/
const isCurrentSubImageBased = useMemo(() => {
if (subtitleIndex === "-1") return false;
const currentSub = allSubs.find(
(s) => s.Index?.toString() === subtitleIndex,
);
return currentSub ? isImageBased(currentSub) : false;
}, [allSubs, subtitleIndex]);
const setPlayerParams = ({
chosenAudioIndex = audioIndex,
chosenSubtitleIndex = subtitleIndex,
}: {
chosenAudioIndex?: string;
chosenSubtitleIndex?: string;
/**
* Refresh the player with new parameters.
* This triggers Jellyfin to re-process the stream (e.g., burn in image subs).
*/
const replacePlayer = (params: {
audioIndex?: string;
subtitleIndex?: string;
}) => {
console.log("chosenSubtitleIndex", chosenSubtitleIndex);
const queryParams = new URLSearchParams({
itemId: itemId ?? "",
audioIndex: chosenAudioIndex,
subtitleIndex: chosenSubtitleIndex,
audioIndex: params.audioIndex ?? audioIndex,
subtitleIndex: params.subtitleIndex ?? subtitleIndex,
mediaSourceId: mediaSource?.Id ?? "",
bitrateValue: bitrateValue,
playbackPosition: playbackPosition,
}).toString();
router.replace(`player/direct-player?${queryParams}` as any);
};
const setTrackParams = (
_type: "subtitle",
index: number,
serverIndex: number,
) => {
// If we're transcoding and we're going from an image-based subtitle
// to a text-based subtitle, we need to change the player params.
const shouldChangePlayerParams =
mediaSource?.TranscodingUrl && !onTextBasedSubtitle;
console.log("Set player params", index, serverIndex);
if (shouldChangePlayerParams) {
setPlayerParams({
chosenSubtitleIndex: serverIndex.toString(),
});
return;
/**
* Determine if a subtitle is available in MPV's track list.
*
* A subtitle is in MPV if:
* - Delivery is Embed/Hls/External AND not an image-based sub during transcode
*/
const isSubtitleInMpv = (sub: MediaStream): boolean => {
// During transcoding, image-based subs are burned in, not in MPV
if (isTranscoding && isImageBased(sub)) {
return false;
}
setSubtitleTrack?.(serverIndex);
router.setParams({
subtitleIndex: serverIndex.toString(),
});
// Embed/Hls/External methods mean the sub is loaded into MPV
return (
sub.DeliveryMethod === SubtitleDeliveryMethod.Embed ||
sub.DeliveryMethod === SubtitleDeliveryMethod.Hls ||
sub.DeliveryMethod === SubtitleDeliveryMethod.External
);
};
// Fetch tracks when track count changes
useEffect(() => {
if (trackCount === 0) return;
const fetchTracks = async () => {
if (getSubtitleTracks) {
let subtitleData: SubtitleTrack[] | null = null;
try {
subtitleData = await getSubtitleTracks();
console.log("subtitleData", subtitleData);
} catch (error) {
console.log("[VideoContext] Failed to get subtitle tracks:", error);
return;
}
const [subtitleData, audioData] = await Promise.all([
playerControls.getSubtitleTracks().catch(() => null),
playerControls.getAudioTracks().catch(() => null),
]);
let embedSubIndex = 1;
const processedSubs: Track[] = allSubs?.map((sub) => {
/** A boolean value determining if we should increment the embedSubIndex */
const shouldIncrement =
sub.DeliveryMethod === SubtitleDeliveryMethod.Embed ||
sub.DeliveryMethod === SubtitleDeliveryMethod.Hls ||
sub.DeliveryMethod === SubtitleDeliveryMethod.External;
/** The index of subtitle inside MPV Player itself */
const mpvIndex = subtitleData?.at(embedSubIndex)?.id ?? -1;
if (shouldIncrement) embedSubIndex++;
return {
name: sub.DisplayTitle || "Undefined Subtitle",
index: sub.Index ?? -1,
setTrack: () =>
shouldIncrement
? setTrackParams("subtitle", mpvIndex, sub.Index ?? -1)
: setPlayerParams({
chosenSubtitleIndex: sub.Index?.toString(),
}),
};
});
// Process subtitles - map Jellyfin indices to MPV track IDs
let mpvIndex = 0; // MPV track index counter (only incremented for subs in MPV)
// Step 3: Restore the original order
const subtitles: Track[] = processedSubs.sort(
(a, b) => a.index - b.index,
);
const subs: Track[] = allSubs.map((sub) => {
const inMpv = isSubtitleInMpv(sub);
// Add a "Disable Subtitles" option
subtitles.unshift({
name: "Disable",
index: -1,
setTrack: () =>
!mediaSource?.TranscodingUrl || onTextBasedSubtitle
? setTrackParams("subtitle", -1, -1)
: setPlayerParams({ chosenSubtitleIndex: "-1" }),
});
setSubtitleTracks(subtitles);
}
// Get MPV track ID: only if this sub is actually in MPV's track list
const mpvId = inMpv
? ((subtitleData as SubtitleTrack[])?.[mpvIndex++]?.id ?? -1)
: -1;
return {
name: sub.DisplayTitle || "Unknown",
index: sub.Index ?? -1, // Jellyfin server-side index
mpvIndex: mpvId, // MPV track ID (-1 if not in MPV)
setTrack: () => {
// Case 1: Transcoding + switching to/from image-based sub
// Need to refresh player so Jellyfin burns in the new sub
if (
isTranscoding &&
(isImageBased(sub) || isCurrentSubImageBased)
) {
replacePlayer({ subtitleIndex: String(sub.Index) });
return;
}
// Case 2: Subtitle is in MPV - just switch tracks
if (inMpv && mpvId !== -1) {
playerControls.setSubtitleTrack(mpvId);
router.setParams({ subtitleIndex: String(sub.Index) });
return;
}
// Case 3: Fallback - refresh player
replacePlayer({ subtitleIndex: String(sub.Index) });
},
};
});
// Add "Disable" option at the beginning
subs.unshift({
name: "Disable",
index: -1,
setTrack: () => {
// If currently using image-based sub during transcode, need to refresh
if (isTranscoding && isCurrentSubImageBased) {
replacePlayer({ subtitleIndex: "-1" });
} else {
playerControls.setSubtitleTrack(-1);
router.setParams({ subtitleIndex: "-1" });
}
},
});
// Process audio tracks
const audio: Track[] = allAudio.map((a, idx) => ({
name: a.DisplayTitle || "Unknown",
index: a.Index ?? -1,
setTrack: () => {
// Transcoding: need full player refresh to change audio stream
if (isTranscoding) {
replacePlayer({ audioIndex: String(a.Index) });
return;
}
// Direct play: just switch audio track in MPV
const mpvId = (audioData as AudioTrack[])?.[idx]?.id ?? idx + 1;
playerControls.setAudioTrack(mpvId);
router.setParams({ audioIndex: String(a.Index) });
},
}));
setSubtitleTracks(subs.sort((a, b) => a.index - b.index));
setAudioTracks(audio);
};
fetchTracks();
}, [isVideoLoaded, getSubtitleTracks]);
// Fetch audio tracks
useEffect(() => {
const fetchAudioTracks = async () => {
if (getAudioTracks) {
let audioData: AudioTrack[] | null = null;
try {
audioData = await getAudioTracks();
console.log("audioData", audioData);
} catch (error) {
console.log("[VideoContext] Failed to get audio tracks:", error);
return;
}
const allAudio =
mediaSource?.MediaStreams?.filter((s) => s.Type === "Audio") || [];
let embedAudioIndex = 0;
const processedAudio: Track[] = allAudio?.map((audio) => {
const mpvIndex = audioData?.at(embedAudioIndex)?.id ?? 1;
embedAudioIndex++;
return {
name: audio.DisplayTitle || "Undefined Audio",
index: audio.Index ?? -1,
setTrack: () => {
setAudioTrack?.(mpvIndex);
router.setParams({
audioIndex: audio.Index?.toString() ?? "0",
});
},
};
});
setAudioTracks(processedAudio);
}
};
fetchAudioTracks();
}, [isVideoLoaded, getAudioTracks]);
}, [trackCount, mediaSource]);
return (
<VideoContext.Provider
value={{
subtitleTracks,
audioTracks,
setSubtitleTrack,
setSubtitleURL,
}}
>
<VideoContext.Provider value={{ subtitleTracks, audioTracks }}>
{children}
</VideoContext.Provider>
);
};
export const useVideoContext = () => {
const context = useContext(VideoContext);
if (context === undefined) {
throw new Error("useVideoContext must be used within a VideoProvider");
}
return context;
const ctx = useContext(VideoContext);
if (!ctx)
throw new Error("useVideoContext must be used within VideoProvider");
return ctx;
};

View File

@@ -7,17 +7,12 @@ import {
type OptionGroup,
PlatformDropdown,
} from "@/components/PlatformDropdown";
import { useControlContext } from "../contexts/ControlContext";
import { usePlayerContext } from "../contexts/PlayerContext";
import { useVideoContext } from "../contexts/VideoContext";
const DropdownView = () => {
const videoContext = useVideoContext();
const { subtitleTracks, audioTracks } = videoContext;
const ControlContext = useControlContext();
const [item, mediaSource] = [
ControlContext?.item,
ControlContext?.mediaSource,
];
const { subtitleTracks, audioTracks } = useVideoContext();
const { item, mediaSource } = usePlayerContext();
const router = useRouter();
const { subtitleIndex, audioIndex, bitrateValue, playbackPosition, offline } =

View File

@@ -1,51 +1,29 @@
import { type BaseItemDto } from "@jellyfin/sdk/lib/generated-client";
import { useMemo } from "react";
import { BITRATES } from "@/components/BitrateSelector";
import type { Settings } from "@/utils/atoms/settings";
import { getDefaultPlaySettings } from "@/utils/jellyfin/getDefaultPlaySettings";
// Used only for initial play settings.
const useDefaultPlaySettings = (
item: BaseItemDto,
settings: Settings | null,
) => {
const playSettings = useMemo(() => {
// 1. Get first media source
const mediaSource = item.MediaSources?.[0];
/**
* React hook wrapper for getDefaultPlaySettings.
* Used in UI components for initial playback (no previous track state).
*/
const useDefaultPlaySettings = (item: BaseItemDto, settings: Settings | null) =>
useMemo(() => {
const { mediaSource, audioIndex, subtitleIndex, bitrate } =
getDefaultPlaySettings(item, settings);
// 2. Get default or preferred audio
const defaultAudioIndex = mediaSource?.DefaultAudioStreamIndex;
const preferedAudioIndex = mediaSource?.MediaStreams?.find(
(x) =>
x.Type === "Audio" &&
x.Language ===
settings?.defaultAudioLanguage?.ThreeLetterISOLanguageName,
)?.Index;
const firstAudioIndex = mediaSource?.MediaStreams?.find(
(x) => x.Type === "Audio",
)?.Index;
// 4. Get default bitrate from settings or fallback to max
let bitrate = settings?.defaultBitrate ?? BITRATES[0];
// value undefined seems to get lost in settings. This is just a failsafe
if (bitrate.key === BITRATES[0].key) {
bitrate = BITRATES[0];
}
console.log("defaultPlaySettings", {
audioIndex,
subtitleIndex,
bitrate,
});
return {
defaultAudioIndex:
preferedAudioIndex ?? defaultAudioIndex ?? firstAudioIndex ?? undefined,
defaultSubtitleIndex: mediaSource?.DefaultSubtitleStreamIndex ?? -1,
defaultMediaSource: mediaSource ?? undefined,
defaultBitrate: bitrate ?? undefined,
defaultMediaSource: mediaSource,
defaultAudioIndex: audioIndex,
defaultSubtitleIndex: subtitleIndex,
defaultBitrate: bitrate,
};
}, [
item.MediaSources,
settings?.defaultAudioLanguage,
settings?.defaultSubtitleLanguage,
]);
return playSettings;
};
}, [item, settings]);
export default useDefaultPlaySettings;

View File

@@ -17,8 +17,10 @@ export type {
OnLoadEventPayload,
OnPlaybackStateChangePayload,
OnProgressEventPayload,
OnTracksReadyEventPayload,
SubtitleTrack,
SubtitleTrack as TrackInfo,
VideoSource,
} from "./mpv-player";
// MPV Player - Main exports
export { MpvPlayerView } from "./mpv-player";

View File

@@ -9,6 +9,7 @@ protocol MPVSoftwareRendererDelegate: AnyObject {
func renderer(_ renderer: MPVSoftwareRenderer, didChangePause isPaused: Bool)
func renderer(_ renderer: MPVSoftwareRenderer, didChangeLoading isLoading: Bool)
func renderer(_ renderer: MPVSoftwareRenderer, didBecomeReadyToSeek: Bool)
func renderer(_ renderer: MPVSoftwareRenderer, didUpdateTrackList trackCount: Int)
}
final class MPVSoftwareRenderer {
@@ -43,6 +44,9 @@ final class MPVSoftwareRenderer {
private var currentPreset: PlayerPreset?
private var currentURL: URL?
private var currentHeaders: [String: String]?
private var pendingExternalSubtitles: [String] = []
private var initialSubtitleId: Int?
private var initialAudioId: Int?
private var disposeBag: [() -> Void] = []
@@ -50,6 +54,9 @@ final class MPVSoftwareRenderer {
private var isStopping = false
private var shouldClearPixelBuffer = false
private let bgraFormatCString: [CChar] = Array("bgra\0".utf8CString)
private let maxInFlightBuffers = 3
private var inFlightBufferCount = 0
private let inFlightLock = NSLock()
weak var delegate: MPVSoftwareRendererDelegate?
private var cachedDuration: Double = 0
@@ -107,10 +114,11 @@ final class MPVSoftwareRenderer {
setOption(name: "demuxer-max-bytes", value: "150M")
setOption(name: "demuxer-readahead-secs", value: "20")
// Subtitle options - blend into video for software renderer
setOption(name: "sub-auto", value: "yes")
setOption(name: "subs-fallback", value: "yes")
// Subtitle options - use vf=sub to burn subtitles into video frames
// This happens at the filter level, BEFORE the software renderer
setOption(name: "vf", value: "sub")
setOption(name: "sub-visibility", value: "yes")
let initStatus = mpv_initialize(handle)
guard initStatus >= 0 else {
throw RendererError.mpvInitialization(initStatus)
@@ -190,10 +198,21 @@ final class MPVSoftwareRenderer {
isStopping = false
}
func load(url: URL, with preset: PlayerPreset, headers: [String: String]? = nil) {
func load(
url: URL,
with preset: PlayerPreset,
headers: [String: String]? = nil,
startPosition: Double? = nil,
externalSubtitles: [String]? = nil,
initialSubtitleId: Int? = nil,
initialAudioId: Int? = nil
) {
currentPreset = preset
currentURL = url
currentHeaders = headers
pendingExternalSubtitles = externalSubtitles ?? []
self.initialSubtitleId = initialSubtitleId
self.initialAudioId = initialAudioId
renderQueue.async { [weak self] in
guard let self else { return }
@@ -203,16 +222,38 @@ final class MPVSoftwareRenderer {
guard let self else { return }
self.delegate?.renderer(self, didChangeLoading: true)
}
}
guard let handle = mpv else { return }
renderQueue.async { [weak self] in
guard let self else { return }
guard let handle = self.mpv else { return }
self.apply(commands: preset.commands, on: handle)
self.command(handle, ["stop"])
// Sync stop to ensure previous playback is stopped before loading new file
self.commandSync(handle, ["stop"])
self.updateHTTPHeaders(headers)
// Set start position using property (setOption only works before mpv_initialize)
if let startPos = startPosition, startPos > 0 {
self.setProperty(name: "start", value: String(format: "%.2f", startPos))
} else {
self.setProperty(name: "start", value: "0")
}
// Set initial audio track if specified
if let audioId = self.initialAudioId, audioId > 0 {
self.setAudioTrack(audioId)
}
// Set initial subtitle track if no external subs (external subs change track IDs)
if self.pendingExternalSubtitles.isEmpty {
if let subId = self.initialSubtitleId {
self.setSubtitleTrack(subId)
} else {
self.disableSubtitles()
}
} else {
// External subs will be added after file loads, set sid then
self.disableSubtitles()
}
var finalURL = url
if !url.isFileURL {
finalURL = url
@@ -317,7 +358,8 @@ final class MPVSoftwareRenderer {
("dheight", MPV_FORMAT_INT64),
("duration", MPV_FORMAT_DOUBLE),
("time-pos", MPV_FORMAT_DOUBLE),
("pause", MPV_FORMAT_FLAG)
("pause", MPV_FORMAT_FLAG),
("track-list/count", MPV_FORMAT_INT64) // Notify when tracks are available
]
for (name, format) in properties {
@@ -792,6 +834,7 @@ final class MPVSoftwareRenderer {
}
}
/// Async command - returns immediately, mpv processes later
private func command(_ handle: OpaquePointer, _ args: [String]) {
guard !args.isEmpty else { return }
_ = withCStringArray(args) { pointer in
@@ -799,6 +842,14 @@ final class MPVSoftwareRenderer {
}
}
/// Sync command - waits for mpv to process before returning
private func commandSync(_ handle: OpaquePointer, _ args: [String]) -> Int32 {
guard !args.isEmpty else { return -1 }
return withCStringArray(args) { pointer in
mpv_command(handle, pointer)
}
}
private func processEvents() {
eventQueueGroup.enter()
let group = eventQueueGroup
@@ -821,6 +872,22 @@ final class MPVSoftwareRenderer {
case MPV_EVENT_VIDEO_RECONFIG:
refreshVideoState()
case MPV_EVENT_FILE_LOADED:
// Add external subtitles now that the file is loaded
let hadExternalSubs = !pendingExternalSubtitles.isEmpty
if hadExternalSubs, let handle = mpv {
for subUrl in pendingExternalSubtitles {
command(handle, ["sub-add", subUrl])
}
pendingExternalSubtitles = []
// Set subtitle after external subs are added (track IDs have changed)
if let subId = initialSubtitleId {
setSubtitleTrack(subId)
} else {
disableSubtitles()
}
}
if !isReadyToSeek {
isReadyToSeek = true
DispatchQueue.main.async { [weak self] in
@@ -887,6 +954,16 @@ final class MPVSoftwareRenderer {
delegate?.renderer(self, didChangePause: isPaused)
}
}
case "track-list/count":
var trackCount: Int64 = 0
let status = getProperty(handle: handle, name: name, format: MPV_FORMAT_INT64, value: &trackCount)
if status >= 0 && trackCount > 0 {
Logger.shared.log("Track list updated: \(trackCount) tracks available", type: "Info")
DispatchQueue.main.async { [weak self] in
guard let self = self else { return }
self.delegate?.renderer(self, didUpdateTrackList: Int(trackCount))
}
}
default:
break
}
@@ -949,12 +1026,14 @@ final class MPVSoftwareRenderer {
func seek(to seconds: Double) {
guard let handle = mpv else { return }
let clamped = max(0, seconds)
command(handle, ["seek", String(clamped), "absolute"])
// Sync seek for accurate positioning
commandSync(handle, ["seek", String(clamped), "absolute"])
}
func seek(by seconds: Double) {
guard let handle = mpv else { return }
command(handle, ["seek", String(seconds), "relative"])
// Sync seek for accurate positioning
commandSync(handle, ["seek", String(seconds), "relative"])
}
func setSpeed(_ speed: Double) {
@@ -979,7 +1058,6 @@ final class MPVSoftwareRenderer {
var trackCount: Int64 = 0
getProperty(handle: handle, name: "track-list/count", format: MPV_FORMAT_INT64, value: &trackCount)
Logger.shared.log("getSubtitleTracks: total track count = \(trackCount)", type: "Info")
for i in 0..<trackCount {
var trackType: String?
@@ -1016,7 +1094,18 @@ final class MPVSoftwareRenderer {
func setSubtitleTrack(_ trackId: Int) {
Logger.shared.log("setSubtitleTrack: setting sid to \(trackId)", type: "Info")
setProperty(name: "sid", value: String(trackId))
guard let handle = mpv else {
Logger.shared.log("setSubtitleTrack: mpv handle is nil!", type: "Error")
return
}
// Use setProperty for synchronous behavior (command is async)
if trackId < 0 {
// Disable subtitles
setProperty(name: "sid", value: "no")
} else {
setProperty(name: "sid", value: String(trackId))
}
}
func disableSubtitles() {
@@ -1030,9 +1119,11 @@ final class MPVSoftwareRenderer {
return Int(sid)
}
func addSubtitleFile(url: String) {
func addSubtitleFile(url: String, select: Bool = true) {
guard let handle = mpv else { return }
command(handle, ["sub-add", url])
// "cached" adds without selecting, "select" adds and selects
let flag = select ? "select" : "cached"
commandSync(handle, ["sub-add", url, flag])
}
// MARK: - Subtitle Positioning
@@ -1117,7 +1208,13 @@ final class MPVSoftwareRenderer {
}
func setAudioTrack(_ trackId: Int) {
guard let handle = mpv else {
Logger.shared.log("setAudioTrack: mpv handle is nil", type: "Warn")
return
}
Logger.shared.log("setAudioTrack: setting aid to \(trackId)", type: "Info")
// Use setProperty for synchronous behavior
setProperty(name: "aid", value: String(trackId))
}

View File

@@ -24,23 +24,23 @@ public class MpvPlayerModule: Module {
// Enables the module to be used as a native view. Definition components that are accepted as part of the
// view definition: Prop, Events.
View(MpvPlayerView.self) {
// Defines a setter for the `url` prop.
Prop("url") { (view: MpvPlayerView, url: String) in
if let videoURL = URL(string: url) {
view.loadVideo(url: videoURL, headers: nil)
}
}
// Defines a setter for headers
Prop("headers") { (view: MpvPlayerView, headers: [String: String]?) in
// Headers will be used when loading the video
}
// Defines a setter for autoplay
Prop("autoplay") { (view: MpvPlayerView, autoplay: Bool) in
if autoplay {
view.play()
}
// All video load options are passed via a single "source" prop
Prop("source") { (view: MpvPlayerView, source: [String: Any]?) in
guard let source = source,
let urlString = source["url"] as? String,
let videoURL = URL(string: urlString) else { return }
let config = VideoLoadConfig(
url: videoURL,
headers: source["headers"] as? [String: String],
externalSubtitles: source["externalSubtitles"] as? [String],
startPosition: source["startPosition"] as? Double,
autoplay: (source["autoplay"] as? Bool) ?? true,
initialSubtitleId: source["initialSubtitleId"] as? Int,
initialAudioId: source["initialAudioId"] as? Int
)
view.loadVideo(config: config)
}
// Async function to play video
@@ -122,8 +122,8 @@ public class MpvPlayerModule: Module {
return view.getCurrentSubtitleTrack()
}
AsyncFunction("addSubtitleFile") { (view: MpvPlayerView, url: String) in
view.addSubtitleFile(url: url)
AsyncFunction("addSubtitleFile") { (view: MpvPlayerView, url: String, select: Bool) in
view.addSubtitleFile(url: url, select: select)
}
// Subtitle positioning functions
@@ -165,7 +165,7 @@ public class MpvPlayerModule: Module {
}
// Defines events that the view can send to JavaScript
Events("onLoad", "onPlaybackStateChange", "onProgress", "onError")
Events("onLoad", "onPlaybackStateChange", "onProgress", "onError", "onTracksReady")
}
}
}

View File

@@ -3,6 +3,37 @@ import CoreMedia
import ExpoModulesCore
import UIKit
/// Configuration for loading a video
struct VideoLoadConfig {
let url: URL
var headers: [String: String]?
var externalSubtitles: [String]?
var startPosition: Double?
var autoplay: Bool
/// MPV subtitle track ID to select on start (1-based, -1 to disable, nil to use default)
var initialSubtitleId: Int?
/// MPV audio track ID to select on start (1-based, nil to use default)
var initialAudioId: Int?
init(
url: URL,
headers: [String: String]? = nil,
externalSubtitles: [String]? = nil,
startPosition: Double? = nil,
autoplay: Bool = true,
initialSubtitleId: Int? = nil,
initialAudioId: Int? = nil
) {
self.url = url
self.headers = headers
self.externalSubtitles = externalSubtitles
self.startPosition = startPosition
self.autoplay = autoplay
self.initialSubtitleId = initialSubtitleId
self.initialAudioId = initialAudioId
}
}
// This view will be used as a native component. Make sure to inherit from `ExpoView`
// to apply the proper styling (e.g. border radius and shadows).
class MpvPlayerView: ExpoView {
@@ -15,6 +46,7 @@ class MpvPlayerView: ExpoView {
let onPlaybackStateChange = EventDispatcher()
let onProgress = EventDispatcher()
let onError = EventDispatcher()
let onTracksReady = EventDispatcher()
private var currentURL: URL?
private var cachedPosition: Double = 0
@@ -74,10 +106,9 @@ class MpvPlayerView: ExpoView {
CATransaction.commit()
}
func loadVideo(url: URL, headers: [String: String]?) {
currentURL = url
func loadVideo(config: VideoLoadConfig) {
currentURL = config.url
// Create a simple preset with default commands
let preset = PlayerPreset(
id: .sdrRec709,
title: "Default",
@@ -86,8 +117,27 @@ class MpvPlayerView: ExpoView {
commands: []
)
renderer?.load(url: url, with: preset, headers: headers)
onLoad(["url": url.absoluteString])
// Pass everything to the renderer - it handles start position and external subs
renderer?.load(
url: config.url,
with: preset,
headers: config.headers,
startPosition: config.startPosition,
externalSubtitles: config.externalSubtitles,
initialSubtitleId: config.initialSubtitleId,
initialAudioId: config.initialAudioId
)
if config.autoplay {
play()
}
onLoad(["url": config.url.absoluteString])
}
// Convenience method for simple loads
func loadVideo(url: URL, headers: [String: String]? = nil) {
loadVideo(config: VideoLoadConfig(url: url, headers: headers))
}
func play() {
@@ -164,8 +214,8 @@ class MpvPlayerView: ExpoView {
return renderer?.getCurrentSubtitleTrack() ?? 0
}
func addSubtitleFile(url: String) {
renderer?.addSubtitleFile(url: url)
func addSubtitleFile(url: String, select: Bool = true) {
renderer?.addSubtitleFile(url: url, select: select)
}
// MARK: - Audio Track Controls
@@ -266,6 +316,13 @@ extension MpvPlayerView: MPVSoftwareRendererDelegate {
])
}
}
func renderer(_: MPVSoftwareRenderer, didUpdateTrackList trackCount: Int) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onTracksReady(["trackCount": trackCount])
}
}
}
// MARK: - PiPControllerDelegate

View File

@@ -21,6 +21,10 @@ export type OnErrorEventPayload = {
error: string;
};
export type OnTracksReadyEventPayload = {
trackCount: number;
};
export type MpvPlayerModuleEvents = {
onChange: (params: ChangeEventPayload) => void;
};
@@ -29,10 +33,20 @@ export type ChangeEventPayload = {
value: string;
};
export type MpvPlayerViewProps = {
url?: string;
export type VideoSource = {
url: string;
headers?: Record<string, string>;
externalSubtitles?: string[];
startPosition?: number;
autoplay?: boolean;
/** MPV subtitle track ID to select on start (1-based, -1 to disable) */
initialSubtitleId?: number;
/** MPV audio track ID to select on start (1-based) */
initialAudioId?: number;
};
export type MpvPlayerViewProps = {
source?: VideoSource;
style?: StyleProp<ViewStyle>;
onLoad?: (event: { nativeEvent: OnLoadEventPayload }) => void;
onPlaybackStateChange?: (event: {
@@ -40,6 +54,7 @@ export type MpvPlayerViewProps = {
}) => void;
onProgress?: (event: { nativeEvent: OnProgressEventPayload }) => void;
onError?: (event: { nativeEvent: OnErrorEventPayload }) => void;
onTracksReady?: (event: { nativeEvent: OnTracksReadyEventPayload }) => void;
};
export interface MpvPlayerViewRef {
@@ -61,7 +76,7 @@ export interface MpvPlayerViewRef {
setSubtitleTrack: (trackId: number) => Promise<void>;
disableSubtitles: () => Promise<void>;
getCurrentSubtitleTrack: () => Promise<number>;
addSubtitleFile: (url: string) => Promise<void>;
addSubtitleFile: (url: string, select?: boolean) => Promise<void>;
// Subtitle positioning
setSubtitlePosition: (position: number) => Promise<void>;
setSubtitleScale: (scale: number) => Promise<void>;

View File

@@ -63,8 +63,8 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
getCurrentSubtitleTrack: async () => {
return await nativeRef.current?.getCurrentSubtitleTrack();
},
addSubtitleFile: async (url: string) => {
await nativeRef.current?.addSubtitleFile(url);
addSubtitleFile: async (url: string, select = true) => {
await nativeRef.current?.addSubtitleFile(url, select);
},
setSubtitlePosition: async (position: number) => {
await nativeRef.current?.setSubtitlePosition(position);

View File

@@ -1,4 +1,13 @@
// utils/getDefaultPlaySettings.ts
/**
* getDefaultPlaySettings.ts
*
* Determines default audio/subtitle tracks and bitrate for playback.
*
* Two use cases:
* 1. INITIAL PLAY: No previous state, uses media defaults + user language preferences
* 2. SEQUENTIAL PLAY: Has previous state (e.g., next episode), uses StreamRanker
* to find matching tracks in the new media
*/
import type {
BaseItemDto,
@@ -12,86 +21,83 @@ import {
SubtitleStreamRanker,
} from "../streamRanker";
interface PlaySettings {
export interface PlaySettings {
item: BaseItemDto;
bitrate: (typeof BITRATES)[0];
mediaSource?: MediaSourceInfo | null;
audioIndex?: number | undefined;
subtitleIndex?: number | undefined;
}
export interface previousIndexes {
audioIndex?: number;
subtitleIndex?: number;
}
interface TrackOptions {
DefaultAudioStreamIndex: number | undefined;
DefaultSubtitleStreamIndex: number | undefined;
export interface PreviousIndexes {
audioIndex?: number;
subtitleIndex?: number;
}
// Used for getting default values for the next player.
/**
* Get default play settings for an item.
*
* @param item - The media item to play
* @param settings - User settings (language preferences, bitrate, etc.)
* @param previous - Optional previous track selections to carry over (for sequential play)
*/
export function getDefaultPlaySettings(
item: BaseItemDto,
settings: Settings,
previousIndexes?: previousIndexes,
previousSource?: MediaSourceInfo,
settings: Settings | null,
previous?: { indexes?: PreviousIndexes; source?: MediaSourceInfo },
): PlaySettings {
if (item.Type === "Program") {
return {
item,
bitrate: BITRATES[0],
mediaSource: undefined,
audioIndex: undefined,
subtitleIndex: undefined,
};
}
const bitrate = settings?.defaultBitrate ?? BITRATES[0];
// 1. Get first media source
// Live TV programs don't have media sources
if (item.Type === "Program") {
return { item, bitrate };
}
const mediaSource = item.MediaSources?.[0];
const streams = mediaSource?.MediaStreams ?? [];
// We prefer the previous track over the default track.
const trackOptions: TrackOptions = {
DefaultAudioStreamIndex: mediaSource?.DefaultAudioStreamIndex ?? -1,
DefaultSubtitleStreamIndex: mediaSource?.DefaultSubtitleStreamIndex ?? -1,
};
// Start with media source defaults
let audioIndex = mediaSource?.DefaultAudioStreamIndex;
let subtitleIndex = mediaSource?.DefaultSubtitleStreamIndex ?? -1;
const mediaStreams = mediaSource?.MediaStreams ?? [];
if (settings?.rememberSubtitleSelections && previousIndexes) {
if (previousIndexes.subtitleIndex !== undefined && previousSource) {
const subtitleRanker = new SubtitleStreamRanker();
const ranker = new StreamRanker(subtitleRanker);
// Try to match previous selections (sequential play)
if (previous?.indexes && previous?.source && settings) {
if (
settings.rememberSubtitleSelections &&
previous.indexes.subtitleIndex !== undefined
) {
const ranker = new StreamRanker(new SubtitleStreamRanker());
const result = { DefaultSubtitleStreamIndex: subtitleIndex };
ranker.rankStream(
previousIndexes.subtitleIndex,
previousSource,
mediaStreams,
trackOptions,
previous.indexes.subtitleIndex,
previous.source,
streams,
result,
);
subtitleIndex = result.DefaultSubtitleStreamIndex;
}
if (
settings.rememberAudioSelections &&
previous.indexes.audioIndex !== undefined
) {
const ranker = new StreamRanker(new AudioStreamRanker());
const result = { DefaultAudioStreamIndex: audioIndex };
ranker.rankStream(
previous.indexes.audioIndex,
previous.source,
streams,
result,
);
audioIndex = result.DefaultAudioStreamIndex;
}
}
if (settings?.rememberAudioSelections && previousIndexes) {
if (previousIndexes.audioIndex !== undefined && previousSource) {
const audioRanker = new AudioStreamRanker();
const ranker = new StreamRanker(audioRanker);
ranker.rankStream(
previousIndexes.audioIndex,
previousSource,
mediaStreams,
trackOptions,
);
}
}
// 4. Get default bitrate from settings or fall back to max
const bitrate = settings.defaultBitrate ?? BITRATES[0];
return {
item,
bitrate,
mediaSource,
audioIndex: trackOptions.DefaultAudioStreamIndex,
subtitleIndex: trackOptions.DefaultSubtitleStreamIndex,
audioIndex: audioIndex ?? undefined,
subtitleIndex: subtitleIndex ?? undefined,
};
}
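// A minimal usage sketch (illustrative only, not part of this file): how a
// player screen might call getDefaultPlaySettings for the two scenarios
// described in the header comment. `item`, `settings` and `nextEpisode` are
// assumed placeholders.
//
// // 1. Initial play: no previous state, media defaults + user preferences apply
// const initial = getDefaultPlaySettings(item, settings);
//
// // 2. Sequential play (e.g. next episode): carry the previous selections over
// const next = getDefaultPlaySettings(nextEpisode, settings, {
//   indexes: {
//     audioIndex: initial.audioIndex,
//     subtitleIndex: initial.subtitleIndex,
//   },
//   source: initial.mediaSource ?? undefined,
// });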

View File

@@ -1,7 +1,7 @@
import type { Api } from "@jellyfin/sdk";
import type { AxiosResponse } from "axios";
import type { Settings } from "@/utils/atoms/settings";
import { generateDeviceProfile } from "@/utils/profiles/native";
import type { Settings } from "../../atoms/settings";
import { generateDeviceProfile } from "../../profiles/native";
import { getAuthHeaders } from "../jellyfin";
interface PostCapabilitiesParams {

View File

@@ -0,0 +1,115 @@
/**
* Subtitle utility functions for mapping between Jellyfin and MPV track indices.
*
* Jellyfin uses server-side indices (e.g., 3, 4, 5 for subtitles in MediaStreams).
* MPV uses its own track IDs starting from 1, only counting tracks loaded into MPV.
*
* Image-based subtitles (PGS, VOBSUB) during transcoding are burned into the video
* and NOT available in MPV's track list.
*/
import {
type MediaSourceInfo,
type MediaStream,
SubtitleDeliveryMethod,
} from "@jellyfin/sdk/lib/generated-client";
/** Check if subtitle is image-based (PGS, VOBSUB, etc.) */
export const isImageBasedSubtitle = (sub: MediaStream): boolean =>
sub.IsTextSubtitleStream === false;
/**
* Determine if a subtitle will be available in MPV's track list.
*
* A subtitle is in MPV if:
* - Delivery is Embed/Hls/External AND not an image-based sub during transcode
*/
export const isSubtitleInMpv = (
sub: MediaStream,
isTranscoding: boolean,
): boolean => {
// During transcoding, image-based subs are burned in, not in MPV
if (isTranscoding && isImageBasedSubtitle(sub)) {
return false;
}
// Embed/Hls/External methods mean the sub is loaded into MPV
return (
sub.DeliveryMethod === SubtitleDeliveryMethod.Embed ||
sub.DeliveryMethod === SubtitleDeliveryMethod.Hls ||
sub.DeliveryMethod === SubtitleDeliveryMethod.External
);
};
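// Sketch of the rule above with assumed stream values: during transcoding an
// image-based (embedded PGS) stream is burned in and reports false, while an
// External text stream reports true.
//
// isSubtitleInMpv(
//   { IsTextSubtitleStream: false, DeliveryMethod: SubtitleDeliveryMethod.Embed },
//   true,
// ); // -> false (burned in during transcode)
// isSubtitleInMpv(
//   { IsTextSubtitleStream: true, DeliveryMethod: SubtitleDeliveryMethod.External },
//   true,
// ); // -> true (loaded into MPV)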
/**
* Calculate the MPV track ID for a given Jellyfin subtitle index.
*
* MPV track IDs are 1-based and only count subtitles that are actually in MPV.
* We iterate through all subtitles, counting only those in MPV, until we find
* the one matching the Jellyfin index.
*
* @param mediaSource - The media source containing subtitle streams
* @param jellyfinSubtitleIndex - The Jellyfin server-side subtitle index (-1 = disabled)
* @param isTranscoding - Whether the stream is being transcoded
* @returns MPV track ID (1-based), or -1 if disabled, or undefined if not in MPV
*/
export const getMpvSubtitleId = (
mediaSource: MediaSourceInfo | null | undefined,
jellyfinSubtitleIndex: number | undefined,
isTranscoding: boolean,
): number | undefined => {
// -1 or undefined means disabled
if (jellyfinSubtitleIndex === undefined || jellyfinSubtitleIndex === -1) {
return -1;
}
const allSubs =
mediaSource?.MediaStreams?.filter((s) => s.Type === "Subtitle") || [];
// Find the subtitle with the matching Jellyfin index
const targetSub = allSubs.find((s) => s.Index === jellyfinSubtitleIndex);
// If the target subtitle isn't in MPV (e.g., image-based during transcode), return undefined
if (!targetSub || !isSubtitleInMpv(targetSub, isTranscoding)) {
return undefined;
}
// Count MPV track position (1-based)
let mpvIndex = 0;
for (const sub of allSubs) {
if (isSubtitleInMpv(sub, isTranscoding)) {
mpvIndex++;
if (sub.Index === jellyfinSubtitleIndex) {
return mpvIndex;
}
}
}
return undefined;
};
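// A worked sketch of the index mapping, using assumed values: a media source
// with an embedded PGS subtitle at Jellyfin index 3 and an external srt
// subtitle at index 4.
//
// const exampleSource: MediaSourceInfo = {
//   MediaStreams: [
//     { Type: "Subtitle", Index: 3, IsTextSubtitleStream: false, DeliveryMethod: SubtitleDeliveryMethod.Embed },
//     { Type: "Subtitle", Index: 4, IsTextSubtitleStream: true, DeliveryMethod: SubtitleDeliveryMethod.External },
//   ],
// };
// getMpvSubtitleId(exampleSource, 4, true);  // -> 1 (PGS is burned in, so the srt is MPV's first sub track)
// getMpvSubtitleId(exampleSource, 4, false); // -> 2 (direct play keeps the PGS track, srt becomes track 2)
// getMpvSubtitleId(exampleSource, 3, true);  // -> undefined (image-based sub not in MPV while transcoding)
// getMpvSubtitleId(exampleSource, -1, true); // -> -1 (subtitles disabled)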
/**
* Calculate the MPV track ID for a given Jellyfin audio index.
*
 * Audio tracks are simpler: they're always in MPV (no burn-in like image-based subs).
* MPV track IDs are 1-based.
*
* @param mediaSource - The media source containing audio streams
* @param jellyfinAudioIndex - The Jellyfin server-side audio index
* @returns MPV track ID (1-based), or undefined if not found
*/
export const getMpvAudioId = (
mediaSource: MediaSourceInfo | null | undefined,
jellyfinAudioIndex: number | undefined,
): number | undefined => {
if (jellyfinAudioIndex === undefined) {
return undefined;
}
const allAudio =
mediaSource?.MediaStreams?.filter((s) => s.Type === "Audio") || [];
// Find position in audio list (1-based for MPV)
const position = allAudio.findIndex((a) => a.Index === jellyfinAudioIndex);
return position >= 0 ? position + 1 : undefined;
};
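// Sketch with assumed values: two audio streams at Jellyfin indices 1 and 2.
//
// const audioSource = {
//   MediaStreams: [
//     { Type: "Audio", Index: 1 },
//     { Type: "Audio", Index: 2 },
//   ],
// };
// getMpvAudioId(audioSource, 2); // -> 2 (second audio track, 1-based)
// getMpvAudioId(undefined, 2);   // -> undefined (no media source to search)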

View File

@@ -4,8 +4,4 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
export interface DeviceProfileOptions {
transcode?: boolean;
}
export function generateDeviceProfile(options?: DeviceProfileOptions): any;
export function generateDeviceProfile(): any;

View File

@@ -6,12 +6,12 @@
import MediaTypes from "../../constants/MediaTypes";
import { getSubtitleProfiles } from "./subtitles";
export const generateDeviceProfile = ({ transcode = false } = {}) => {
export const generateDeviceProfile = () => {
/**
* Device profile for Native video player
*/
const profile = {
Name: `1. Vlc Player${transcode ? " (Transcoding)" : ""}`,
Name: `1. MPV Player`,
MaxStaticBitrate: 999_999_999,
MaxStreamingBitrate: 999_999_999,
CodecProfiles: [
@@ -48,7 +48,7 @@ export const generateDeviceProfile = ({ transcode = false } = {}) => {
Container: "mp4,mkv,avi,mov,flv,ts,m2ts,webm,ogv,3gp,hls",
VideoCodec:
"h264,hevc,mpeg4,divx,xvid,wmv,vc1,vp8,vp9,av1,avi,mpeg,mpeg2video",
AudioCodec: "aac,ac3,eac3,mp3,flac,alac,opus,vorbis,wma,dts",
AudioCodec: "aac,ac3,eac3,mp3,flac,alac,opus,vorbis,wma,dts,truehd",
},
{
Type: MediaTypes.Audio,
@@ -75,7 +75,7 @@ export const generateDeviceProfile = ({ transcode = false } = {}) => {
MaxAudioChannels: "2",
},
],
SubtitleProfiles: getSubtitleProfiles(transcode ? "hls" : "External"),
SubtitleProfiles: getSubtitleProfiles(),
};
return profile;

View File

@@ -4,26 +4,19 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
const COMMON_SUBTITLE_PROFILES = [
// Official formats
{ Format: "dvdsub", Method: "Embed" },
{ Format: "dvdsub", Method: "Encode" },
{ Format: "idx", Method: "Embed" },
{ Format: "idx", Method: "Encode" },
{ Format: "pgs", Method: "Embed" },
{ Format: "pgs", Method: "Encode" },
{ Format: "pgssub", Method: "Embed" },
{ Format: "pgssub", Method: "Encode" },
{ Format: "teletext", Method: "Embed" },
{ Format: "teletext", Method: "Encode" },
// Image-based formats - these need to be burned in by Jellyfin (Encode method)
// because MPV cannot load them externally over HTTP
const IMAGE_BASED_FORMATS = [
"dvdsub",
"idx",
"pgs",
"pgssub",
"teletext",
"vobsub",
];
const VARYING_SUBTITLE_FORMATS = [
// Text-based formats - these can be loaded externally by MPV
const TEXT_BASED_FORMATS = [
"webvtt",
"vtt",
"srt",
@@ -46,11 +39,23 @@ const VARYING_SUBTITLE_FORMATS = [
"xsub",
];
export const getSubtitleProfiles = (secondaryMethod) => {
const profiles = [...COMMON_SUBTITLE_PROFILES];
for (const format of VARYING_SUBTITLE_FORMATS) {
export const getSubtitleProfiles = () => {
const profiles = [];
// Image-based formats: Embed or Encode (burn-in), NOT External
for (const format of IMAGE_BASED_FORMATS) {
profiles.push({ Format: format, Method: "Embed" });
profiles.push({ Format: format, Method: secondaryMethod });
profiles.push({ Format: format, Method: "Encode" });
}
// Text-based formats: Embed or External
for (const format of TEXT_BASED_FORMATS) {
profiles.push({ Format: format, Method: "Embed" });
profiles.push({ Format: format, Method: "External" });
}
return profiles;
};
// Export for use in player filtering
export const IMAGE_SUBTITLE_CODECS = IMAGE_BASED_FORMATS;
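// Rough sketch of the generated array, abbreviated for illustration:
//
// getSubtitleProfiles();
// // [
// //   { Format: "dvdsub", Method: "Embed" },
// //   { Format: "dvdsub", Method: "Encode" },
// //   ...remaining image-based formats (Embed + Encode, i.e. burn-in),
// //   { Format: "webvtt", Method: "Embed" },
// //   { Format: "webvtt", Method: "External" },
// //   ...remaining text-based formats (Embed + External),
// // ]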