Compare commits


5 Commits

Author      SHA1        Message                  Date
Alex Kim    9725d499cd  Remove build             2026-01-10 05:29:40 +11:00
Alex Kim    4f1567bfb3  android implementation   2026-01-10 05:27:11 +11:00
Alex Kim    2ead569fb7  used better names        2026-01-10 02:56:28 +11:00
Alex Kim    d1fdea76e8  WIP                      2026-01-10 02:31:21 +11:00
Alex        fd2d420320  WIP                      2025-12-11 21:01:00 +11:00
35 changed files with 2984 additions and 1803 deletions

.gitignore vendored
View File

@@ -19,7 +19,7 @@ web-build/
/androidtv
# Module-specific Builds
modules/vlc-player/android/build
modules/mpv-player/android/build
modules/player/android
modules/hls-downloader/android/build

View File

@@ -79,7 +79,7 @@
"targetSdkVersion": 35,
"buildToolsVersion": "35.0.0",
"kotlinVersion": "2.0.21",
"minSdkVersion": 24,
"minSdkVersion": 26,
"usesCleartextTraffic": true,
"packagingOptions": {
"jniLibs": {
@@ -133,7 +133,14 @@
["./plugins/withChangeNativeAndroidTextToWhite.js"],
["./plugins/withAndroidManifest.js"],
["./plugins/withTrustLocalCerts.js"],
["./plugins/withGradleProperties.js"]
["./plugins/withGradleProperties.js"],
[
"./plugins/withGitPod.js",
{
"podName": "MPVKit-GPL",
"podspecUrl": "https://raw.githubusercontent.com/Alexk2309/MPVKit/0.40.0-av/MPVKit-GPL.podspec"
}
]
],
"experiments": {
"typedRoutes": true

View File

@@ -73,6 +73,12 @@ export const ItemContent: React.FC<ItemContentProps> = React.memo(
defaultSubtitleIndex,
} = useDefaultPlaySettings(item, settings);
console.log("defaultMediaSource", {
defaultAudioIndex,
defaultBitrate,
defaultSubtitleIndex,
});
const logoUrl = useMemo(
() => (item ? getLogoImageUrlById({ api, item }) : null),
[api, item],

View File

@@ -104,6 +104,11 @@ export const MediaSourceButton: React.FC<Props> = ({
// Audio track group
if (audioStreams.length > 0) {
console.log("Audio comparison:", {
selectedAudioIndex: selectedOptions.audioIndex,
streamIndices: audioStreams.map((s) => s.Index),
});
groups.push({
title: t("item_card.audio"),
options: audioStreams.map((stream) => ({

View File

@@ -280,6 +280,7 @@ export const PlayButton: React.FC<Props> = ({
]);
const onPress = useCallback(async () => {
console.log("onPress");
if (!item) return;
lightHapticFeedback();

View File

@@ -59,6 +59,7 @@ export const PlayButton: React.FC<Props> = ({
);
const onPress = () => {
console.log("onpress");
if (!item) return;
lightHapticFeedback();

View File

@@ -118,7 +118,7 @@ export const Controls: FC<Props> = ({
} = useTrickplay(item);
const min = useSharedValue(0);
const max = useSharedValue(item.RunTimeTicks || 0);
const max = useSharedValue(ticksToMs(item.RunTimeTicks || 0));
// Animation values for controls
const controlsOpacity = useSharedValue(showControls ? 1 : 0);
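The changed line feeds `item.RunTimeTicks` through `ticksToMs` before seeding the `max` shared value, so the slider now works in milliseconds rather than raw ticks. Jellyfin's RunTimeTicks are .NET-style 100-nanosecond ticks, so the conversion presumably divides by 10,000; a minimal Kotlin sketch of that assumption (the project's real `ticksToMs` helper lives in the TypeScript codebase):

```kotlin
// Assumption: Jellyfin RunTimeTicks are 100 ns units, so 10,000 ticks = 1 ms.
fun ticksToMs(ticks: Long): Long = ticks / 10_000
```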

View File

@@ -12,6 +12,12 @@ const useDefaultPlaySettings = (item: BaseItemDto, settings: Settings | null) =>
const { mediaSource, audioIndex, subtitleIndex, bitrate } =
getDefaultPlaySettings(item, settings);
console.log("defaultPlaySettings", {
audioIndex,
subtitleIndex,
bitrate,
});
return {
defaultMediaSource: mediaSource,
defaultAudioIndex: audioIndex,

View File

@@ -96,6 +96,8 @@ export const useWebSocket = ({
| Record<string, string>
| undefined; // Arguments are Dictionary<string, string>
console.log("[WS] ~ ", lastMessage);
if (command === "PlayPause") {
console.log("Command ~ PlayPause");
togglePlay();

View File

@@ -25,7 +25,7 @@ if (useManagedAndroidSdkVersions) {
project.android {
compileSdkVersion safeExtGet("compileSdkVersion", 36)
defaultConfig {
minSdkVersion safeExtGet("minSdkVersion", 24)
minSdkVersion safeExtGet("minSdkVersion", 26)
targetSdkVersion safeExtGet("targetSdkVersion", 36)
}
}
@@ -36,8 +36,22 @@ android {
defaultConfig {
versionCode 1
versionName "0.7.6"
ndk {
// Architectures supported by mpv-android
abiFilters 'arm64-v8a', 'armeabi-v7a', 'x86', 'x86_64'
}
}
lintOptions {
abortOnError false
}
sourceSets {
main {
jniLibs.srcDirs = ['libs']
}
}
}
dependencies {
// libmpv from Maven Central
implementation 'dev.jdtech.mpv:libmpv:0.5.1'
}
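For reference, the same additions expressed with the Gradle Kotlin DSL; this is a sketch only (the module ships a Groovy build.gradle, and block names such as `lint` vs. `lintOptions` vary by AGP version):

```kotlin
// build.gradle.kts sketch of the equivalent configuration.
// Assumption: a recent AGP where `lintOptions` is spelled `lint`.
android {
    defaultConfig {
        minSdk = 26
        ndk {
            // Architectures supported by mpv-android
            abiFilters += listOf("arm64-v8a", "armeabi-v7a", "x86", "x86_64")
        }
    }
    lint {
        abortOnError = false
    }
    sourceSets {
        getByName("main") {
            jniLibs.srcDirs("libs")
        }
    }
}

dependencies {
    // libmpv from Maven Central
    implementation("dev.jdtech.mpv:libmpv:0.5.1")
}
```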

View File

@@ -1,2 +1,9 @@
<manifest>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- Required for network streaming -->
<uses-permission android:name="android.permission.INTERNET" />
<!-- Picture-in-Picture feature -->
<uses-feature
android:name="android.software.picture_in_picture"
android:required="false" />
</manifest>

View File

@@ -0,0 +1,543 @@
package expo.modules.mpvplayer
import android.content.Context
import android.os.Handler
import android.os.Looper
import android.util.Log
import android.view.Surface
/**
* MPV renderer that wraps libmpv for video playback.
* This mirrors the iOS MPVLayerRenderer implementation.
*/
class MPVLayerRenderer(private val context: Context) : MPVLib.EventObserver {
companion object {
private const val TAG = "MPVLayerRenderer"
// Property observation format types
const val MPV_FORMAT_NONE = 0
const val MPV_FORMAT_STRING = 1
const val MPV_FORMAT_OSD_STRING = 2
const val MPV_FORMAT_FLAG = 3
const val MPV_FORMAT_INT64 = 4
const val MPV_FORMAT_DOUBLE = 5
const val MPV_FORMAT_NODE = 6
}
interface Delegate {
fun onPositionChanged(position: Double, duration: Double)
fun onPauseChanged(isPaused: Boolean)
fun onLoadingChanged(isLoading: Boolean)
fun onReadyToSeek()
fun onTracksReady()
fun onError(message: String)
fun onVideoDimensionsChanged(width: Int, height: Int)
}
var delegate: Delegate? = null
private val mainHandler = Handler(Looper.getMainLooper())
private var surface: Surface? = null
private var isRunning = false
private var isStopping = false
// Cached state
private var cachedPosition: Double = 0.0
private var cachedDuration: Double = 0.0
private var _isPaused: Boolean = true
private var _isLoading: Boolean = false
private var _playbackSpeed: Double = 1.0
private var isReadyToSeek: Boolean = false
// Video dimensions
private var _videoWidth: Int = 0
private var _videoHeight: Int = 0
val videoWidth: Int
get() = _videoWidth
val videoHeight: Int
get() = _videoHeight
// Current video config
private var currentUrl: String? = null
private var currentHeaders: Map<String, String>? = null
private var pendingExternalSubtitles: List<String> = emptyList()
private var initialSubtitleId: Int? = null
private var initialAudioId: Int? = null
val isPausedState: Boolean
get() = _isPaused
val currentPosition: Double
get() = cachedPosition
val duration: Double
get() = cachedDuration
fun start() {
if (isRunning) return
try {
MPVLib.create(context)
MPVLib.addObserver(this)
// Configure mpv options before initialization (based on Findroid)
MPVLib.setOptionString("vo", "gpu")
MPVLib.setOptionString("gpu-context", "android")
MPVLib.setOptionString("opengl-es", "yes")
// Hardware video decoding
MPVLib.setOptionString("hwdec", "mediacodec-copy")
MPVLib.setOptionString("hwdec-codecs", "h264,hevc,mpeg4,mpeg2video,vp8,vp9,av1")
// Cache settings for better network streaming
MPVLib.setOptionString("cache", "yes")
MPVLib.setOptionString("cache-pause-initial", "yes")
MPVLib.setOptionString("demuxer-max-bytes", "150MiB")
MPVLib.setOptionString("demuxer-max-back-bytes", "75MiB")
MPVLib.setOptionString("demuxer-readahead-secs", "20")
// Seeking optimization - faster seeking at the cost of less precision
// Use keyframe seeking by default (much faster for network streams)
MPVLib.setOptionString("hr-seek", "no")
// Drop frames during seeking for faster response
MPVLib.setOptionString("hr-seek-framedrop", "yes")
// Subtitle settings
MPVLib.setOptionString("sub-scale-with-window", "yes")
MPVLib.setOptionString("sub-use-margins", "no")
MPVLib.setOptionString("subs-match-os-language", "yes")
MPVLib.setOptionString("subs-fallback", "yes")
// Important: Start with force-window=no, will be set to yes when surface is attached
MPVLib.setOptionString("force-window", "no")
MPVLib.setOptionString("keep-open", "always")
MPVLib.initialize()
// Observe properties
observeProperties()
isRunning = true
Log.i(TAG, "MPV renderer started")
} catch (e: Exception) {
Log.e(TAG, "Failed to start MPV renderer: ${e.message}")
delegate?.onError("Failed to start renderer: ${e.message}")
}
}
fun stop() {
if (isStopping) return
if (!isRunning) return
isStopping = true
isRunning = false
try {
MPVLib.removeObserver(this)
MPVLib.detachSurface()
MPVLib.destroy()
} catch (e: Exception) {
Log.e(TAG, "Error stopping MPV: ${e.message}")
}
isStopping = false
}
/**
* Attach surface and re-enable video output.
* Based on Findroid's implementation.
*/
fun attachSurface(surface: Surface) {
this.surface = surface
if (isRunning) {
MPVLib.attachSurface(surface)
// Re-enable video output after attaching surface (Findroid approach)
MPVLib.setOptionString("force-window", "yes")
MPVLib.setOptionString("vo", "gpu")
Log.i(TAG, "Surface attached, video output re-enabled")
}
}
/**
* Detach surface and disable video output.
* Based on Findroid's implementation.
*/
fun detachSurface() {
this.surface = null
if (isRunning) {
try {
// Disable video output before detaching surface (Findroid approach)
MPVLib.setOptionString("vo", "null")
MPVLib.setOptionString("force-window", "no")
Log.i(TAG, "Video output disabled before surface detach")
} catch (e: Exception) {
Log.e(TAG, "Failed to disable video output: ${e.message}")
}
MPVLib.detachSurface()
}
}
/**
* Updates the surface size. Called from surfaceChanged.
* Based on Findroid's implementation.
*/
fun updateSurfaceSize(width: Int, height: Int) {
if (isRunning) {
MPVLib.setPropertyString("android-surface-size", "${width}x$height")
Log.i(TAG, "Surface size updated: ${width}x$height")
}
}
fun load(
url: String,
headers: Map<String, String>? = null,
startPosition: Double? = null,
externalSubtitles: List<String>? = null,
initialSubtitleId: Int? = null,
initialAudioId: Int? = null
) {
currentUrl = url
currentHeaders = headers
pendingExternalSubtitles = externalSubtitles ?: emptyList()
this.initialSubtitleId = initialSubtitleId
this.initialAudioId = initialAudioId
_isLoading = true
isReadyToSeek = false
mainHandler.post { delegate?.onLoadingChanged(true) }
// Stop previous playback
MPVLib.command(arrayOf("stop"))
// Set HTTP headers if provided
updateHttpHeaders(headers)
// Set start position
if (startPosition != null && startPosition > 0) {
MPVLib.setPropertyString("start", String.format("%.2f", startPosition))
} else {
MPVLib.setPropertyString("start", "0")
}
// Set initial audio track if specified
if (initialAudioId != null && initialAudioId > 0) {
setAudioTrack(initialAudioId)
}
// Set initial subtitle track if no external subs
if (pendingExternalSubtitles.isEmpty()) {
if (initialSubtitleId != null) {
setSubtitleTrack(initialSubtitleId)
} else {
disableSubtitles()
}
} else {
disableSubtitles()
}
// Load the file
MPVLib.command(arrayOf("loadfile", url, "replace"))
}
fun reloadCurrentItem() {
currentUrl?.let { url ->
load(url, currentHeaders)
}
}
private fun updateHttpHeaders(headers: Map<String, String>?) {
if (headers.isNullOrEmpty()) {
// Clear headers
return
}
val headerString = headers.entries.joinToString("\r\n") { "${it.key}: ${it.value}" }
MPVLib.setPropertyString("http-header-fields", headerString)
}
private fun observeProperties() {
MPVLib.observeProperty("duration", MPV_FORMAT_DOUBLE)
MPVLib.observeProperty("time-pos", MPV_FORMAT_DOUBLE)
MPVLib.observeProperty("pause", MPV_FORMAT_FLAG)
MPVLib.observeProperty("track-list/count", MPV_FORMAT_INT64)
MPVLib.observeProperty("paused-for-cache", MPV_FORMAT_FLAG)
// Video dimensions for PiP aspect ratio
MPVLib.observeProperty("video-params/w", MPV_FORMAT_INT64)
MPVLib.observeProperty("video-params/h", MPV_FORMAT_INT64)
}
// MARK: - Playback Controls
fun play() {
MPVLib.setPropertyBoolean("pause", false)
}
fun pause() {
MPVLib.setPropertyBoolean("pause", true)
}
fun togglePause() {
if (_isPaused) play() else pause()
}
fun seekTo(seconds: Double) {
val clamped = maxOf(0.0, seconds)
cachedPosition = clamped
MPVLib.command(arrayOf("seek", clamped.toString(), "absolute"))
}
fun seekBy(seconds: Double) {
val newPosition = maxOf(0.0, cachedPosition + seconds)
cachedPosition = newPosition
MPVLib.command(arrayOf("seek", seconds.toString(), "relative"))
}
fun setSpeed(speed: Double) {
_playbackSpeed = speed
MPVLib.setPropertyDouble("speed", speed)
}
fun getSpeed(): Double {
return MPVLib.getPropertyDouble("speed") ?: _playbackSpeed
}
// MARK: - Subtitle Controls
fun getSubtitleTracks(): List<Map<String, Any>> {
val tracks = mutableListOf<Map<String, Any>>()
val trackCount = MPVLib.getPropertyInt("track-list/count") ?: 0
for (i in 0 until trackCount) {
val trackType = MPVLib.getPropertyString("track-list/$i/type") ?: continue
if (trackType != "sub") continue
val trackId = MPVLib.getPropertyInt("track-list/$i/id") ?: continue
val track = mutableMapOf<String, Any>("id" to trackId)
MPVLib.getPropertyString("track-list/$i/title")?.let { track["title"] = it }
MPVLib.getPropertyString("track-list/$i/lang")?.let { track["lang"] = it }
val selected = MPVLib.getPropertyBoolean("track-list/$i/selected") ?: false
track["selected"] = selected
tracks.add(track)
}
return tracks
}
fun setSubtitleTrack(trackId: Int) {
Log.i(TAG, "setSubtitleTrack: setting sid to $trackId")
if (trackId < 0) {
MPVLib.setPropertyString("sid", "no")
} else {
MPVLib.setPropertyInt("sid", trackId)
}
}
fun disableSubtitles() {
MPVLib.setPropertyString("sid", "no")
}
fun getCurrentSubtitleTrack(): Int {
return MPVLib.getPropertyInt("sid") ?: 0
}
fun addSubtitleFile(url: String, select: Boolean = true) {
val flag = if (select) "select" else "cached"
MPVLib.command(arrayOf("sub-add", url, flag))
}
// MARK: - Subtitle Positioning
fun setSubtitlePosition(position: Int) {
MPVLib.setPropertyInt("sub-pos", position)
}
fun setSubtitleScale(scale: Double) {
MPVLib.setPropertyDouble("sub-scale", scale)
}
fun setSubtitleMarginY(margin: Int) {
MPVLib.setPropertyInt("sub-margin-y", margin)
}
fun setSubtitleAlignX(alignment: String) {
MPVLib.setPropertyString("sub-align-x", alignment)
}
fun setSubtitleAlignY(alignment: String) {
MPVLib.setPropertyString("sub-align-y", alignment)
}
fun setSubtitleFontSize(size: Int) {
MPVLib.setPropertyInt("sub-font-size", size)
}
// MARK: - Audio Track Controls
fun getAudioTracks(): List<Map<String, Any>> {
val tracks = mutableListOf<Map<String, Any>>()
val trackCount = MPVLib.getPropertyInt("track-list/count") ?: 0
for (i in 0 until trackCount) {
val trackType = MPVLib.getPropertyString("track-list/$i/type") ?: continue
if (trackType != "audio") continue
val trackId = MPVLib.getPropertyInt("track-list/$i/id") ?: continue
val track = mutableMapOf<String, Any>("id" to trackId)
MPVLib.getPropertyString("track-list/$i/title")?.let { track["title"] = it }
MPVLib.getPropertyString("track-list/$i/lang")?.let { track["lang"] = it }
MPVLib.getPropertyString("track-list/$i/codec")?.let { track["codec"] = it }
val channels = MPVLib.getPropertyInt("track-list/$i/audio-channels")
if (channels != null && channels > 0) {
track["channels"] = channels
}
val selected = MPVLib.getPropertyBoolean("track-list/$i/selected") ?: false
track["selected"] = selected
tracks.add(track)
}
return tracks
}
fun setAudioTrack(trackId: Int) {
Log.i(TAG, "setAudioTrack: setting aid to $trackId")
MPVLib.setPropertyInt("aid", trackId)
}
fun getCurrentAudioTrack(): Int {
return MPVLib.getPropertyInt("aid") ?: 0
}
// MARK: - MPVLib.EventObserver
override fun eventProperty(property: String) {
// Property changed but no value provided
}
override fun eventProperty(property: String, value: Long) {
when (property) {
"track-list/count" -> {
if (value > 0) {
Log.i(TAG, "Track list updated: $value tracks available")
mainHandler.post { delegate?.onTracksReady() }
}
}
"video-params/w" -> {
val width = value.toInt()
if (width > 0 && width != _videoWidth) {
_videoWidth = width
notifyVideoDimensionsIfReady()
}
}
"video-params/h" -> {
val height = value.toInt()
if (height > 0 && height != _videoHeight) {
_videoHeight = height
notifyVideoDimensionsIfReady()
}
}
}
}
private fun notifyVideoDimensionsIfReady() {
if (_videoWidth > 0 && _videoHeight > 0) {
Log.i(TAG, "Video dimensions: ${_videoWidth}x${_videoHeight}")
mainHandler.post { delegate?.onVideoDimensionsChanged(_videoWidth, _videoHeight) }
}
}
override fun eventProperty(property: String, value: Boolean) {
when (property) {
"pause" -> {
if (value != _isPaused) {
_isPaused = value
mainHandler.post { delegate?.onPauseChanged(value) }
}
}
"paused-for-cache" -> {
if (value != _isLoading) {
_isLoading = value
mainHandler.post { delegate?.onLoadingChanged(value) }
}
}
}
}
override fun eventProperty(property: String, value: String) {
// Handle string properties if needed
}
override fun eventProperty(property: String, value: Double) {
when (property) {
"duration" -> {
cachedDuration = value
mainHandler.post { delegate?.onPositionChanged(cachedPosition, cachedDuration) }
}
"time-pos" -> {
cachedPosition = value
mainHandler.post { delegate?.onPositionChanged(cachedPosition, cachedDuration) }
}
}
}
override fun event(eventId: Int) {
when (eventId) {
MPVLib.MPV_EVENT_FILE_LOADED -> {
// Add external subtitles now that file is loaded
if (pendingExternalSubtitles.isNotEmpty()) {
for (subUrl in pendingExternalSubtitles) {
MPVLib.command(arrayOf("sub-add", subUrl))
}
pendingExternalSubtitles = emptyList()
// Set subtitle after external subs are added
initialSubtitleId?.let { setSubtitleTrack(it) } ?: disableSubtitles()
}
if (!isReadyToSeek) {
isReadyToSeek = true
mainHandler.post { delegate?.onReadyToSeek() }
}
if (_isLoading) {
_isLoading = false
mainHandler.post { delegate?.onLoadingChanged(false) }
}
}
MPVLib.MPV_EVENT_SEEK -> {
// Seek started - show loading indicator
if (!_isLoading) {
_isLoading = true
mainHandler.post { delegate?.onLoadingChanged(true) }
}
}
MPVLib.MPV_EVENT_PLAYBACK_RESTART -> {
// Video playback has started/restarted (including after seek)
if (_isLoading) {
_isLoading = false
mainHandler.post { delegate?.onLoadingChanged(false) }
}
}
MPVLib.MPV_EVENT_END_FILE -> {
Log.i(TAG, "Playback ended")
}
MPVLib.MPV_EVENT_SHUTDOWN -> {
Log.w(TAG, "MPV shutdown")
}
}
}
}
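The renderer is driven entirely from its host view: construct it, assign a delegate, attach the SurfaceView's surface once it exists, then call load(). A minimal usage sketch based only on the API above; the surface and lifecycle wiring here is illustrative, not the actual MpvPlayerView code, and the URL and header values are placeholders:

```kotlin
import android.content.Context
import android.view.Surface

// Illustrative host for MPVLayerRenderer; MpvPlayerView is the real integration.
class RendererHost(context: Context) : MPVLayerRenderer.Delegate {
    private val renderer = MPVLayerRenderer(context).also { it.delegate = this }

    fun onSurfaceCreated(surface: Surface) {
        renderer.start()                 // create and configure libmpv
        renderer.attachSurface(surface)  // re-enables the gpu video output
        renderer.load(
            url = "https://example.org/stream.mkv",               // placeholder
            headers = mapOf("Authorization" to "Bearer <token>"), // placeholder
            startPosition = 0.0,
        )
    }

    fun onSurfaceDestroyed() = renderer.detachSurface()
    fun release() = renderer.stop()

    // Delegate callbacks: forward whatever the UI layer needs.
    override fun onPositionChanged(position: Double, duration: Double) {}
    override fun onPauseChanged(isPaused: Boolean) {}
    override fun onLoadingChanged(isLoading: Boolean) {}
    override fun onReadyToSeek() {}
    override fun onTracksReady() {}
    override fun onError(message: String) {}
    override fun onVideoDimensionsChanged(width: Int, height: Int) {}
}
```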

View File

@@ -0,0 +1,220 @@
package expo.modules.mpvplayer
import android.content.Context
import android.util.Log
import android.view.Surface
import dev.jdtech.mpv.MPVLib as LibMPV
/**
* Wrapper around the dev.jdtech.mpv.MPVLib class.
* This provides a consistent interface for the rest of the app.
*/
object MPVLib {
private const val TAG = "MPVLib"
private var initialized = false
// Event observer interface
interface EventObserver {
fun eventProperty(property: String)
fun eventProperty(property: String, value: Long)
fun eventProperty(property: String, value: Boolean)
fun eventProperty(property: String, value: String)
fun eventProperty(property: String, value: Double)
fun event(eventId: Int)
}
private val observers = mutableListOf<EventObserver>()
// Library event observer that forwards to our observers
private val libObserver = object : LibMPV.EventObserver {
override fun eventProperty(property: String) {
synchronized(observers) {
for (observer in observers) {
observer.eventProperty(property)
}
}
}
override fun eventProperty(property: String, value: Long) {
synchronized(observers) {
for (observer in observers) {
observer.eventProperty(property, value)
}
}
}
override fun eventProperty(property: String, value: Boolean) {
synchronized(observers) {
for (observer in observers) {
observer.eventProperty(property, value)
}
}
}
override fun eventProperty(property: String, value: String) {
synchronized(observers) {
for (observer in observers) {
observer.eventProperty(property, value)
}
}
}
override fun eventProperty(property: String, value: Double) {
synchronized(observers) {
for (observer in observers) {
observer.eventProperty(property, value)
}
}
}
override fun event(eventId: Int) {
synchronized(observers) {
for (observer in observers) {
observer.event(eventId)
}
}
}
}
fun addObserver(observer: EventObserver) {
synchronized(observers) {
observers.add(observer)
}
}
fun removeObserver(observer: EventObserver) {
synchronized(observers) {
observers.remove(observer)
}
}
// MPV Event IDs
const val MPV_EVENT_NONE = 0
const val MPV_EVENT_SHUTDOWN = 1
const val MPV_EVENT_LOG_MESSAGE = 2
const val MPV_EVENT_GET_PROPERTY_REPLY = 3
const val MPV_EVENT_SET_PROPERTY_REPLY = 4
const val MPV_EVENT_COMMAND_REPLY = 5
const val MPV_EVENT_START_FILE = 6
const val MPV_EVENT_END_FILE = 7
const val MPV_EVENT_FILE_LOADED = 8
const val MPV_EVENT_IDLE = 11
const val MPV_EVENT_TICK = 14
const val MPV_EVENT_CLIENT_MESSAGE = 16
const val MPV_EVENT_VIDEO_RECONFIG = 17
const val MPV_EVENT_AUDIO_RECONFIG = 18
const val MPV_EVENT_SEEK = 20
const val MPV_EVENT_PLAYBACK_RESTART = 21
const val MPV_EVENT_PROPERTY_CHANGE = 22
const val MPV_EVENT_QUEUE_OVERFLOW = 24
// End file reason
const val MPV_END_FILE_REASON_EOF = 0
const val MPV_END_FILE_REASON_STOP = 2
const val MPV_END_FILE_REASON_QUIT = 3
const val MPV_END_FILE_REASON_ERROR = 4
const val MPV_END_FILE_REASON_REDIRECT = 5
/**
* Create and initialize the MPV library
*/
fun create(context: Context, configDir: String? = null) {
if (initialized) return
try {
LibMPV.create(context)
LibMPV.addObserver(libObserver)
initialized = true
Log.i(TAG, "libmpv created successfully")
} catch (e: Exception) {
Log.e(TAG, "Failed to create libmpv: ${e.message}")
throw e
}
}
fun initialize() {
LibMPV.init()
}
fun destroy() {
if (!initialized) return
try {
LibMPV.removeObserver(libObserver)
LibMPV.destroy()
} catch (e: Exception) {
Log.e(TAG, "Error destroying mpv: ${e.message}")
}
initialized = false
}
fun isInitialized(): Boolean = initialized
fun attachSurface(surface: Surface) {
LibMPV.attachSurface(surface)
}
fun detachSurface() {
LibMPV.detachSurface()
}
fun command(cmd: Array<String?>) {
LibMPV.command(cmd)
}
fun setOptionString(name: String, value: String): Int {
return LibMPV.setOptionString(name, value)
}
fun getPropertyInt(name: String): Int? {
return try {
LibMPV.getPropertyInt(name)
} catch (e: Exception) {
null
}
}
fun getPropertyDouble(name: String): Double? {
return try {
LibMPV.getPropertyDouble(name)
} catch (e: Exception) {
null
}
}
fun getPropertyBoolean(name: String): Boolean? {
return try {
LibMPV.getPropertyBoolean(name)
} catch (e: Exception) {
null
}
}
fun getPropertyString(name: String): String? {
return try {
LibMPV.getPropertyString(name)
} catch (e: Exception) {
null
}
}
fun setPropertyInt(name: String, value: Int) {
LibMPV.setPropertyInt(name, value)
}
fun setPropertyDouble(name: String, value: Double) {
LibMPV.setPropertyDouble(name, value)
}
fun setPropertyBoolean(name: String, value: Boolean) {
LibMPV.setPropertyBoolean(name, value)
}
fun setPropertyString(name: String, value: String) {
LibMPV.setPropertyString(name, value)
}
fun observeProperty(name: String, format: Int) {
LibMPV.observeProperty(name, format)
}
}
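The wrapper keeps the dev.jdtech.mpv API behind a single object and fans events out to any number of observers. A small sketch of observing one property through it, assuming the renderer above is not involved (MPV_FORMAT_DOUBLE comes from the MPVLayerRenderer companion shown earlier):

```kotlin
import android.util.Log

// Sketch: watch playback position through the MPVLib wrapper only.
val positionObserver = object : MPVLib.EventObserver {
    override fun eventProperty(property: String) {}
    override fun eventProperty(property: String, value: Long) {}
    override fun eventProperty(property: String, value: Boolean) {}
    override fun eventProperty(property: String, value: String) {}
    override fun eventProperty(property: String, value: Double) {
        if (property == "time-pos") Log.d("PositionObserver", "time-pos = $value s")
    }
    override fun event(eventId: Int) {}
}

fun watchPosition() {
    MPVLib.addObserver(positionObserver)
    MPVLib.observeProperty("time-pos", MPVLayerRenderer.MPV_FORMAT_DOUBLE)
}
```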

View File

@@ -2,49 +2,170 @@ package expo.modules.mpvplayer
import expo.modules.kotlin.modules.Module
import expo.modules.kotlin.modules.ModuleDefinition
import java.net.URL
class MpvPlayerModule : Module() {
// Each module class must implement the definition function. The definition consists of components
// that describes the module's functionality and behavior.
// See https://docs.expo.dev/modules/module-api for more details about available components.
override fun definition() = ModuleDefinition {
// Sets the name of the module that JavaScript code will use to refer to the module. Takes a string as an argument.
// Can be inferred from module's class name, but it's recommended to set it explicitly for clarity.
// The module will be accessible from `requireNativeModule('MpvPlayer')` in JavaScript.
Name("MpvPlayer")
override fun definition() = ModuleDefinition {
Name("MpvPlayer")
// Defines constant property on the module.
Constant("PI") {
Math.PI
// Defines event names that the module can send to JavaScript.
Events("onChange")
// Defines a JavaScript synchronous function that runs the native code on the JavaScript thread.
Function("hello") {
"Hello from MPV Player! 👋"
}
// Defines a JavaScript function that always returns a Promise and whose native code
// is by default dispatched on the different thread than the JavaScript runtime runs on.
AsyncFunction("setValueAsync") { value: String ->
sendEvent("onChange", mapOf("value" to value))
}
// Enables the module to be used as a native view.
View(MpvPlayerView::class) {
// All video load options are passed via a single "source" prop
Prop("source") { view: MpvPlayerView, source: Map<String, Any?>? ->
if (source == null) return@Prop
val urlString = source["url"] as? String ?: return@Prop
@Suppress("UNCHECKED_CAST")
val config = VideoLoadConfig(
url = urlString,
headers = source["headers"] as? Map<String, String>,
externalSubtitles = source["externalSubtitles"] as? List<String>,
startPosition = (source["startPosition"] as? Number)?.toDouble(),
autoplay = (source["autoplay"] as? Boolean) ?: true,
initialSubtitleId = (source["initialSubtitleId"] as? Number)?.toInt(),
initialAudioId = (source["initialAudioId"] as? Number)?.toInt()
)
view.loadVideo(config)
}
// Async function to play video
AsyncFunction("play") { view: MpvPlayerView ->
view.play()
}
// Async function to pause video
AsyncFunction("pause") { view: MpvPlayerView ->
view.pause()
}
// Async function to seek to position
AsyncFunction("seekTo") { view: MpvPlayerView, position: Double ->
view.seekTo(position)
}
// Async function to seek by offset
AsyncFunction("seekBy") { view: MpvPlayerView, offset: Double ->
view.seekBy(offset)
}
// Async function to set playback speed
AsyncFunction("setSpeed") { view: MpvPlayerView, speed: Double ->
view.setSpeed(speed)
}
// Function to get current speed
AsyncFunction("getSpeed") { view: MpvPlayerView ->
view.getSpeed()
}
// Function to check if paused
AsyncFunction("isPaused") { view: MpvPlayerView ->
view.isPaused()
}
// Function to get current position
AsyncFunction("getCurrentPosition") { view: MpvPlayerView ->
view.getCurrentPosition()
}
// Function to get duration
AsyncFunction("getDuration") { view: MpvPlayerView ->
view.getDuration()
}
// Picture in Picture functions
AsyncFunction("startPictureInPicture") { view: MpvPlayerView ->
view.startPictureInPicture()
}
AsyncFunction("stopPictureInPicture") { view: MpvPlayerView ->
view.stopPictureInPicture()
}
AsyncFunction("isPictureInPictureSupported") { view: MpvPlayerView ->
view.isPictureInPictureSupported()
}
AsyncFunction("isPictureInPictureActive") { view: MpvPlayerView ->
view.isPictureInPictureActive()
}
// Subtitle functions
AsyncFunction("getSubtitleTracks") { view: MpvPlayerView ->
view.getSubtitleTracks()
}
AsyncFunction("setSubtitleTrack") { view: MpvPlayerView, trackId: Int ->
view.setSubtitleTrack(trackId)
}
AsyncFunction("disableSubtitles") { view: MpvPlayerView ->
view.disableSubtitles()
}
AsyncFunction("getCurrentSubtitleTrack") { view: MpvPlayerView ->
view.getCurrentSubtitleTrack()
}
AsyncFunction("addSubtitleFile") { view: MpvPlayerView, url: String, select: Boolean ->
view.addSubtitleFile(url, select)
}
// Subtitle positioning functions
AsyncFunction("setSubtitlePosition") { view: MpvPlayerView, position: Int ->
view.setSubtitlePosition(position)
}
AsyncFunction("setSubtitleScale") { view: MpvPlayerView, scale: Double ->
view.setSubtitleScale(scale)
}
AsyncFunction("setSubtitleMarginY") { view: MpvPlayerView, margin: Int ->
view.setSubtitleMarginY(margin)
}
AsyncFunction("setSubtitleAlignX") { view: MpvPlayerView, alignment: String ->
view.setSubtitleAlignX(alignment)
}
AsyncFunction("setSubtitleAlignY") { view: MpvPlayerView, alignment: String ->
view.setSubtitleAlignY(alignment)
}
AsyncFunction("setSubtitleFontSize") { view: MpvPlayerView, size: Int ->
view.setSubtitleFontSize(size)
}
// Audio track functions
AsyncFunction("getAudioTracks") { view: MpvPlayerView ->
view.getAudioTracks()
}
AsyncFunction("setAudioTrack") { view: MpvPlayerView, trackId: Int ->
view.setAudioTrack(trackId)
}
AsyncFunction("getCurrentAudioTrack") { view: MpvPlayerView ->
view.getCurrentAudioTrack()
}
// Defines events that the view can send to JavaScript
Events("onLoad", "onPlaybackStateChange", "onProgress", "onError", "onTracksReady")
}
}
// Defines event names that the module can send to JavaScript.
Events("onChange")
// Defines a JavaScript synchronous function that runs the native code on the JavaScript thread.
Function("hello") {
"Hello world! 👋"
}
// Defines a JavaScript function that always returns a Promise and whose native code
// is by default dispatched on the different thread than the JavaScript runtime runs on.
AsyncFunction("setValueAsync") { value: String ->
// Send an event to JavaScript.
sendEvent("onChange", mapOf(
"value" to value
))
}
// Enables the module to be used as a native view. Definition components that are accepted as part of
// the view definition: Prop, Events.
View(MpvPlayerView::class) {
// Defines a setter for the `url` prop.
Prop("url") { view: MpvPlayerView, url: URL ->
view.webView.loadUrl(url.toString())
}
// Defines an event that the view can send to JavaScript.
Events("onLoad")
}
}
}

View File

@@ -1,30 +1,353 @@
package expo.modules.mpvplayer
import android.content.Context
import android.webkit.WebView
import android.webkit.WebViewClient
import android.graphics.Color
import android.util.Log
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.widget.FrameLayout
import expo.modules.kotlin.AppContext
import expo.modules.kotlin.viewevent.EventDispatcher
import expo.modules.kotlin.views.ExpoView
class MpvPlayerView(context: Context, appContext: AppContext) : ExpoView(context, appContext) {
// Creates and initializes an event dispatcher for the `onLoad` event.
// The name of the event is inferred from the value and needs to match the event name defined in the module.
private val onLoad by EventDispatcher()
/**
* Configuration for loading a video
*/
data class VideoLoadConfig(
val url: String,
val headers: Map<String, String>? = null,
val externalSubtitles: List<String>? = null,
val startPosition: Double? = null,
val autoplay: Boolean = true,
val initialSubtitleId: Int? = null,
val initialAudioId: Int? = null
)
// Defines a WebView that will be used as the root subview.
internal val webView = WebView(context).apply {
layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
webViewClient = object : WebViewClient() {
override fun onPageFinished(view: WebView, url: String) {
// Sends an event to JavaScript. Triggers a callback defined on the view component in JavaScript.
onLoad(mapOf("url" to url))
}
/**
* MpvPlayerView - ExpoView that hosts the MPV player.
* This mirrors the iOS MpvPlayerView implementation.
*/
class MpvPlayerView(context: Context, appContext: AppContext) : ExpoView(context, appContext),
MPVLayerRenderer.Delegate, SurfaceHolder.Callback {
companion object {
private const val TAG = "MpvPlayerView"
}
// Event dispatchers
val onLoad by EventDispatcher()
val onPlaybackStateChange by EventDispatcher()
val onProgress by EventDispatcher()
val onError by EventDispatcher()
val onTracksReady by EventDispatcher()
private var surfaceView: SurfaceView
private var renderer: MPVLayerRenderer? = null
private var pipController: PiPController? = null
private var currentUrl: String? = null
private var cachedPosition: Double = 0.0
private var cachedDuration: Double = 0.0
private var intendedPlayState: Boolean = false
private var surfaceReady: Boolean = false
private var pendingConfig: VideoLoadConfig? = null
init {
setBackgroundColor(Color.BLACK)
// Create SurfaceView for video rendering
surfaceView = SurfaceView(context).apply {
layoutParams = FrameLayout.LayoutParams(
FrameLayout.LayoutParams.MATCH_PARENT,
FrameLayout.LayoutParams.MATCH_PARENT
)
holder.addCallback(this@MpvPlayerView)
}
addView(surfaceView)
// Initialize renderer
renderer = MPVLayerRenderer(context)
renderer?.delegate = this
// Initialize PiP controller with Expo's AppContext for proper activity access
pipController = PiPController(context, appContext)
pipController?.setPlayerView(surfaceView)
pipController?.delegate = object : PiPController.Delegate {
override fun onPlay() {
play()
}
override fun onPause() {
pause()
}
override fun onSeekBy(seconds: Double) {
seekBy(seconds)
}
}
// Start the renderer
try {
renderer?.start()
} catch (e: Exception) {
Log.e(TAG, "Failed to start renderer: ${e.message}")
onError(mapOf("error" to "Failed to start renderer: ${e.message}"))
}
}
// MARK: - SurfaceHolder.Callback
override fun surfaceCreated(holder: SurfaceHolder) {
Log.i(TAG, "Surface created")
surfaceReady = true
renderer?.attachSurface(holder.surface)
// If we have a pending load, execute it now
pendingConfig?.let { config ->
loadVideoInternal(config)
pendingConfig = null
}
}
override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
Log.i(TAG, "Surface changed: ${width}x${height}")
// Update MPV with the new surface size (Findroid approach)
renderer?.updateSurfaceSize(width, height)
}
override fun surfaceDestroyed(holder: SurfaceHolder) {
Log.i(TAG, "Surface destroyed")
surfaceReady = false
renderer?.detachSurface()
}
// MARK: - Video Loading
fun loadVideo(config: VideoLoadConfig) {
// Skip reload if same URL is already playing
if (currentUrl == config.url) {
return
}
if (!surfaceReady) {
// Surface not ready, store config and load when ready
pendingConfig = config
return
}
loadVideoInternal(config)
}
private fun loadVideoInternal(config: VideoLoadConfig) {
currentUrl = config.url
renderer?.load(
url = config.url,
headers = config.headers,
startPosition = config.startPosition,
externalSubtitles = config.externalSubtitles,
initialSubtitleId = config.initialSubtitleId,
initialAudioId = config.initialAudioId
)
if (config.autoplay) {
play()
}
onLoad(mapOf("url" to config.url))
}
// Convenience method for simple loads
fun loadVideo(url: String, headers: Map<String, String>? = null) {
loadVideo(VideoLoadConfig(url = url, headers = headers))
}
// MARK: - Playback Controls
fun play() {
intendedPlayState = true
renderer?.play()
pipController?.setPlaybackRate(1.0)
}
fun pause() {
intendedPlayState = false
renderer?.pause()
pipController?.setPlaybackRate(0.0)
}
fun seekTo(position: Double) {
renderer?.seekTo(position)
}
fun seekBy(offset: Double) {
renderer?.seekBy(offset)
}
fun setSpeed(speed: Double) {
renderer?.setSpeed(speed)
}
fun getSpeed(): Double {
return renderer?.getSpeed() ?: 1.0
}
fun isPaused(): Boolean {
return renderer?.isPausedState ?: true
}
fun getCurrentPosition(): Double {
return cachedPosition
}
fun getDuration(): Double {
return cachedDuration
}
// MARK: - Picture in Picture
fun startPictureInPicture() {
Log.i(TAG, "startPictureInPicture called")
pipController?.startPictureInPicture()
}
fun stopPictureInPicture() {
pipController?.stopPictureInPicture()
}
fun isPictureInPictureSupported(): Boolean {
return pipController?.isPictureInPictureSupported() ?: false
}
fun isPictureInPictureActive(): Boolean {
return pipController?.isPictureInPictureActive() ?: false
}
// MARK: - Subtitle Controls
fun getSubtitleTracks(): List<Map<String, Any>> {
return renderer?.getSubtitleTracks() ?: emptyList()
}
fun setSubtitleTrack(trackId: Int) {
renderer?.setSubtitleTrack(trackId)
}
fun disableSubtitles() {
renderer?.disableSubtitles()
}
fun getCurrentSubtitleTrack(): Int {
return renderer?.getCurrentSubtitleTrack() ?: 0
}
fun addSubtitleFile(url: String, select: Boolean = true) {
renderer?.addSubtitleFile(url, select)
}
// MARK: - Subtitle Positioning
fun setSubtitlePosition(position: Int) {
renderer?.setSubtitlePosition(position)
}
fun setSubtitleScale(scale: Double) {
renderer?.setSubtitleScale(scale)
}
fun setSubtitleMarginY(margin: Int) {
renderer?.setSubtitleMarginY(margin)
}
fun setSubtitleAlignX(alignment: String) {
renderer?.setSubtitleAlignX(alignment)
}
fun setSubtitleAlignY(alignment: String) {
renderer?.setSubtitleAlignY(alignment)
}
fun setSubtitleFontSize(size: Int) {
renderer?.setSubtitleFontSize(size)
}
// MARK: - Audio Track Controls
fun getAudioTracks(): List<Map<String, Any>> {
return renderer?.getAudioTracks() ?: emptyList()
}
fun setAudioTrack(trackId: Int) {
renderer?.setAudioTrack(trackId)
}
fun getCurrentAudioTrack(): Int {
return renderer?.getCurrentAudioTrack() ?: 0
}
// MARK: - MPVLayerRenderer.Delegate
override fun onPositionChanged(position: Double, duration: Double) {
cachedPosition = position
cachedDuration = duration
// Update PiP progress
if (pipController?.isPictureInPictureActive() == true) {
pipController?.setCurrentTime(position, duration)
}
onProgress(mapOf(
"position" to position,
"duration" to duration,
"progress" to if (duration > 0) position / duration else 0.0
))
}
override fun onPauseChanged(isPaused: Boolean) {
// Sync PiP playback rate
pipController?.setPlaybackRate(if (isPaused) 0.0 else 1.0)
onPlaybackStateChange(mapOf(
"isPaused" to isPaused,
"isPlaying" to !isPaused
))
}
override fun onLoadingChanged(isLoading: Boolean) {
onPlaybackStateChange(mapOf(
"isLoading" to isLoading
))
}
override fun onReadyToSeek() {
onPlaybackStateChange(mapOf(
"isReadyToSeek" to true
))
}
override fun onTracksReady() {
onTracksReady(emptyMap<String, Any>())
}
override fun onVideoDimensionsChanged(width: Int, height: Int) {
// Update PiP controller with video dimensions for proper aspect ratio
pipController?.setVideoDimensions(width, height)
}
override fun onError(message: String) {
onError(mapOf("error" to message))
}
// MARK: - Cleanup
fun cleanup() {
pipController?.stopPictureInPicture()
renderer?.stop()
surfaceView.holder.removeCallback(this)
}
override fun onDetachedFromWindow() {
super.onDetachedFromWindow()
cleanup()
}
}
init {
// Adds the WebView to the view hierarchy.
addView(webView)
}
}

View File

@@ -0,0 +1,263 @@
package expo.modules.mpvplayer
import android.app.Activity
import android.app.PictureInPictureParams
import android.content.Context
import android.content.pm.PackageManager
import android.graphics.Rect
import android.os.Build
import android.util.Log
import android.util.Rational
import android.view.View
import androidx.annotation.RequiresApi
import expo.modules.kotlin.AppContext
/**
* Picture-in-Picture controller for Android.
* This mirrors the iOS PiPController implementation.
*/
class PiPController(private val context: Context, private val appContext: AppContext? = null) {
companion object {
private const val TAG = "PiPController"
private const val DEFAULT_ASPECT_WIDTH = 16
private const val DEFAULT_ASPECT_HEIGHT = 9
}
interface Delegate {
fun onPlay()
fun onPause()
fun onSeekBy(seconds: Double)
}
var delegate: Delegate? = null
private var currentPosition: Double = 0.0
private var currentDuration: Double = 0.0
private var playbackRate: Double = 1.0
// Video dimensions for proper aspect ratio
private var videoWidth: Int = 0
private var videoHeight: Int = 0
// Reference to the player view for source rect
private var playerView: View? = null
/**
* Check if Picture-in-Picture is supported on this device
*/
fun isPictureInPictureSupported(): Boolean {
return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
context.packageManager.hasSystemFeature(PackageManager.FEATURE_PICTURE_IN_PICTURE)
} else {
false
}
}
/**
* Check if Picture-in-Picture is currently active
*/
fun isPictureInPictureActive(): Boolean {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val activity = getActivity()
return activity?.isInPictureInPictureMode ?: false
}
return false
}
/**
* Start Picture-in-Picture mode
*/
fun startPictureInPicture() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val activity = getActivity()
if (activity == null) {
Log.e(TAG, "Cannot start PiP: no activity found")
return
}
if (!isPictureInPictureSupported()) {
Log.e(TAG, "PiP not supported on this device")
return
}
try {
val params = buildPiPParams(forEntering = true)
activity.enterPictureInPictureMode(params)
Log.i(TAG, "Entered PiP mode")
} catch (e: Exception) {
Log.e(TAG, "Failed to enter PiP: ${e.message}")
}
} else {
Log.w(TAG, "PiP requires Android O or higher")
}
}
/**
* Stop Picture-in-Picture mode
*/
fun stopPictureInPicture() {
// On Android, exiting PiP is typically done by the user
// or by finishing the activity. We can request to move task to back.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val activity = getActivity()
if (activity?.isInPictureInPictureMode == true) {
// Move task to back which will exit PiP
activity.moveTaskToBack(false)
}
}
}
/**
* Update the current playback position and duration
* Note: We don't update PiP params here as we're not using progress in PiP controls
*/
fun setCurrentTime(position: Double, duration: Double) {
currentPosition = position
currentDuration = duration
}
/**
* Set the playback rate (0.0 for paused, 1.0 for playing)
*/
fun setPlaybackRate(rate: Double) {
playbackRate = rate
// Update PiP params to reflect play/pause state
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val activity = getActivity()
if (activity?.isInPictureInPictureMode == true) {
try {
activity.setPictureInPictureParams(buildPiPParams())
} catch (e: Exception) {
Log.e(TAG, "Failed to update PiP params: ${e.message}")
}
}
}
}
/**
* Set the video dimensions for proper aspect ratio calculation
*/
fun setVideoDimensions(width: Int, height: Int) {
if (width > 0 && height > 0) {
videoWidth = width
videoHeight = height
Log.i(TAG, "Video dimensions set: ${width}x${height}")
// Update PiP params if active
updatePiPParamsIfNeeded()
}
}
/**
* Set the player view reference for source rect hint
*/
fun setPlayerView(view: View?) {
playerView = view
}
private fun updatePiPParamsIfNeeded() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val activity = getActivity()
if (activity?.isInPictureInPictureMode == true) {
try {
activity.setPictureInPictureParams(buildPiPParams())
} catch (e: Exception) {
Log.e(TAG, "Failed to update PiP params: ${e.message}")
}
}
}
}
/**
* Build Picture-in-Picture params for the current player state.
* Calculates proper aspect ratio and source rect based on video and view dimensions.
*/
@RequiresApi(Build.VERSION_CODES.O)
private fun buildPiPParams(forEntering: Boolean = false): PictureInPictureParams {
val view = playerView
val viewWidth = view?.width ?: 0
val viewHeight = view?.height ?: 0
// Display aspect ratio from view (exactly like Findroid)
val displayAspectRatio = Rational(viewWidth.coerceAtLeast(1), viewHeight.coerceAtLeast(1))
// Video aspect ratio with 2.39:1 clamping (exactly like Findroid)
// Findroid: Rational(it.width.coerceAtMost((it.height * 2.39f).toInt()),
// it.height.coerceAtMost((it.width * 2.39f).toInt()))
val aspectRatio = if (videoWidth > 0 && videoHeight > 0) {
Rational(
videoWidth.coerceAtMost((videoHeight * 2.39f).toInt()),
videoHeight.coerceAtMost((videoWidth * 2.39f).toInt())
)
} else {
Rational(DEFAULT_ASPECT_WIDTH, DEFAULT_ASPECT_HEIGHT)
}
// Source rect hint calculation (exactly like Findroid)
val sourceRectHint = if (viewWidth > 0 && viewHeight > 0 && videoWidth > 0 && videoHeight > 0) {
if (displayAspectRatio < aspectRatio) {
// Letterboxing - black bars top/bottom
val space = ((viewHeight - (viewWidth.toFloat() / aspectRatio.toFloat())) / 2).toInt()
Rect(
0,
space,
viewWidth,
(viewWidth.toFloat() / aspectRatio.toFloat()).toInt() + space
)
} else {
// Pillarboxing - black bars left/right
val space = ((viewWidth - (viewHeight.toFloat() * aspectRatio.toFloat())) / 2).toInt()
Rect(
space,
0,
(viewHeight.toFloat() * aspectRatio.toFloat()).toInt() + space,
viewHeight
)
}
} else {
null
}
val builder = PictureInPictureParams.Builder()
.setAspectRatio(aspectRatio)
sourceRectHint?.let { builder.setSourceRectHint(it) }
// On Android 12+, enable auto-enter (like Findroid)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
builder.setAutoEnterEnabled(true)
}
return builder.build()
}
private fun getActivity(): Activity? {
// First try Expo's AppContext (preferred in React Native)
appContext?.currentActivity?.let { return it }
// Fallback: Try to get from context wrapper chain
var ctx = context
while (ctx is android.content.ContextWrapper) {
if (ctx is Activity) {
return ctx
}
ctx = ctx.baseContext
}
return null
}
/**
* Handle PiP action (called from activity when user taps PiP controls)
*/
fun handlePiPAction(action: String) {
when (action) {
"play" -> delegate?.onPlay()
"pause" -> delegate?.onPause()
"skip_forward" -> delegate?.onSeekBy(10.0)
"skip_backward" -> delegate?.onSeekBy(-10.0)
}
}
}
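PiPController builds the params and asks the current Activity to enter PiP, but the hosting Activity still has to drive it from system callbacks. A hedged sketch of that wiring; this Activity-side integration is not part of the diff and exists only to show how the controller would be used:

```kotlin
import android.app.Activity
import android.content.res.Configuration
import android.os.Build

// Illustrative Activity-side wiring for PiPController (not part of the module).
class PlayerActivity : Activity() {
    // Assumed to be created once the player view exists.
    private lateinit var pipController: PiPController

    // Android 12+ can auto-enter PiP via setAutoEnterEnabled(true); on older
    // versions, enter manually when the user backgrounds the app.
    override fun onUserLeaveHint() {
        super.onUserLeaveHint()
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S) {
            pipController.startPictureInPicture()
        }
    }

    override fun onPictureInPictureModeChanged(
        isInPictureInPictureMode: Boolean,
        newConfig: Configuration,
    ) {
        super.onPictureInPictureModeChanged(isInPictureInPictureMode, newConfig)
        // Hide on-screen controls while in PiP, restore them afterwards.
    }
}
```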

View File

@@ -1,6 +1,9 @@
{
"platforms": ["apple"],
"platforms": ["apple", "android", "web"],
"apple": {
"modules": ["MpvPlayerModule"]
},
"android": {
"modules": ["expo.modules.mpvplayer.MpvPlayerModule"]
}
}

View File

@@ -1,2 +1,6 @@
// Reexport the native module. On web, it will be resolved to MpvPlayerModule.web.ts
// and on native platforms to MpvPlayerModule.ts
export * from "./src/MpvPlayer.types";
export { default } from "./src/MpvPlayerModule";
export { default as MpvPlayerView } from "./src/MpvPlayerView";

View File

@@ -1,245 +0,0 @@
import Foundation
import CoreVideo
import Metal
import CoreMedia
import AVFoundation
/// Manages a pool of IOSurface-backed CVPixelBuffers that can be shared between Metal and AVFoundation
/// This enables zero-copy rendering where mpv renders to Metal textures that are directly usable by AVSampleBufferDisplayLayer
final class IOSurfaceBufferPool {
struct PooledBuffer {
let pixelBuffer: CVPixelBuffer
let texture: MTLTexture
let ioSurface: IOSurfaceRef
}
private let device: MTLDevice
private var pool: CVPixelBufferPool?
private var buffers: [PooledBuffer] = []
private var availableBuffers: [PooledBuffer] = []
private let lock = NSLock()
private(set) var width: Int = 0
private(set) var height: Int = 0
private(set) var pixelFormat: OSType = kCVPixelFormatType_32BGRA
private let maxBufferCount: Int
init(device: MTLDevice, maxBufferCount: Int = 3) {
self.device = device
self.maxBufferCount = maxBufferCount
}
deinit {
invalidate()
}
/// Configure the pool for a specific video size and format
func configure(width: Int, height: Int, pixelFormat: OSType = kCVPixelFormatType_32BGRA) -> Bool {
lock.lock()
defer { lock.unlock() }
guard width > 0, height > 0 else { return false }
// Skip if already configured for this size
if self.width == width && self.height == height && self.pixelFormat == pixelFormat && pool != nil {
return true
}
// Clear existing buffers
buffers.removeAll()
availableBuffers.removeAll()
pool = nil
self.width = width
self.height = height
self.pixelFormat = pixelFormat
// Create pixel buffer pool with IOSurface and Metal compatibility
let pixelBufferAttributes: [CFString: Any] = [
kCVPixelBufferPixelFormatTypeKey: pixelFormat,
kCVPixelBufferWidthKey: width,
kCVPixelBufferHeightKey: height,
kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary,
kCVPixelBufferMetalCompatibilityKey: true,
kCVPixelBufferCGImageCompatibilityKey: true,
kCVPixelBufferCGBitmapContextCompatibilityKey: true
]
let poolAttributes: [CFString: Any] = [
kCVPixelBufferPoolMinimumBufferCountKey: maxBufferCount
]
var newPool: CVPixelBufferPool?
let status = CVPixelBufferPoolCreate(
kCFAllocatorDefault,
poolAttributes as CFDictionary,
pixelBufferAttributes as CFDictionary,
&newPool
)
guard status == kCVReturnSuccess, let createdPool = newPool else {
Logger.shared.log("Failed to create IOSurface buffer pool: \(status)", type: "Error")
return false
}
pool = createdPool
// Pre-allocate buffers
for _ in 0..<maxBufferCount {
if let buffer = createPooledBuffer() {
buffers.append(buffer)
availableBuffers.append(buffer)
}
}
return true
}
/// Get an available buffer for rendering
func dequeueBuffer() -> PooledBuffer? {
lock.lock()
defer { lock.unlock() }
if let buffer = availableBuffers.popLast() {
return buffer
}
// Try to create a new buffer if under limit
if buffers.count < maxBufferCount, let buffer = createPooledBuffer() {
buffers.append(buffer)
return buffer
}
// All buffers in use - create temporary one
return createPooledBuffer()
}
/// Return a buffer to the pool after use
func enqueueBuffer(_ buffer: PooledBuffer) {
lock.lock()
defer { lock.unlock() }
if buffers.contains(where: { $0.pixelBuffer == buffer.pixelBuffer }) {
availableBuffers.append(buffer)
}
}
/// Clear all buffers and reset the pool
func invalidate() {
lock.lock()
defer { lock.unlock() }
buffers.removeAll()
availableBuffers.removeAll()
pool = nil
width = 0
height = 0
}
private func createPooledBuffer() -> PooledBuffer? {
guard let pool = pool else { return nil }
var pixelBuffer: CVPixelBuffer?
let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pixelBuffer)
guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
Logger.shared.log("Failed to create pixel buffer from pool: \(status)", type: "Error")
return nil
}
// Get IOSurface from pixel buffer
guard let ioSurface = CVPixelBufferGetIOSurface(buffer)?.takeUnretainedValue() else {
Logger.shared.log("Failed to get IOSurface from pixel buffer", type: "Error")
return nil
}
// Create Metal texture from IOSurface
let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
pixelFormat: metalPixelFormat(for: pixelFormat),
width: width,
height: height,
mipmapped: false
)
textureDescriptor.usage = [.renderTarget, .shaderRead, .shaderWrite]
textureDescriptor.storageMode = .shared
guard let texture = device.makeTexture(descriptor: textureDescriptor, iosurface: ioSurface, plane: 0) else {
Logger.shared.log("Failed to create Metal texture from IOSurface", type: "Error")
return nil
}
return PooledBuffer(pixelBuffer: buffer, texture: texture, ioSurface: ioSurface)
}
private func metalPixelFormat(for cvFormat: OSType) -> MTLPixelFormat {
switch cvFormat {
case kCVPixelFormatType_32BGRA:
return .bgra8Unorm
case kCVPixelFormatType_32RGBA:
return .rgba8Unorm
case kCVPixelFormatType_64RGBAHalf:
return .rgba16Float
default:
return .bgra8Unorm
}
}
}
// MARK: - CMSampleBuffer Creation
extension IOSurfaceBufferPool {
/// Create a CMSampleBuffer from a pooled buffer for AVSampleBufferDisplayLayer
static func createSampleBuffer(
from pixelBuffer: CVPixelBuffer,
formatDescription: CMVideoFormatDescription,
presentationTime: CMTime
) -> CMSampleBuffer? {
var timing = CMSampleTimingInfo(
duration: .invalid,
presentationTimeStamp: presentationTime,
decodeTimeStamp: .invalid
)
var sampleBuffer: CMSampleBuffer?
let status = CMSampleBufferCreateForImageBuffer(
allocator: kCFAllocatorDefault,
imageBuffer: pixelBuffer,
dataReady: true,
makeDataReadyCallback: nil,
refcon: nil,
formatDescription: formatDescription,
sampleTiming: &timing,
sampleBufferOut: &sampleBuffer
)
guard status == noErr else {
Logger.shared.log("Failed to create sample buffer: \(status)", type: "Error")
return nil
}
return sampleBuffer
}
/// Create a format description for the current pool configuration
func createFormatDescription() -> CMVideoFormatDescription? {
guard let buffer = dequeueBuffer() else { return nil }
defer { enqueueBuffer(buffer) }
var formatDescription: CMVideoFormatDescription?
let status = CMVideoFormatDescriptionCreateForImageBuffer(
allocator: kCFAllocatorDefault,
imageBuffer: buffer.pixelBuffer,
formatDescriptionOut: &formatDescription
)
guard status == noErr else {
Logger.shared.log("Failed to create format description: \(status)", type: "Error")
return nil
}
return formatDescription
}
}

View File

@@ -1,6 +1,6 @@
import Foundation
final class Logger {
class Logger {
static let shared = Logger()
struct LogEntry {
@@ -12,7 +12,6 @@ final class Logger {
private let queue = DispatchQueue(label: "mpvkit.logger", attributes: .concurrent)
private var logs: [LogEntry] = []
private let logFileURL: URL
private let dateFormatter: DateFormatter
private let maxFileSize = 1024 * 512
private let maxLogEntries = 1000
@@ -20,17 +19,12 @@ final class Logger {
private init() {
let tmpDir = URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
logFileURL = tmpDir.appendingPathComponent("logs.txt")
dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
}
func log(_ message: String, type: String = "General") {
let entry = LogEntry(message: message, type: type, timestamp: Date())
queue.async(flags: .barrier) { [weak self] in
guard let self else { return }
queue.async(flags: .barrier) {
self.logs.append(entry)
if self.logs.count > self.maxLogEntries {
@@ -38,20 +32,15 @@ final class Logger {
}
self.saveLogToFile(entry)
#if DEBUG
self.debugLog(entry)
#endif
DispatchQueue.main.async {
NotificationCenter.default.post(
name: NSNotification.Name("LoggerNotification"),
object: nil,
userInfo: [
"message": message,
"type": type,
"timestamp": entry.timestamp
]
NotificationCenter.default.post(name: NSNotification.Name("LoggerNotification"), object: nil,
userInfo: [
"message": message,
"type": type,
"timestamp": entry.timestamp
]
)
}
}
@@ -60,6 +49,8 @@ final class Logger {
func getLogs() -> String {
var result = ""
queue.sync {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
result = logs.map { "[\(dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" }
.joined(separator: "\n----\n")
}
@@ -68,12 +59,10 @@ final class Logger {
func getLogsAsync() async -> String {
return await withCheckedContinuation { continuation in
queue.async { [weak self] in
guard let self else {
continuation.resume(returning: "")
return
}
let result = self.logs.map { "[\(self.dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" }
queue.async {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
let result = self.logs.map { "[\(dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" }
.joined(separator: "\n----\n")
continuation.resume(returning: result)
}
@@ -81,8 +70,7 @@ final class Logger {
}
func clearLogs() {
queue.async(flags: .barrier) { [weak self] in
guard let self else { return }
queue.async(flags: .barrier) {
self.logs.removeAll()
try? FileManager.default.removeItem(at: self.logFileURL)
}
@@ -90,11 +78,7 @@ final class Logger {
func clearLogsAsync() async {
await withCheckedContinuation { continuation in
queue.async(flags: .barrier) { [weak self] in
guard let self else {
continuation.resume()
return
}
queue.async(flags: .barrier) {
self.logs.removeAll()
try? FileManager.default.removeItem(at: self.logFileURL)
continuation.resume()
@@ -103,9 +87,13 @@ final class Logger {
}
private func saveLogToFile(_ log: LogEntry) {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
let logString = "[\(dateFormatter.string(from: log.timestamp))] [\(log.type)] \(log.message)\n---\n"
guard let data = logString.data(using: .utf8) else {
print("Failed to encode log string to UTF-8")
return
}
@@ -127,6 +115,7 @@ final class Logger {
try data.write(to: logFileURL)
}
} catch {
print("Error managing log file: \(error)")
try? data.write(to: logFileURL)
}
}
@@ -149,14 +138,17 @@ final class Logger {
try truncatedData.write(to: logFileURL)
}
} catch {
print("Error truncating log file: \(error)")
try? FileManager.default.removeItem(at: logFileURL)
}
}
#if DEBUG
private func debugLog(_ entry: LogEntry) {
#if DEBUG
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
let formattedMessage = "[\(dateFormatter.string(from: entry.timestamp))] [\(entry.type)] \(entry.message)"
NSLog("%@", formattedMessage)
print(formattedMessage)
#endif
}
#endif
}

View File

@@ -0,0 +1,746 @@
import UIKit
import MPVKit
import CoreMedia
import CoreVideo
import AVFoundation
protocol MPVLayerRendererDelegate: AnyObject {
func renderer(_ renderer: MPVLayerRenderer, didUpdatePosition position: Double, duration: Double)
func renderer(_ renderer: MPVLayerRenderer, didChangePause isPaused: Bool)
func renderer(_ renderer: MPVLayerRenderer, didChangeLoading isLoading: Bool)
func renderer(_ renderer: MPVLayerRenderer, didBecomeReadyToSeek: Bool)
func renderer(_ renderer: MPVLayerRenderer, didBecomeTracksReady: Bool)
}
/// MPV player using vo_avfoundation for video output.
/// This renders video directly to AVSampleBufferDisplayLayer for PiP support.
final class MPVLayerRenderer {
enum RendererError: Error {
case mpvCreationFailed
case mpvInitialization(Int32)
}
private let displayLayer: AVSampleBufferDisplayLayer
private let queue = DispatchQueue(label: "mpv.avfoundation", qos: .userInitiated)
private let stateQueue = DispatchQueue(label: "mpv.avfoundation.state", attributes: .concurrent)
private var mpv: OpaquePointer?
private var currentPreset: PlayerPreset?
private var currentURL: URL?
private var currentHeaders: [String: String]?
private var pendingExternalSubtitles: [String] = []
private var initialSubtitleId: Int?
private var initialAudioId: Int?
private var isRunning = false
private var isStopping = false
weak var delegate: MPVLayerRendererDelegate?
// Thread-safe state for playback
private var _cachedDuration: Double = 0
private var _cachedPosition: Double = 0
private var _isPaused: Bool = true
private var _playbackSpeed: Double = 1.0
private var _isLoading: Bool = false
private var _isReadyToSeek: Bool = false
// Thread-safe accessors
private var cachedDuration: Double {
get { stateQueue.sync { _cachedDuration } }
set { stateQueue.async(flags: .barrier) { self._cachedDuration = newValue } }
}
private var cachedPosition: Double {
get { stateQueue.sync { _cachedPosition } }
set { stateQueue.async(flags: .barrier) { self._cachedPosition = newValue } }
}
private var isPaused: Bool {
get { stateQueue.sync { _isPaused } }
set { stateQueue.async(flags: .barrier) { self._isPaused = newValue } }
}
private var playbackSpeed: Double {
get { stateQueue.sync { _playbackSpeed } }
set { stateQueue.async(flags: .barrier) { self._playbackSpeed = newValue } }
}
private var isLoading: Bool {
get { stateQueue.sync { _isLoading } }
set { stateQueue.async(flags: .barrier) { self._isLoading = newValue } }
}
private var isReadyToSeek: Bool {
get { stateQueue.sync { _isReadyToSeek } }
set { stateQueue.async(flags: .barrier) { self._isReadyToSeek = newValue } }
}
var isPausedState: Bool {
return isPaused
}
init(displayLayer: AVSampleBufferDisplayLayer) {
self.displayLayer = displayLayer
}
deinit {
stop()
}
func start() throws {
guard !isRunning else { return }
guard let handle = mpv_create() else {
throw RendererError.mpvCreationFailed
}
mpv = handle
// Logging
#if DEBUG
checkError(mpv_request_log_messages(handle, "warn"))
#else
checkError(mpv_request_log_messages(handle, "no"))
#endif
// Pass the AVSampleBufferDisplayLayer to mpv via --wid
// The vo_avfoundation driver expects this
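// Note: the layer is passed unretained; the renderer's strong `displayLayer` property keeps it
// alive for as long as the mpv handle exists, so the raw pointer below stays valid.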
var displayLayerPtr = Int64(Int(bitPattern: Unmanaged.passUnretained(displayLayer).toOpaque()))
checkError(mpv_set_option(handle, "wid", MPV_FORMAT_INT64, &displayLayerPtr))
// Use AVFoundation video output - required for PiP support
checkError(mpv_set_option_string(handle, "vo", "avfoundation"))
// Enable composite OSD mode - renders subtitles directly onto video frames using GPU
// This is better for PiP as subtitles are baked into the video
checkError(mpv_set_option_string(handle, "avfoundation-composite-osd", "yes"))
// Hardware decoding with VideoToolbox - REQUIRED for vo_avfoundation
// vo_avfoundation ONLY accepts IMGFMT_VIDEOTOOLBOX frames
checkError(mpv_set_option_string(handle, "hwdec", "videotoolbox"))
checkError(mpv_set_option_string(handle, "hwdec-codecs", "all"))
checkError(mpv_set_option_string(handle, "hwdec-software-fallback", "no"))
// Seeking optimization - faster seeking at the cost of less precision
// Use keyframe seeking by default (much faster for network streams)
checkError(mpv_set_option_string(handle, "hr-seek", "no"))
// Drop frames during seeking for faster response
checkError(mpv_set_option_string(handle, "hr-seek-framedrop", "yes"))
// Demuxer cache settings for better network streaming
checkError(mpv_set_option_string(handle, "cache", "yes"))
checkError(mpv_set_option_string(handle, "demuxer-max-bytes", "150MiB"))
checkError(mpv_set_option_string(handle, "demuxer-max-back-bytes", "75MiB"))
checkError(mpv_set_option_string(handle, "demuxer-readahead-secs", "20"))
// Subtitle and audio settings
checkError(mpv_set_option_string(mpv, "subs-match-os-language", "yes"))
checkError(mpv_set_option_string(mpv, "subs-fallback", "yes"))
// Initialize mpv
let initStatus = mpv_initialize(handle)
guard initStatus >= 0 else {
throw RendererError.mpvInitialization(initStatus)
}
// Observe properties
observeProperties()
// Setup wakeup callback
mpv_set_wakeup_callback(handle, { ctx in
guard let ctx = ctx else { return }
let instance = Unmanaged<MPVLayerRenderer>.fromOpaque(ctx).takeUnretainedValue()
instance.processEvents()
}, Unmanaged.passUnretained(self).toOpaque())
isRunning = true
}
func stop() {
if isStopping { return }
if !isRunning, mpv == nil { return }
isRunning = false
isStopping = true
queue.sync { [weak self] in
guard let self, let handle = self.mpv else { return }
mpv_set_wakeup_callback(handle, nil, nil)
mpv_terminate_destroy(handle)
self.mpv = nil
}
DispatchQueue.main.async { [weak self] in
guard let self else { return }
if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil)
} else {
self.displayLayer.flushAndRemoveImage()
}
}
isStopping = false
}
func load(
url: URL,
with preset: PlayerPreset,
headers: [String: String]? = nil,
startPosition: Double? = nil,
externalSubtitles: [String]? = nil,
initialSubtitleId: Int? = nil,
initialAudioId: Int? = nil
) {
currentPreset = preset
currentURL = url
currentHeaders = headers
pendingExternalSubtitles = externalSubtitles ?? []
self.initialSubtitleId = initialSubtitleId
self.initialAudioId = initialAudioId
queue.async { [weak self] in
guard let self else { return }
self.isLoading = true
self.isReadyToSeek = false
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didChangeLoading: true)
}
guard let handle = self.mpv else { return }
self.apply(commands: preset.commands, on: handle)
// Stop previous playback before loading new file
self.command(handle, ["stop"])
self.updateHTTPHeaders(headers)
// Set start position
if let startPos = startPosition, startPos > 0 {
self.setProperty(name: "start", value: String(format: "%.2f", startPos))
} else {
self.setProperty(name: "start", value: "0")
}
// Set initial audio track if specified
if let audioId = self.initialAudioId, audioId > 0 {
self.setAudioTrack(audioId)
}
// Set initial subtitle track if no external subs
if self.pendingExternalSubtitles.isEmpty {
if let subId = self.initialSubtitleId {
self.setSubtitleTrack(subId)
} else {
self.disableSubtitles()
}
} else {
self.disableSubtitles()
}
let target = url.isFileURL ? url.path : url.absoluteString
self.command(handle, ["loadfile", target, "replace"])
}
}
func reloadCurrentItem() {
guard let url = currentURL, let preset = currentPreset else { return }
load(url: url, with: preset, headers: currentHeaders)
}
func applyPreset(_ preset: PlayerPreset) {
currentPreset = preset
guard let handle = mpv else { return }
queue.async { [weak self] in
guard let self else { return }
self.apply(commands: preset.commands, on: handle)
}
}
// MARK: - Property Helpers
private func setOption(name: String, value: String) {
guard let handle = mpv else { return }
checkError(mpv_set_option_string(handle, name, value))
}
private func setProperty(name: String, value: String) {
guard let handle = mpv else { return }
let status = mpv_set_property_string(handle, name, value)
if status < 0 {
Logger.shared.log("Failed to set property \(name)=\(value) (\(status))", type: "Warn")
}
}
private func clearProperty(name: String) {
guard let handle = mpv else { return }
let status = mpv_set_property(handle, name, MPV_FORMAT_NONE, nil)
if status < 0 {
Logger.shared.log("Failed to clear property \(name) (\(status))", type: "Warn")
}
}
private func updateHTTPHeaders(_ headers: [String: String]?) {
guard let headers, !headers.isEmpty else {
clearProperty(name: "http-header-fields")
return
}
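// Illustrative example: ["User-Agent": "Foo/1.0", "Referer": "https://example.com"]
// is flattened to the single string "User-Agent: Foo/1.0\r\nReferer: https://example.com".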
let headerString = headers
.map { key, value in "\(key): \(value)" }
.joined(separator: "\r\n")
setProperty(name: "http-header-fields", value: headerString)
}
private func observeProperties() {
guard let handle = mpv else { return }
let properties: [(String, mpv_format)] = [
("duration", MPV_FORMAT_DOUBLE),
("time-pos", MPV_FORMAT_DOUBLE),
("pause", MPV_FORMAT_FLAG),
("track-list/count", MPV_FORMAT_INT64),
("paused-for-cache", MPV_FORMAT_FLAG)
]
for (name, format) in properties {
mpv_observe_property(handle, 0, name, format)
}
}
private func apply(commands: [[String]], on handle: OpaquePointer) {
for command in commands {
guard !command.isEmpty else { continue }
self.command(handle, command)
}
}
private func command(_ handle: OpaquePointer, _ args: [String]) {
guard !args.isEmpty else { return }
_ = withCStringArray(args) { pointer in
mpv_command_async(handle, 0, pointer)
}
}
@discardableResult
private func commandSync(_ handle: OpaquePointer, _ args: [String]) -> Int32 {
guard !args.isEmpty else { return -1 }
return withCStringArray(args) { pointer in
mpv_command(handle, pointer)
}
}
private func checkError(_ status: CInt) {
if status < 0 {
Logger.shared.log("MPV API error: \(String(cString: mpv_error_string(status)))", type: "Error")
}
}
// MARK: - Event Handling
private func processEvents() {
queue.async { [weak self] in
guard let self else { return }
while self.mpv != nil && !self.isStopping {
guard let handle = self.mpv,
let eventPointer = mpv_wait_event(handle, 0) else { return }
let event = eventPointer.pointee
if event.event_id == MPV_EVENT_NONE { break }
self.handleEvent(event)
if event.event_id == MPV_EVENT_SHUTDOWN { break }
}
}
}
private func handleEvent(_ event: mpv_event) {
switch event.event_id {
case MPV_EVENT_FILE_LOADED:
// Add external subtitles now that the file is loaded
let hadExternalSubs = !pendingExternalSubtitles.isEmpty
if hadExternalSubs, let handle = mpv {
for subUrl in pendingExternalSubtitles {
command(handle, ["sub-add", subUrl])
}
pendingExternalSubtitles = []
// Set subtitle after external subs are added
if let subId = initialSubtitleId {
setSubtitleTrack(subId)
} else {
disableSubtitles()
}
}
if !isReadyToSeek {
isReadyToSeek = true
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didBecomeReadyToSeek: true)
}
}
// Notify loading ended
if isLoading {
isLoading = false
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didChangeLoading: false)
}
}
case MPV_EVENT_SEEK:
// Seek started - show loading indicator
if !isLoading {
isLoading = true
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didChangeLoading: true)
}
}
case MPV_EVENT_PLAYBACK_RESTART:
// Video playback has started/restarted (including after seek)
if isLoading {
isLoading = false
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didChangeLoading: false)
}
}
case MPV_EVENT_PROPERTY_CHANGE:
if let property = event.data?.assumingMemoryBound(to: mpv_event_property.self).pointee.name {
let name = String(cString: property)
refreshProperty(named: name, event: event)
}
case MPV_EVENT_SHUTDOWN:
Logger.shared.log("mpv shutdown", type: "Warn")
case MPV_EVENT_LOG_MESSAGE:
if let logMessagePointer = event.data?.assumingMemoryBound(to: mpv_event_log_message.self) {
let component = String(cString: logMessagePointer.pointee.prefix)
let text = String(cString: logMessagePointer.pointee.text)
let lower = text.lowercased()
if lower.contains("error") {
Logger.shared.log("mpv[\(component)] \(text)", type: "Error")
} else if lower.contains("warn") || lower.contains("warning") {
Logger.shared.log("mpv[\(component)] \(text)", type: "Warn")
}
}
default:
break
}
}
private func refreshProperty(named name: String, event: mpv_event) {
guard let handle = mpv else { return }
switch name {
case "duration":
var value = Double(0)
let status = getProperty(handle: handle, name: name, format: MPV_FORMAT_DOUBLE, value: &value)
if status >= 0 {
cachedDuration = value
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didUpdatePosition: self.cachedPosition, duration: self.cachedDuration)
}
}
case "time-pos":
var value = Double(0)
let status = getProperty(handle: handle, name: name, format: MPV_FORMAT_DOUBLE, value: &value)
if status >= 0 {
cachedPosition = value
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didUpdatePosition: self.cachedPosition, duration: self.cachedDuration)
}
}
case "pause":
var flag: Int32 = 0
let status = getProperty(handle: handle, name: name, format: MPV_FORMAT_FLAG, value: &flag)
if status >= 0 {
let newPaused = flag != 0
if newPaused != isPaused {
isPaused = newPaused
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didChangePause: self.isPaused)
}
}
}
case "paused-for-cache":
var flag: Int32 = 0
let status = getProperty(handle: handle, name: name, format: MPV_FORMAT_FLAG, value: &flag)
if status >= 0 {
let buffering = flag != 0
if buffering != isLoading {
isLoading = buffering
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didChangeLoading: buffering)
}
}
}
case "track-list/count":
var trackCount: Int64 = 0
let status = getProperty(handle: handle, name: name, format: MPV_FORMAT_INT64, value: &trackCount)
if status >= 0 && trackCount > 0 {
Logger.shared.log("Track list updated: \(trackCount) tracks available", type: "Info")
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.delegate?.renderer(self, didBecomeTracksReady: true)
}
}
default:
break
}
}
private func getStringProperty(handle: OpaquePointer, name: String) -> String? {
var result: String?
if let cString = mpv_get_property_string(handle, name) {
result = String(cString: cString)
mpv_free(cString)
}
return result
}
@discardableResult
private func getProperty<T>(handle: OpaquePointer, name: String, format: mpv_format, value: inout T) -> Int32 {
return withUnsafeMutablePointer(to: &value) { mutablePointer in
return mpv_get_property(handle, name, format, mutablePointer)
}
}
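/// Bridges a Swift [String] to the NULL-terminated `char **` argv shape the mpv command API expects:
/// each argument is strdup'd, a trailing nil is appended, `body` runs with the rebound pointer,
/// and every duplicated string is freed again on exit.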
@inline(__always)
private func withCStringArray<R>(_ args: [String], body: (UnsafeMutablePointer<UnsafePointer<CChar>?>?) -> R) -> R {
var cStrings = [UnsafeMutablePointer<CChar>?]()
cStrings.reserveCapacity(args.count + 1)
for s in args {
cStrings.append(strdup(s))
}
cStrings.append(nil)
defer {
for ptr in cStrings where ptr != nil {
free(ptr)
}
}
return cStrings.withUnsafeMutableBufferPointer { buffer in
return buffer.baseAddress!.withMemoryRebound(to: UnsafePointer<CChar>?.self, capacity: buffer.count) { rebound in
return body(UnsafeMutablePointer(mutating: rebound))
}
}
}
// MARK: - Playback Controls
func play() {
setProperty(name: "pause", value: "no")
}
func pausePlayback() {
setProperty(name: "pause", value: "yes")
}
func togglePause() {
if isPaused { play() } else { pausePlayback() }
}
func seek(to seconds: Double) {
guard let handle = mpv else { return }
let clamped = max(0, seconds)
cachedPosition = clamped
commandSync(handle, ["seek", String(clamped), "absolute"])
}
func seek(by seconds: Double) {
guard let handle = mpv else { return }
let newPosition = max(0, cachedPosition + seconds)
cachedPosition = newPosition
commandSync(handle, ["seek", String(seconds), "relative"])
}
/// Sync timebase - no-op for vo_avfoundation (mpv handles timing)
func syncTimebase() {
// vo_avfoundation manages its own timebase
}
func setSpeed(_ speed: Double) {
playbackSpeed = speed
setProperty(name: "speed", value: String(speed))
}
func getSpeed() -> Double {
guard let handle = mpv else { return 1.0 }
var speed: Double = 1.0
getProperty(handle: handle, name: "speed", format: MPV_FORMAT_DOUBLE, value: &speed)
return speed
}
// MARK: - Subtitle Controls
func getSubtitleTracks() -> [[String: Any]] {
guard let handle = mpv else {
Logger.shared.log("getSubtitleTracks: mpv handle is nil", type: "Warn")
return []
}
var tracks: [[String: Any]] = []
var trackCount: Int64 = 0
getProperty(handle: handle, name: "track-list/count", format: MPV_FORMAT_INT64, value: &trackCount)
for i in 0..<trackCount {
guard let trackType = getStringProperty(handle: handle, name: "track-list/\(i)/type"),
trackType == "sub" else { continue }
var trackId: Int64 = 0
getProperty(handle: handle, name: "track-list/\(i)/id", format: MPV_FORMAT_INT64, value: &trackId)
var track: [String: Any] = ["id": Int(trackId)]
if let title = getStringProperty(handle: handle, name: "track-list/\(i)/title") {
track["title"] = title
}
if let lang = getStringProperty(handle: handle, name: "track-list/\(i)/lang") {
track["lang"] = lang
}
var selected: Int32 = 0
getProperty(handle: handle, name: "track-list/\(i)/selected", format: MPV_FORMAT_FLAG, value: &selected)
track["selected"] = selected != 0
Logger.shared.log("getSubtitleTracks: found sub track id=\(trackId), title=\(track["title"] ?? "none"), lang=\(track["lang"] ?? "none")", type: "Info")
tracks.append(track)
}
Logger.shared.log("getSubtitleTracks: returning \(tracks.count) subtitle tracks", type: "Info")
return tracks
}
func setSubtitleTrack(_ trackId: Int) {
Logger.shared.log("setSubtitleTrack: setting sid to \(trackId)", type: "Info")
guard mpv != nil else {
Logger.shared.log("setSubtitleTrack: mpv handle is nil!", type: "Error")
return
}
if trackId < 0 {
setProperty(name: "sid", value: "no")
} else {
setProperty(name: "sid", value: String(trackId))
}
}
func disableSubtitles() {
setProperty(name: "sid", value: "no")
}
func getCurrentSubtitleTrack() -> Int {
guard let handle = mpv else { return 0 }
var sid: Int64 = 0
getProperty(handle: handle, name: "sid", format: MPV_FORMAT_INT64, value: &sid)
return Int(sid)
}
func addSubtitleFile(url: String, select: Bool = true) {
guard let handle = mpv else { return }
let flag = select ? "select" : "cached"
commandSync(handle, ["sub-add", url, flag])
}
// MARK: - Subtitle Positioning
func setSubtitlePosition(_ position: Int) {
setProperty(name: "sub-pos", value: String(position))
}
func setSubtitleScale(_ scale: Double) {
setProperty(name: "sub-scale", value: String(scale))
}
func setSubtitleMarginY(_ margin: Int) {
setProperty(name: "sub-margin-y", value: String(margin))
}
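/// `alignment` takes mpv's keywords: "left", "center" or "right" for sub-align-x,
/// and "top", "center" or "bottom" for sub-align-y below.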
func setSubtitleAlignX(_ alignment: String) {
setProperty(name: "sub-align-x", value: alignment)
}
func setSubtitleAlignY(_ alignment: String) {
setProperty(name: "sub-align-y", value: alignment)
}
func setSubtitleFontSize(_ size: Int) {
setProperty(name: "sub-font-size", value: String(size))
}
// MARK: - Audio Track Controls
func getAudioTracks() -> [[String: Any]] {
guard let handle = mpv else {
Logger.shared.log("getAudioTracks: mpv handle is nil", type: "Warn")
return []
}
var tracks: [[String: Any]] = []
var trackCount: Int64 = 0
getProperty(handle: handle, name: "track-list/count", format: MPV_FORMAT_INT64, value: &trackCount)
for i in 0..<trackCount {
guard let trackType = getStringProperty(handle: handle, name: "track-list/\(i)/type"),
trackType == "audio" else { continue }
var trackId: Int64 = 0
getProperty(handle: handle, name: "track-list/\(i)/id", format: MPV_FORMAT_INT64, value: &trackId)
var track: [String: Any] = ["id": Int(trackId)]
if let title = getStringProperty(handle: handle, name: "track-list/\(i)/title") {
track["title"] = title
}
if let lang = getStringProperty(handle: handle, name: "track-list/\(i)/lang") {
track["lang"] = lang
}
if let codec = getStringProperty(handle: handle, name: "track-list/\(i)/codec") {
track["codec"] = codec
}
var channels: Int64 = 0
getProperty(handle: handle, name: "track-list/\(i)/audio-channels", format: MPV_FORMAT_INT64, value: &channels)
if channels > 0 {
track["channels"] = Int(channels)
}
var selected: Int32 = 0
getProperty(handle: handle, name: "track-list/\(i)/selected", format: MPV_FORMAT_FLAG, value: &selected)
track["selected"] = selected != 0
Logger.shared.log("getAudioTracks: found audio track id=\(trackId), title=\(track["title"] ?? "none"), lang=\(track["lang"] ?? "none")", type: "Info")
tracks.append(track)
}
Logger.shared.log("getAudioTracks: returning \(tracks.count) audio tracks", type: "Info")
return tracks
}
func setAudioTrack(_ trackId: Int) {
guard mpv != nil else {
Logger.shared.log("setAudioTrack: mpv handle is nil", type: "Warn")
return
}
Logger.shared.log("setAudioTrack: setting aid to \(trackId)", type: "Info")
setProperty(name: "aid", value: String(trackId))
}
func getCurrentAudioTrack() -> Int {
guard let handle = mpv else { return 0 }
var aid: Int64 = 0
getProperty(handle: handle, name: "aid", format: MPV_FORMAT_INT64, value: &aid)
return Int(aid)
}
}

File diff suppressed because it is too large

View File

@@ -13,7 +13,7 @@ Pod::Spec.new do |s|
s.static_framework = true
s.dependency 'ExpoModulesCore'
s.dependency 'MPVKit', '~> 0.40.0'
s.dependency 'MPVKit-GPL'
# Swift/Objective-C compatibility
s.pod_target_xcconfig = {

View File

@@ -4,7 +4,25 @@ public class MpvPlayerModule: Module {
public func definition() -> ModuleDefinition {
Name("MpvPlayer")
// Enables the module to be used as a native view
// Defines event names that the module can send to JavaScript.
Events("onChange")
// Defines a JavaScript synchronous function that runs the native code on the JavaScript thread.
Function("hello") {
return "Hello from MPV Player! 👋"
}
// Defines a JavaScript function that always returns a Promise and whose native code
// is by default dispatched on the different thread than the JavaScript runtime runs on.
AsyncFunction("setValueAsync") { (value: String) in
// Send an event to JavaScript.
self.sendEvent("onChange", [
"value": value
])
}
// Enables the module to be used as a native view. Definition components that are accepted as part of the
// view definition: Prop, Events.
View(MpvPlayerView.self) {
// All video load options are passed via a single "source" prop
Prop("source") { (view: MpvPlayerView, source: [String: Any]?) in
@@ -25,44 +43,52 @@ public class MpvPlayerModule: Module {
view.loadVideo(config: config)
}
// Playback controls
// Async function to play video
AsyncFunction("play") { (view: MpvPlayerView) in
view.play()
}
// Async function to pause video
AsyncFunction("pause") { (view: MpvPlayerView) in
view.pause()
}
// Async function to seek to position
AsyncFunction("seekTo") { (view: MpvPlayerView, position: Double) in
view.seekTo(position: position)
}
// Async function to seek by offset
AsyncFunction("seekBy") { (view: MpvPlayerView, offset: Double) in
view.seekBy(offset: offset)
}
// Async function to set playback speed
AsyncFunction("setSpeed") { (view: MpvPlayerView, speed: Double) in
view.setSpeed(speed: speed)
}
// Function to get current speed
AsyncFunction("getSpeed") { (view: MpvPlayerView) -> Double in
return view.getSpeed()
}
// Function to check if paused
AsyncFunction("isPaused") { (view: MpvPlayerView) -> Bool in
return view.isPaused()
}
// Function to get current position
AsyncFunction("getCurrentPosition") { (view: MpvPlayerView) -> Double in
return view.getCurrentPosition()
}
// Function to get duration
AsyncFunction("getDuration") { (view: MpvPlayerView) -> Double in
return view.getDuration()
}
// Picture in Picture
// Picture in Picture functions
AsyncFunction("startPictureInPicture") { (view: MpvPlayerView) in
view.startPictureInPicture()
}
@@ -100,7 +126,7 @@ public class MpvPlayerModule: Module {
view.addSubtitleFile(url: url, select: select)
}
// Subtitle positioning
// Subtitle positioning functions
AsyncFunction("setSubtitlePosition") { (view: MpvPlayerView, position: Int) in
view.setSubtitlePosition(position)
}
@@ -138,7 +164,7 @@ public class MpvPlayerModule: Module {
return view.getCurrentAudioTrack()
}
// Events that the view can send to JavaScript
// Defines events that the view can send to JavaScript
Events("onLoad", "onPlaybackStateChange", "onProgress", "onError", "onTracksReady")
}
}

View File

@@ -5,376 +5,407 @@ import UIKit
/// Configuration for loading a video
struct VideoLoadConfig {
let url: URL
var headers: [String: String]?
var externalSubtitles: [String]?
var startPosition: Double?
var autoplay: Bool
/// MPV subtitle track ID to select on start (1-based, -1 to disable, nil to use default)
var initialSubtitleId: Int?
/// MPV audio track ID to select on start (1-based, nil to use default)
var initialAudioId: Int?
init(
url: URL,
headers: [String: String]? = nil,
externalSubtitles: [String]? = nil,
startPosition: Double? = nil,
autoplay: Bool = true,
initialSubtitleId: Int? = nil,
initialAudioId: Int? = nil
) {
self.url = url
self.headers = headers
self.externalSubtitles = externalSubtitles
self.startPosition = startPosition
self.autoplay = autoplay
self.initialSubtitleId = initialSubtitleId
self.initialAudioId = initialAudioId
}
let url: URL
var headers: [String: String]?
var externalSubtitles: [String]?
var startPosition: Double?
var autoplay: Bool
/// MPV subtitle track ID to select on start (1-based, -1 to disable, nil to use default)
var initialSubtitleId: Int?
/// MPV audio track ID to select on start (1-based, nil to use default)
var initialAudioId: Int?
init(
url: URL,
headers: [String: String]? = nil,
externalSubtitles: [String]? = nil,
startPosition: Double? = nil,
autoplay: Bool = true,
initialSubtitleId: Int? = nil,
initialAudioId: Int? = nil
) {
self.url = url
self.headers = headers
self.externalSubtitles = externalSubtitles
self.startPosition = startPosition
self.autoplay = autoplay
self.initialSubtitleId = initialSubtitleId
self.initialAudioId = initialAudioId
}
}
// This view will be used as a native component. Make sure to inherit from `ExpoView`
// to apply the proper styling (e.g. border radius and shadows).
class MpvPlayerView: ExpoView {
private let displayLayer = AVSampleBufferDisplayLayer()
private var renderer: MPVMetalRenderer?
private var videoContainer: UIView!
private var pipController: PiPController?
private let displayLayer = AVSampleBufferDisplayLayer()
private var renderer: MPVLayerRenderer?
private var videoContainer: UIView!
private var pipController: PiPController?
let onLoad = EventDispatcher()
let onPlaybackStateChange = EventDispatcher()
let onProgress = EventDispatcher()
let onError = EventDispatcher()
let onTracksReady = EventDispatcher()
let onLoad = EventDispatcher()
let onPlaybackStateChange = EventDispatcher()
let onProgress = EventDispatcher()
let onError = EventDispatcher()
let onTracksReady = EventDispatcher()
private var currentURL: URL?
private var cachedPosition: Double = 0
private var cachedDuration: Double = 0
private var intendedPlayState: Bool = false
private var currentURL: URL?
private var cachedPosition: Double = 0
private var cachedDuration: Double = 0
private var intendedPlayState: Bool = false // For PiP - ignores transient states during seek
required init(appContext: AppContext? = nil) {
super.init(appContext: appContext)
setupView()
}
required init(appContext: AppContext? = nil) {
super.init(appContext: appContext)
setupView()
}
private func setupView() {
clipsToBounds = true
backgroundColor = .black
private func setupView() {
clipsToBounds = true
backgroundColor = .black
videoContainer = UIView()
videoContainer.translatesAutoresizingMaskIntoConstraints = false
videoContainer.backgroundColor = .black
videoContainer.clipsToBounds = true
addSubview(videoContainer)
videoContainer = UIView()
videoContainer.translatesAutoresizingMaskIntoConstraints = false
videoContainer.backgroundColor = .black
videoContainer.clipsToBounds = true
addSubview(videoContainer)
displayLayer.frame = bounds
displayLayer.videoGravity = .resizeAspect
if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
displayLayer.backgroundColor = UIColor.black.cgColor
videoContainer.layer.addSublayer(displayLayer)
displayLayer.frame = bounds
displayLayer.videoGravity = .resizeAspect
if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
displayLayer.backgroundColor = UIColor.black.cgColor
videoContainer.layer.addSublayer(displayLayer)
NSLayoutConstraint.activate([
videoContainer.topAnchor.constraint(equalTo: topAnchor),
videoContainer.leadingAnchor.constraint(equalTo: leadingAnchor),
videoContainer.trailingAnchor.constraint(equalTo: trailingAnchor),
videoContainer.bottomAnchor.constraint(equalTo: bottomAnchor)
])
NSLayoutConstraint.activate([
videoContainer.topAnchor.constraint(equalTo: topAnchor),
videoContainer.leadingAnchor.constraint(equalTo: leadingAnchor),
videoContainer.trailingAnchor.constraint(equalTo: trailingAnchor),
videoContainer.bottomAnchor.constraint(equalTo: bottomAnchor)
])
do {
renderer = try MPVMetalRenderer(displayLayer: displayLayer)
renderer?.delegate = self
try renderer?.start()
} catch MPVMetalRenderer.RendererError.metalNotSupported {
onError(["error": "Metal is not supported on this device"])
} catch {
onError(["error": "Failed to start renderer: \(error.localizedDescription)"])
}
renderer = MPVLayerRenderer(displayLayer: displayLayer)
renderer?.delegate = self
// Setup PiP
pipController = PiPController(sampleBufferDisplayLayer: displayLayer)
pipController?.delegate = self
}
// Setup PiP
pipController = PiPController(sampleBufferDisplayLayer: displayLayer)
pipController?.delegate = self
override func layoutSubviews() {
super.layoutSubviews()
CATransaction.begin()
CATransaction.setDisableActions(true)
displayLayer.frame = videoContainer.bounds
displayLayer.isHidden = false
displayLayer.opacity = 1.0
CATransaction.commit()
}
do {
try renderer?.start()
} catch {
onError(["error": "Failed to start renderer: \(error.localizedDescription)"])
}
}
func loadVideo(config: VideoLoadConfig) {
// Skip reload if same URL is already playing
if currentURL == config.url {
return
}
currentURL = config.url
override func layoutSubviews() {
super.layoutSubviews()
CATransaction.begin()
CATransaction.setDisableActions(true)
displayLayer.frame = videoContainer.bounds
displayLayer.isHidden = false
displayLayer.opacity = 1.0
CATransaction.commit()
}
let preset = PlayerPreset(
id: .sdrRec709,
title: "Default",
summary: "Default playback preset",
stream: nil,
commands: []
)
func loadVideo(config: VideoLoadConfig) {
// Skip reload if same URL is already playing
if currentURL == config.url {
return
}
currentURL = config.url
// Pass everything to the renderer
renderer?.load(
url: config.url,
with: preset,
headers: config.headers,
startPosition: config.startPosition,
externalSubtitles: config.externalSubtitles,
initialSubtitleId: config.initialSubtitleId,
initialAudioId: config.initialAudioId
)
if config.autoplay {
play()
}
onLoad(["url": config.url.absoluteString])
}
// Convenience method for simple loads
func loadVideo(url: URL, headers: [String: String]? = nil) {
loadVideo(config: VideoLoadConfig(url: url, headers: headers))
}
let preset = PlayerPreset(
id: .sdrRec709,
title: "Default",
summary: "Default playback preset",
stream: nil,
commands: []
)
func play() {
intendedPlayState = true
renderer?.play()
pipController?.updatePlaybackState()
}
// Pass everything to the renderer - it handles start position and external subs
renderer?.load(
url: config.url,
with: preset,
headers: config.headers,
startPosition: config.startPosition,
externalSubtitles: config.externalSubtitles,
initialSubtitleId: config.initialSubtitleId,
initialAudioId: config.initialAudioId
)
if config.autoplay {
play()
}
onLoad(["url": config.url.absoluteString])
}
// Convenience method for simple loads
func loadVideo(url: URL, headers: [String: String]? = nil) {
loadVideo(config: VideoLoadConfig(url: url, headers: headers))
}
func pause() {
intendedPlayState = false
renderer?.pausePlayback()
pipController?.updatePlaybackState()
}
func play() {
intendedPlayState = true
renderer?.play()
pipController?.setPlaybackRate(1.0)
pipController?.updatePlaybackState()
}
func seekTo(position: Double) {
renderer?.seek(to: position)
}
func pause() {
intendedPlayState = false
renderer?.pausePlayback()
pipController?.setPlaybackRate(0.0)
pipController?.updatePlaybackState()
}
func seekBy(offset: Double) {
renderer?.seek(by: offset)
}
func seekTo(position: Double) {
renderer?.seek(to: position)
}
func setSpeed(speed: Double) {
renderer?.setSpeed(speed)
}
func seekBy(offset: Double) {
renderer?.seek(by: offset)
}
func getSpeed() -> Double {
return renderer?.getSpeed() ?? 1.0
}
func setSpeed(speed: Double) {
renderer?.setSpeed(speed)
}
func isPaused() -> Bool {
return renderer?.isPausedState ?? true
}
func getSpeed() -> Double {
return renderer?.getSpeed() ?? 1.0
}
func getCurrentPosition() -> Double {
return cachedPosition
}
func isPaused() -> Bool {
return renderer?.isPausedState ?? true
}
func getDuration() -> Double {
return cachedDuration
}
func getCurrentPosition() -> Double {
return cachedPosition
}
// MARK: - Picture in Picture
func getDuration() -> Double {
return cachedDuration
}
func startPictureInPicture() {
pipController?.startPictureInPicture()
}
// MARK: - Picture in Picture
func stopPictureInPicture() {
pipController?.stopPictureInPicture()
}
func startPictureInPicture() {
print("🎬 MpvPlayerView: startPictureInPicture called")
print("🎬 Duration: \(getDuration()), IsPlaying: \(!isPaused())")
pipController?.startPictureInPicture()
}
func isPictureInPictureSupported() -> Bool {
return pipController?.isPictureInPictureSupported ?? false
}
func stopPictureInPicture() {
pipController?.stopPictureInPicture()
}
func isPictureInPictureActive() -> Bool {
return pipController?.isPictureInPictureActive ?? false
}
// MARK: - Subtitle Controls
func getSubtitleTracks() -> [[String: Any]] {
return renderer?.getSubtitleTracks() ?? []
}
func setSubtitleTrack(_ trackId: Int) {
renderer?.setSubtitleTrack(trackId)
}
func disableSubtitles() {
renderer?.disableSubtitles()
}
func getCurrentSubtitleTrack() -> Int {
return renderer?.getCurrentSubtitleTrack() ?? 0
}
func addSubtitleFile(url: String, select: Bool = true) {
renderer?.addSubtitleFile(url: url, select: select)
}
// MARK: - Audio Track Controls
func getAudioTracks() -> [[String: Any]] {
return renderer?.getAudioTracks() ?? []
}
func setAudioTrack(_ trackId: Int) {
renderer?.setAudioTrack(trackId)
}
func getCurrentAudioTrack() -> Int {
return renderer?.getCurrentAudioTrack() ?? 0
}
// MARK: - Subtitle Positioning
func setSubtitlePosition(_ position: Int) {
renderer?.setSubtitlePosition(position)
}
func setSubtitleScale(_ scale: Double) {
renderer?.setSubtitleScale(scale)
}
func setSubtitleMarginY(_ margin: Int) {
renderer?.setSubtitleMarginY(margin)
}
func setSubtitleAlignX(_ alignment: String) {
renderer?.setSubtitleAlignX(alignment)
}
func setSubtitleAlignY(_ alignment: String) {
renderer?.setSubtitleAlignY(alignment)
}
func setSubtitleFontSize(_ size: Int) {
renderer?.setSubtitleFontSize(size)
}
func isPictureInPictureSupported() -> Bool {
return pipController?.isPictureInPictureSupported ?? false
}
deinit {
pipController?.stopPictureInPicture()
renderer?.stop()
displayLayer.controlTimebase = nil
displayLayer.removeFromSuperlayer()
}
func isPictureInPictureActive() -> Bool {
return pipController?.isPictureInPictureActive ?? false
}
// MARK: - Subtitle Controls
func getSubtitleTracks() -> [[String: Any]] {
return renderer?.getSubtitleTracks() ?? []
}
func setSubtitleTrack(_ trackId: Int) {
renderer?.setSubtitleTrack(trackId)
}
func disableSubtitles() {
renderer?.disableSubtitles()
}
func getCurrentSubtitleTrack() -> Int {
return renderer?.getCurrentSubtitleTrack() ?? 0
}
func addSubtitleFile(url: String, select: Bool = true) {
renderer?.addSubtitleFile(url: url, select: select)
}
// MARK: - Audio Track Controls
func getAudioTracks() -> [[String: Any]] {
return renderer?.getAudioTracks() ?? []
}
func setAudioTrack(_ trackId: Int) {
renderer?.setAudioTrack(trackId)
}
func getCurrentAudioTrack() -> Int {
return renderer?.getCurrentAudioTrack() ?? 0
}
// MARK: - Subtitle Positioning
func setSubtitlePosition(_ position: Int) {
renderer?.setSubtitlePosition(position)
}
func setSubtitleScale(_ scale: Double) {
renderer?.setSubtitleScale(scale)
}
func setSubtitleMarginY(_ margin: Int) {
renderer?.setSubtitleMarginY(margin)
}
func setSubtitleAlignX(_ alignment: String) {
renderer?.setSubtitleAlignX(alignment)
}
func setSubtitleAlignY(_ alignment: String) {
renderer?.setSubtitleAlignY(alignment)
}
func setSubtitleFontSize(_ size: Int) {
renderer?.setSubtitleFontSize(size)
}
deinit {
pipController?.stopPictureInPicture()
renderer?.stop()
displayLayer.removeFromSuperlayer()
}
}
// MARK: - MPVMetalRendererDelegate
// MARK: - MPVLayerRendererDelegate
extension MpvPlayerView: MPVMetalRendererDelegate {
func renderer(_: MPVMetalRenderer, didUpdatePosition position: Double, duration: Double) {
cachedPosition = position
cachedDuration = duration
DispatchQueue.main.async { [weak self] in
guard let self else { return }
if self.pipController?.isPictureInPictureActive == true {
self.pipController?.updatePlaybackState()
}
self.onProgress([
"position": position,
"duration": duration,
"progress": duration > 0 ? position / duration : 0,
])
}
}
extension MpvPlayerView: MPVLayerRendererDelegate {
func renderer(_: MPVLayerRenderer, didUpdatePosition position: Double, duration: Double) {
cachedPosition = position
cachedDuration = duration
DispatchQueue.main.async { [weak self] in
guard let self else { return }
// Update PiP current time for progress bar
if self.pipController?.isPictureInPictureActive == true {
self.pipController?.setCurrentTimeFromSeconds(position, duration: duration)
}
self.onProgress([
"position": position,
"duration": duration,
"progress": duration > 0 ? position / duration : 0,
])
}
}
func renderer(_: MPVMetalRenderer, didChangePause isPaused: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isPaused": isPaused,
"isPlaying": !isPaused,
])
}
}
func renderer(_: MPVLayerRenderer, didChangePause isPaused: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
// Don't update intendedPlayState here - it's only set by user actions (play/pause)
// This prevents PiP UI flicker during seeking
// Sync timebase rate with actual playback state
self.pipController?.setPlaybackRate(isPaused ? 0.0 : 1.0)
self.onPlaybackStateChange([
"isPaused": isPaused,
"isPlaying": !isPaused,
])
}
}
func renderer(_: MPVMetalRenderer, didChangeLoading isLoading: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isLoading": isLoading,
])
}
}
func renderer(_: MPVLayerRenderer, didChangeLoading isLoading: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isLoading": isLoading,
])
}
}
func renderer(_: MPVMetalRenderer, didBecomeReadyToSeek: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isReadyToSeek": didBecomeReadyToSeek,
])
}
}
func renderer(_: MPVMetalRenderer, didBecomeTracksReady: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onTracksReady([:])
}
}
func renderer(_: MPVLayerRenderer, didBecomeReadyToSeek: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isReadyToSeek": didBecomeReadyToSeek,
])
}
}
func renderer(_: MPVLayerRenderer, didBecomeTracksReady: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onTracksReady([:])
}
}
}
// MARK: - PiPControllerDelegate
extension MpvPlayerView: PiPControllerDelegate {
func pipController(_ controller: PiPController, willStartPictureInPicture: Bool) {
renderer?.syncTimebase()
pipController?.updatePlaybackState()
}
func pipController(_ controller: PiPController, didStartPictureInPicture: Bool) {
pipController?.updatePlaybackState()
}
func pipController(_ controller: PiPController, willStopPictureInPicture: Bool) {
renderer?.syncTimebase()
}
func pipController(_ controller: PiPController, didStopPictureInPicture: Bool) {
renderer?.syncTimebase()
pipController?.updatePlaybackState()
}
func pipController(_ controller: PiPController, restoreUserInterfaceForPictureInPictureStop completionHandler: @escaping (Bool) -> Void) {
completionHandler(true)
}
func pipControllerPlay(_ controller: PiPController) {
play()
}
func pipControllerPause(_ controller: PiPController) {
pause()
}
func pipController(_ controller: PiPController, skipByInterval interval: CMTime) {
let seconds = CMTimeGetSeconds(interval)
let target = max(0, cachedPosition + seconds)
seekTo(position: target)
}
func pipControllerIsPlaying(_ controller: PiPController) -> Bool {
return intendedPlayState
}
func pipControllerDuration(_ controller: PiPController) -> Double {
return getDuration()
}
func pipController(_ controller: PiPController, willStartPictureInPicture: Bool) {
print("PiP will start")
// Sync timebase before PiP starts for smooth transition
renderer?.syncTimebase()
// Set current time for PiP progress bar
pipController?.setCurrentTimeFromSeconds(cachedPosition, duration: cachedDuration)
}
func pipController(_ controller: PiPController, didStartPictureInPicture: Bool) {
print("PiP did start: \(didStartPictureInPicture)")
// Ensure current time is synced when PiP starts
pipController?.setCurrentTimeFromSeconds(cachedPosition, duration: cachedDuration)
}
func pipController(_ controller: PiPController, willStopPictureInPicture: Bool) {
print("PiP will stop")
// Sync timebase before returning from PiP
renderer?.syncTimebase()
}
func pipController(_ controller: PiPController, didStopPictureInPicture: Bool) {
print("PiP did stop")
// Ensure timebase is synced after PiP ends
renderer?.syncTimebase()
pipController?.updatePlaybackState()
}
func pipController(_ controller: PiPController, restoreUserInterfaceForPictureInPictureStop completionHandler: @escaping (Bool) -> Void) {
print("PiP restore user interface")
completionHandler(true)
}
func pipControllerPlay(_ controller: PiPController) {
print("PiP play requested")
intendedPlayState = true
renderer?.play()
pipController?.setPlaybackRate(1.0)
}
func pipControllerPause(_ controller: PiPController) {
print("PiP pause requested")
intendedPlayState = false
renderer?.pausePlayback()
pipController?.setPlaybackRate(0.0)
}
func pipController(_ controller: PiPController, skipByInterval interval: CMTime) {
let seconds = CMTimeGetSeconds(interval)
print("PiP skip by interval: \(seconds)")
let target = max(0, cachedPosition + seconds)
seekTo(position: target)
}
func pipControllerIsPlaying(_ controller: PiPController) -> Bool {
// Use intended state to ignore transient pauses during seeking
return intendedPlayState
}
func pipControllerDuration(_ controller: PiPController) -> Double {
return getDuration()
}
func pipControllerCurrentPosition(_ controller: PiPController) -> Double {
return getCurrentPosition()
}
}

View File

@@ -12,6 +12,7 @@ protocol PiPControllerDelegate: AnyObject {
func pipController(_ controller: PiPController, skipByInterval interval: CMTime)
func pipControllerIsPlaying(_ controller: PiPController) -> Bool
func pipControllerDuration(_ controller: PiPController) -> Double
func pipControllerCurrentPosition(_ controller: PiPController) -> Double
}
final class PiPController: NSObject {
@@ -20,6 +21,13 @@ final class PiPController: NSObject {
weak var delegate: PiPControllerDelegate?
// Timebase for PiP progress tracking
private var timebase: CMTimebase?
// Track current time for PiP progress
private var currentTime: CMTime = .zero
private var currentDuration: Double = 0
var isPictureInPictureSupported: Bool {
return AVPictureInPictureController.isPictureInPictureSupported()
}
@@ -35,9 +43,29 @@ final class PiPController: NSObject {
init(sampleBufferDisplayLayer: AVSampleBufferDisplayLayer) {
self.sampleBufferDisplayLayer = sampleBufferDisplayLayer
super.init()
setupTimebase()
setupPictureInPicture()
}
private func setupTimebase() {
// Create a timebase for tracking playback time
var newTimebase: CMTimebase?
let status = CMTimebaseCreateWithSourceClock(
allocator: kCFAllocatorDefault,
sourceClock: CMClockGetHostTimeClock(),
timebaseOut: &newTimebase
)
if status == noErr, let tb = newTimebase {
timebase = tb
CMTimebaseSetTime(tb, time: .zero)
CMTimebaseSetRate(tb, rate: 0) // Start paused
// Set the control timebase on the display layer
sampleBufferDisplayLayer?.controlTimebase = tb
}
}
private func setupPictureInPicture() {
guard isPictureInPictureSupported,
let displayLayer = sampleBufferDisplayLayer else {
@@ -81,6 +109,9 @@ final class PiPController: NSObject {
}
func updatePlaybackState() {
// Only invalidate when PiP is active to avoid "no context menu visible" warnings
guard isPictureInPictureActive else { return }
if Thread.isMainThread {
pipController?.invalidatePlaybackState()
} else {
@@ -89,6 +120,36 @@ final class PiPController: NSObject {
}
}
}
/// Updates the current playback time for PiP progress display
func setCurrentTime(_ time: CMTime) {
currentTime = time
// Update the timebase to reflect current position
if let tb = timebase {
CMTimebaseSetTime(tb, time: time)
}
// Only invalidate when PiP is active to avoid unnecessary updates
if isPictureInPictureActive {
updatePlaybackState()
}
}
/// Updates the current playback time from seconds
func setCurrentTimeFromSeconds(_ seconds: Double, duration: Double) {
guard seconds >= 0 else { return }
currentDuration = duration
let time = CMTime(seconds: seconds, preferredTimescale: 1000)
setCurrentTime(time)
}
/// Updates the playback rate on the timebase (1.0 = playing, 0.0 = paused)
func setPlaybackRate(_ rate: Float) {
if let tb = timebase {
CMTimebaseSetRate(tb, rate: Float64(rate))
}
}
}
// MARK: - AVPictureInPictureControllerDelegate
@@ -103,7 +164,7 @@ extension PiPController: AVPictureInPictureControllerDelegate {
}
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, failedToStartPictureInPictureWithError error: Error) {
Logger.shared.log("Failed to start PiP: \(error.localizedDescription)", type: "Error")
print("Failed to start PiP: \(error)")
delegate?.pipController(self, didStartPictureInPicture: false)
}
@@ -169,4 +230,4 @@ extension PiPController: AVPictureInPictureSampleBufferPlaybackDelegate {
}
completion()
}
}
}

View File

@@ -0,0 +1,72 @@
import UIKit
import AVFoundation
final class SampleBufferDisplayView: UIView {
override class var layerClass: AnyClass { AVSampleBufferDisplayLayer.self }
var displayLayer: AVSampleBufferDisplayLayer {
return layer as! AVSampleBufferDisplayLayer
}
private(set) var pipController: PiPController?
weak var pipDelegate: PiPControllerDelegate? {
didSet {
pipController?.delegate = pipDelegate
}
}
override init(frame: CGRect) {
super.init(frame: frame)
commonInit()
}
required init?(coder: NSCoder) {
super.init(coder: coder)
commonInit()
}
private func commonInit() {
backgroundColor = .black
displayLayer.videoGravity = .resizeAspect
#if !os(tvOS)
#if compiler(>=6.0)
if #available(iOS 26.0, *) {
displayLayer.preferredDynamicRange = .automatic
} else if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
#endif
if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
#endif
setupPictureInPicture()
}
private func setupPictureInPicture() {
pipController = PiPController(sampleBufferDisplayLayer: displayLayer)
}
// MARK: - PiP Control Methods
func startPictureInPicture() {
pipController?.startPictureInPicture()
}
func stopPictureInPicture() {
pipController?.stopPictureInPicture()
}
var isPictureInPictureSupported: Bool {
return pipController?.isPictureInPictureSupported ?? false
}
var isPictureInPictureActive: Bool {
return pipController?.isPictureInPictureActive ?? false
}
var isPictureInPicturePossible: Bool {
return pipController?.isPictureInPicturePossible ?? false
}
}

View File

@@ -23,6 +23,14 @@ export type OnErrorEventPayload = {
export type OnTracksReadyEventPayload = Record<string, never>;
export type MpvPlayerModuleEvents = {
onChange: (params: ChangeEventPayload) => void;
};
export type ChangeEventPayload = {
value: string;
};
export type VideoSource = {
url: string;
headers?: Record<string, string>;

View File

@@ -0,0 +1,11 @@
import { NativeModule, requireNativeModule } from "expo";
import { MpvPlayerModuleEvents } from "./MpvPlayer.types";
declare class MpvPlayerModule extends NativeModule<MpvPlayerModuleEvents> {
hello(): string;
setValueAsync(value: string): Promise<void>;
}
// This call loads the native module object from the JSI.
export default requireNativeModule<MpvPlayerModule>("MpvPlayer");

View File

@@ -0,0 +1,19 @@
import { NativeModule, registerWebModule } from "expo";
import { ChangeEventPayload } from "./MpvPlayer.types";
type MpvPlayerModuleEvents = {
onChange: (params: ChangeEventPayload) => void;
};
class MpvPlayerModule extends NativeModule<MpvPlayerModuleEvents> {
PI = Math.PI;
async setValueAsync(value: string): Promise<void> {
this.emit("onChange", { value });
}
hello() {
return "Hello world! 👋";
}
}
export default registerWebModule(MpvPlayerModule, "MpvPlayerModule");

View File

@@ -28,16 +28,16 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
await nativeRef.current?.setSpeed(speed);
},
getSpeed: async () => {
return (await nativeRef.current?.getSpeed()) ?? 1.0;
return await nativeRef.current?.getSpeed();
},
isPaused: async () => {
return (await nativeRef.current?.isPaused()) ?? true;
return await nativeRef.current?.isPaused();
},
getCurrentPosition: async () => {
return (await nativeRef.current?.getCurrentPosition()) ?? 0;
return await nativeRef.current?.getCurrentPosition();
},
getDuration: async () => {
return (await nativeRef.current?.getDuration()) ?? 0;
return await nativeRef.current?.getDuration();
},
startPictureInPicture: async () => {
await nativeRef.current?.startPictureInPicture();
@@ -46,15 +46,13 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
await nativeRef.current?.stopPictureInPicture();
},
isPictureInPictureSupported: async () => {
return (
(await nativeRef.current?.isPictureInPictureSupported()) ?? false
);
return await nativeRef.current?.isPictureInPictureSupported();
},
isPictureInPictureActive: async () => {
return (await nativeRef.current?.isPictureInPictureActive()) ?? false;
return await nativeRef.current?.isPictureInPictureActive();
},
getSubtitleTracks: async () => {
return (await nativeRef.current?.getSubtitleTracks()) ?? [];
return await nativeRef.current?.getSubtitleTracks();
},
setSubtitleTrack: async (trackId: number) => {
await nativeRef.current?.setSubtitleTrack(trackId);
@@ -63,7 +61,7 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
await nativeRef.current?.disableSubtitles();
},
getCurrentSubtitleTrack: async () => {
return (await nativeRef.current?.getCurrentSubtitleTrack()) ?? 0;
return await nativeRef.current?.getCurrentSubtitleTrack();
},
addSubtitleFile: async (url: string, select = true) => {
await nativeRef.current?.addSubtitleFile(url, select);
@@ -86,14 +84,15 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
setSubtitleFontSize: async (size: number) => {
await nativeRef.current?.setSubtitleFontSize(size);
},
// Audio controls
getAudioTracks: async () => {
return (await nativeRef.current?.getAudioTracks()) ?? [];
return await nativeRef.current?.getAudioTracks();
},
setAudioTrack: async (trackId: number) => {
await nativeRef.current?.setAudioTrack(trackId);
},
getCurrentAudioTrack: async () => {
return (await nativeRef.current?.getCurrentAudioTrack()) ?? 0;
return await nativeRef.current?.getCurrentAudioTrack();
},
}));
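For orientation, a minimal usage sketch of the imperative ref API exposed above. The component and ref type names come from this module's exports; the import path, the `source` shape, and the event prop names are inferred from the native definitions earlier in this diff, so treat them as assumptions rather than a documented contract.

import { useRef } from "react";
import { MpvPlayerView, type MpvPlayerViewRef } from "../modules/mpv-player"; // import path assumed

export function PlayerExample() {
  const playerRef = useRef<MpvPlayerViewRef>(null);

  return (
    <MpvPlayerView
      ref={playerRef}
      style={{ flex: 1 }}
      source={{ url: "https://example.com/video.mp4" }}
      onProgress={(e: any) => console.log("position", e.nativeEvent.position)}
      onTracksReady={async () => {
        // Track queries only return data once mpv has reported its track list.
        const tracks = (await playerRef.current?.getSubtitleTracks()) ?? [];
        if (tracks.length > 0) {
          await playerRef.current?.setSubtitleTrack(tracks[0].id);
        }
      }}
    />
  );
}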

View File

@@ -0,0 +1,14 @@
import { MpvPlayerViewProps } from "./MpvPlayer.types";
export default function MpvPlayerView(props: MpvPlayerViewProps) {
return (
<div>
<iframe
title='MPV Player'
style={{ flex: 1 }}
src={props.url}
onLoad={() => props.onLoad({ nativeEvent: { url: props.url } })}
/>
</div>
);
}

View File

@@ -1,2 +1,3 @@
export * from "./MpvPlayer.types";
export { default as MpvPlayerModule } from "./MpvPlayerModule";
export { default as MpvPlayerView } from "./MpvPlayerView";

24
plugins/withGitPod.js Normal file
View File

@@ -0,0 +1,24 @@
const { withPodfile } = require("@expo/config-plugins");
const withGitPod = (config, { podName, podspecUrl }) => {
return withPodfile(config, (config) => {
const podfile = config.modResults.contents;
const podLine = ` pod '${podName}', :podspec => '${podspecUrl}'`;
// Check if already added
if (podfile.includes(podLine)) {
return config;
}
// Insert after "use_expo_modules!"
config.modResults.contents = podfile.replace(
"use_expo_modules!",
`use_expo_modules!\n${podLine}`,
);
return config;
});
};
module.exports = withGitPod;
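In effect, after "use_expo_modules!" the generated Podfile gains one line of the form pod '<podName>', :podspec => '<podspecUrl>', with both placeholders filled from the plugin options; on later prebuilds the includes() check above finds the line already present and leaves the Podfile untouched.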

View File

@@ -96,6 +96,7 @@ export const WebSocketProvider = ({ children }: WebSocketProviderProps) => {
newWebSocket.onmessage = (e) => {
try {
const message = JSON.parse(e.data);
console.log("[WS] Received message:", message);
setLastMessage(message); // Store the last message in context
} catch (error) {
console.error("Error parsing WebSocket message:", error);
@@ -123,10 +124,12 @@ export const WebSocketProvider = ({ children }: WebSocketProviderProps) => {
const handlePlayCommand = useCallback(
(data: any) => {
if (!data || !data.ItemIds || !data.ItemIds.length) {
console.warn("[WS] Received Play command with no items");
return;
}
const itemId = data.ItemIds[0];
console.log(`[WS] Handling Play command for item: ${itemId}`);
router.push({
pathname: "/(auth)/player/direct-player",