Compare commits

...

2 Commits

Author SHA1 Message Date
Fredrik Burmester
e1769cbd59 wip 2025-12-08 10:44:42 +01:00
Fredrik Burmester
2b0e238799 wip 2025-12-07 12:36:37 +01:00
22 changed files with 782 additions and 1009 deletions

View File

@@ -73,12 +73,6 @@ export const ItemContent: React.FC<ItemContentProps> = React.memo(
defaultSubtitleIndex, defaultSubtitleIndex,
} = useDefaultPlaySettings(item, settings); } = useDefaultPlaySettings(item, settings);
console.log("defaultMediaSource", {
defaultAudioIndex,
defaultBitrate,
defaultSubtitleIndex,
});
const logoUrl = useMemo( const logoUrl = useMemo(
() => (item ? getLogoImageUrlById({ api, item }) : null), () => (item ? getLogoImageUrlById({ api, item }) : null),
[api, item], [api, item],

View File

@@ -104,11 +104,6 @@ export const MediaSourceButton: React.FC<Props> = ({
// Audio track group // Audio track group
if (audioStreams.length > 0) { if (audioStreams.length > 0) {
console.log("Audio comparison:", {
selectedAudioIndex: selectedOptions.audioIndex,
streamIndices: audioStreams.map((s) => s.Index),
});
groups.push({ groups.push({
title: t("item_card.audio"), title: t("item_card.audio"),
options: audioStreams.map((stream) => ({ options: audioStreams.map((stream) => ({

View File

@@ -280,7 +280,6 @@ export const PlayButton: React.FC<Props> = ({
]); ]);
const onPress = useCallback(async () => { const onPress = useCallback(async () => {
console.log("onPress");
if (!item) return; if (!item) return;
lightHapticFeedback(); lightHapticFeedback();

View File

@@ -59,7 +59,6 @@ export const PlayButton: React.FC<Props> = ({
); );
const onPress = () => { const onPress = () => {
console.log("onpress");
if (!item) return; if (!item) return;
lightHapticFeedback(); lightHapticFeedback();

View File

@@ -12,12 +12,6 @@ const useDefaultPlaySettings = (item: BaseItemDto, settings: Settings | null) =>
const { mediaSource, audioIndex, subtitleIndex, bitrate } = const { mediaSource, audioIndex, subtitleIndex, bitrate } =
getDefaultPlaySettings(item, settings); getDefaultPlaySettings(item, settings);
console.log("defaultPlaySettings", {
audioIndex,
subtitleIndex,
bitrate,
});
return { return {
defaultMediaSource: mediaSource, defaultMediaSource: mediaSource,
defaultAudioIndex: audioIndex, defaultAudioIndex: audioIndex,

View File

@@ -96,8 +96,6 @@ export const useWebSocket = ({
| Record<string, string> | Record<string, string>
| undefined; // Arguments are Dictionary<string, string> | undefined; // Arguments are Dictionary<string, string>
console.log("[WS] ~ ", lastMessage);
if (command === "PlayPause") { if (command === "PlayPause") {
console.log("Command ~ PlayPause"); console.log("Command ~ PlayPause");
togglePlay(); togglePlay();

View File

@@ -1,9 +1,6 @@
{ {
"platforms": ["apple", "android", "web"], "platforms": ["apple"],
"apple": { "apple": {
"modules": ["MpvPlayerModule"] "modules": ["MpvPlayerModule"]
},
"android": {
"modules": ["expo.modules.mpvplayer.MpvPlayerModule"]
} }
} }

View File

@@ -1,6 +1,2 @@
// Reexport the native module. On web, it will be resolved to MpvPlayerModule.web.ts
// and on native platforms to MpvPlayerModule.ts
export * from "./src/MpvPlayer.types"; export * from "./src/MpvPlayer.types";
export { default } from "./src/MpvPlayerModule";
export { default as MpvPlayerView } from "./src/MpvPlayerView"; export { default as MpvPlayerView } from "./src/MpvPlayerView";

View File

@@ -0,0 +1,245 @@
import Foundation
import CoreVideo
import Metal
import CoreMedia
import AVFoundation
/// Manages a pool of IOSurface-backed CVPixelBuffers that can be shared between Metal and AVFoundation
/// This enables zero-copy rendering where mpv renders to Metal textures that are directly usable by AVSampleBufferDisplayLayer
/// Pool of reusable IOSurface-backed CVPixelBuffers, each paired with an
/// MTLTexture wrapping the same IOSurface.
///
/// Because both the `CVPixelBuffer` (AVFoundation side) and the `MTLTexture`
/// (Metal side) view the same IOSurface, a frame rendered into the texture is
/// immediately usable by `AVSampleBufferDisplayLayer` without a copy.
///
/// Thread-safety: all public methods take `lock`; `createPooledBuffer()` is
/// only ever called while the lock is already held.
final class IOSurfaceBufferPool {
    /// One pooled entry: three views of the same underlying IOSurface.
    struct PooledBuffer {
        let pixelBuffer: CVPixelBuffer
        let texture: MTLTexture
        let ioSurface: IOSurfaceRef
    }
    private let device: MTLDevice
    /// Backing CVPixelBufferPool; nil until `configure` succeeds.
    private var pool: CVPixelBufferPool?
    /// All buffers retained for the current configuration (the "owned" set).
    private var buffers: [PooledBuffer] = []
    /// Subset of `buffers` not currently handed out to a caller.
    private var availableBuffers: [PooledBuffer] = []
    /// Guards `pool`, `buffers`, `availableBuffers`, and the size/format vars.
    private let lock = NSLock()
    private(set) var width: Int = 0
    private(set) var height: Int = 0
    private(set) var pixelFormat: OSType = kCVPixelFormatType_32BGRA
    /// Cap on retained buffers. Under pressure `dequeueBuffer` may still hand
    /// out extra "overflow" buffers, but those are never re-admitted to the
    /// pool (see `enqueueBuffer`).
    private let maxBufferCount: Int

    /// - Parameters:
    ///   - device: Metal device used to wrap IOSurfaces in textures.
    ///   - maxBufferCount: number of buffers pre-allocated by `configure`.
    init(device: MTLDevice, maxBufferCount: Int = 3) {
        self.device = device
        self.maxBufferCount = maxBufferCount
    }

    deinit {
        // Releases all buffers; invalidate() takes the lock itself.
        invalidate()
    }

    /// Configure the pool for a specific video size and format.
    ///
    /// Re-configuring with identical parameters is a no-op returning `true`.
    /// Otherwise all existing buffers are discarded, a new CVPixelBufferPool
    /// is created, and `maxBufferCount` buffers are pre-allocated.
    ///
    /// - Returns: `false` for non-positive dimensions or if pool creation
    ///   fails; `true` otherwise (even if some pre-allocations fail — those
    ///   are retried lazily by `dequeueBuffer`).
    func configure(width: Int, height: Int, pixelFormat: OSType = kCVPixelFormatType_32BGRA) -> Bool {
        lock.lock()
        defer { lock.unlock() }
        guard width > 0, height > 0 else { return false }
        // Skip if already configured for this size
        if self.width == width && self.height == height && self.pixelFormat == pixelFormat && pool != nil {
            return true
        }
        // Clear existing buffers
        buffers.removeAll()
        availableBuffers.removeAll()
        pool = nil
        self.width = width
        self.height = height
        self.pixelFormat = pixelFormat
        // Create pixel buffer pool with IOSurface and Metal compatibility.
        // The empty IOSurface-properties dictionary requests IOSurface backing
        // (required so CVPixelBufferGetIOSurface returns non-nil later).
        let pixelBufferAttributes: [CFString: Any] = [
            kCVPixelBufferPixelFormatTypeKey: pixelFormat,
            kCVPixelBufferWidthKey: width,
            kCVPixelBufferHeightKey: height,
            kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary,
            kCVPixelBufferMetalCompatibilityKey: true,
            kCVPixelBufferCGImageCompatibilityKey: true,
            kCVPixelBufferCGBitmapContextCompatibilityKey: true
        ]
        let poolAttributes: [CFString: Any] = [
            kCVPixelBufferPoolMinimumBufferCountKey: maxBufferCount
        ]
        var newPool: CVPixelBufferPool?
        let status = CVPixelBufferPoolCreate(
            kCFAllocatorDefault,
            poolAttributes as CFDictionary,
            pixelBufferAttributes as CFDictionary,
            &newPool
        )
        guard status == kCVReturnSuccess, let createdPool = newPool else {
            Logger.shared.log("Failed to create IOSurface buffer pool: \(status)", type: "Error")
            return false
        }
        pool = createdPool
        // Pre-allocate buffers
        for _ in 0..<maxBufferCount {
            if let buffer = createPooledBuffer() {
                buffers.append(buffer)
                availableBuffers.append(buffer)
            }
        }
        return true
    }

    /// Get an available buffer for rendering.
    ///
    /// Order of preference: reuse an idle pooled buffer; grow the owned set if
    /// below `maxBufferCount`; otherwise create a throwaway overflow buffer
    /// (which `enqueueBuffer` will decline to keep). Returns nil only if the
    /// pool is unconfigured or buffer creation fails.
    func dequeueBuffer() -> PooledBuffer? {
        lock.lock()
        defer { lock.unlock() }
        if let buffer = availableBuffers.popLast() {
            return buffer
        }
        // Try to create a new buffer if under limit
        if buffers.count < maxBufferCount, let buffer = createPooledBuffer() {
            buffers.append(buffer)
            return buffer
        }
        // All buffers in use - create temporary one
        return createPooledBuffer()
    }

    /// Return a buffer to the pool after use.
    ///
    /// Only buffers belonging to the owned set are re-admitted; overflow
    /// buffers created under pressure are simply dropped here.
    /// NOTE(review): membership is checked but double-enqueue of the same
    /// owned buffer is not guarded against — callers must enqueue each
    /// dequeued buffer exactly once.
    func enqueueBuffer(_ buffer: PooledBuffer) {
        lock.lock()
        defer { lock.unlock() }
        if buffers.contains(where: { $0.pixelBuffer == buffer.pixelBuffer }) {
            availableBuffers.append(buffer)
        }
    }

    /// Clear all buffers and reset the pool to its unconfigured state.
    /// Safe to call repeatedly; `configure` must run again before use.
    func invalidate() {
        lock.lock()
        defer { lock.unlock() }
        buffers.removeAll()
        availableBuffers.removeAll()
        pool = nil
        width = 0
        height = 0
    }

    /// Create one pixel-buffer + texture pair from the current pool.
    /// Must be called with `lock` held. Returns nil (with a logged error) if
    /// the pool is unset, CV allocation fails, the buffer has no IOSurface,
    /// or Metal texture creation fails.
    private func createPooledBuffer() -> PooledBuffer? {
        guard let pool = pool else { return nil }
        var pixelBuffer: CVPixelBuffer?
        let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pixelBuffer)
        guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
            Logger.shared.log("Failed to create pixel buffer from pool: \(status)", type: "Error")
            return nil
        }
        // Get IOSurface from pixel buffer (takeUnretainedValue: the pixel
        // buffer keeps the surface alive; we hold it via PooledBuffer).
        guard let ioSurface = CVPixelBufferGetIOSurface(buffer)?.takeUnretainedValue() else {
            Logger.shared.log("Failed to get IOSurface from pixel buffer", type: "Error")
            return nil
        }
        // Create Metal texture from IOSurface
        let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
            pixelFormat: metalPixelFormat(for: pixelFormat),
            width: width,
            height: height,
            mipmapped: false
        )
        textureDescriptor.usage = [.renderTarget, .shaderRead, .shaderWrite]
        textureDescriptor.storageMode = .shared
        // Plane 0: assumes a single-plane pixel format (true for the BGRA/RGBA
        // formats mapped below) — TODO confirm if planar formats are ever used.
        guard let texture = device.makeTexture(descriptor: textureDescriptor, iosurface: ioSurface, plane: 0) else {
            Logger.shared.log("Failed to create Metal texture from IOSurface", type: "Error")
            return nil
        }
        return PooledBuffer(pixelBuffer: buffer, texture: texture, ioSurface: ioSurface)
    }

    /// Map a CoreVideo pixel format to its Metal equivalent.
    /// Unknown formats fall back to .bgra8Unorm (matches the pool's default).
    private func metalPixelFormat(for cvFormat: OSType) -> MTLPixelFormat {
        switch cvFormat {
        case kCVPixelFormatType_32BGRA:
            return .bgra8Unorm
        case kCVPixelFormatType_32RGBA:
            return .rgba8Unorm
        case kCVPixelFormatType_64RGBAHalf:
            return .rgba16Float
        default:
            return .bgra8Unorm
        }
    }
}
// MARK: - CMSampleBuffer Creation
extension IOSurfaceBufferPool {
    /// Create a CMSampleBuffer wrapping `pixelBuffer` for display via
    /// AVSampleBufferDisplayLayer.
    ///
    /// - Parameters:
    ///   - pixelBuffer: frame contents (typically a pooled buffer's pixelBuffer).
    ///   - formatDescription: must describe `pixelBuffer`'s dimensions/format
    ///     (see `createFormatDescription()`).
    ///   - presentationTime: PTS stamped onto the sample; duration and decode
    ///     timestamp are left `.invalid`.
    /// - Returns: the sample buffer, or nil (with a logged error) on failure.
    static func createSampleBuffer(
        from pixelBuffer: CVPixelBuffer,
        formatDescription: CMVideoFormatDescription,
        presentationTime: CMTime
    ) -> CMSampleBuffer? {
        var timing = CMSampleTimingInfo(
            duration: .invalid,
            presentationTimeStamp: presentationTime,
            decodeTimeStamp: .invalid
        )
        var sampleBuffer: CMSampleBuffer?
        let status = CMSampleBufferCreateForImageBuffer(
            allocator: kCFAllocatorDefault,
            imageBuffer: pixelBuffer,
            dataReady: true,
            makeDataReadyCallback: nil,
            refcon: nil,
            formatDescription: formatDescription,
            sampleTiming: &timing,
            sampleBufferOut: &sampleBuffer
        )
        guard status == noErr else {
            Logger.shared.log("Failed to create sample buffer: \(status)", type: "Error")
            return nil
        }
        return sampleBuffer
    }

    /// Create a format description matching the pool's current configuration.
    ///
    /// Temporarily dequeues a buffer to derive the description from a real
    /// image buffer, then returns it to the pool. Returns nil if no buffer
    /// can be dequeued (pool unconfigured) or description creation fails.
    func createFormatDescription() -> CMVideoFormatDescription? {
        guard let buffer = dequeueBuffer() else { return nil }
        defer { enqueueBuffer(buffer) }
        var formatDescription: CMVideoFormatDescription?
        let status = CMVideoFormatDescriptionCreateForImageBuffer(
            allocator: kCFAllocatorDefault,
            imageBuffer: buffer.pixelBuffer,
            formatDescriptionOut: &formatDescription
        )
        guard status == noErr else {
            Logger.shared.log("Failed to create format description: \(status)", type: "Error")
            return nil
        }
        return formatDescription
    }
}

View File

@@ -1,6 +1,6 @@
import Foundation import Foundation
class Logger { final class Logger {
static let shared = Logger() static let shared = Logger()
struct LogEntry { struct LogEntry {
@@ -12,6 +12,7 @@ class Logger {
private let queue = DispatchQueue(label: "mpvkit.logger", attributes: .concurrent) private let queue = DispatchQueue(label: "mpvkit.logger", attributes: .concurrent)
private var logs: [LogEntry] = [] private var logs: [LogEntry] = []
private let logFileURL: URL private let logFileURL: URL
private let dateFormatter: DateFormatter
private let maxFileSize = 1024 * 512 private let maxFileSize = 1024 * 512
private let maxLogEntries = 1000 private let maxLogEntries = 1000
@@ -19,12 +20,17 @@ class Logger {
private init() { private init() {
let tmpDir = URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true) let tmpDir = URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
logFileURL = tmpDir.appendingPathComponent("logs.txt") logFileURL = tmpDir.appendingPathComponent("logs.txt")
dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
} }
func log(_ message: String, type: String = "General") { func log(_ message: String, type: String = "General") {
let entry = LogEntry(message: message, type: type, timestamp: Date()) let entry = LogEntry(message: message, type: type, timestamp: Date())
queue.async(flags: .barrier) { queue.async(flags: .barrier) { [weak self] in
guard let self else { return }
self.logs.append(entry) self.logs.append(entry)
if self.logs.count > self.maxLogEntries { if self.logs.count > self.maxLogEntries {
@@ -32,15 +38,20 @@ class Logger {
} }
self.saveLogToFile(entry) self.saveLogToFile(entry)
#if DEBUG
self.debugLog(entry) self.debugLog(entry)
#endif
DispatchQueue.main.async { DispatchQueue.main.async {
NotificationCenter.default.post(name: NSNotification.Name("LoggerNotification"), object: nil, NotificationCenter.default.post(
userInfo: [ name: NSNotification.Name("LoggerNotification"),
"message": message, object: nil,
"type": type, userInfo: [
"timestamp": entry.timestamp "message": message,
] "type": type,
"timestamp": entry.timestamp
]
) )
} }
} }
@@ -49,8 +60,6 @@ class Logger {
func getLogs() -> String { func getLogs() -> String {
var result = "" var result = ""
queue.sync { queue.sync {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
result = logs.map { "[\(dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" } result = logs.map { "[\(dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" }
.joined(separator: "\n----\n") .joined(separator: "\n----\n")
} }
@@ -59,10 +68,12 @@ class Logger {
func getLogsAsync() async -> String { func getLogsAsync() async -> String {
return await withCheckedContinuation { continuation in return await withCheckedContinuation { continuation in
queue.async { queue.async { [weak self] in
let dateFormatter = DateFormatter() guard let self else {
dateFormatter.dateFormat = "dd-MM HH:mm:ss" continuation.resume(returning: "")
let result = self.logs.map { "[\(dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" } return
}
let result = self.logs.map { "[\(self.dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" }
.joined(separator: "\n----\n") .joined(separator: "\n----\n")
continuation.resume(returning: result) continuation.resume(returning: result)
} }
@@ -70,7 +81,8 @@ class Logger {
} }
func clearLogs() { func clearLogs() {
queue.async(flags: .barrier) { queue.async(flags: .barrier) { [weak self] in
guard let self else { return }
self.logs.removeAll() self.logs.removeAll()
try? FileManager.default.removeItem(at: self.logFileURL) try? FileManager.default.removeItem(at: self.logFileURL)
} }
@@ -78,7 +90,11 @@ class Logger {
func clearLogsAsync() async { func clearLogsAsync() async {
await withCheckedContinuation { continuation in await withCheckedContinuation { continuation in
queue.async(flags: .barrier) { queue.async(flags: .barrier) { [weak self] in
guard let self else {
continuation.resume()
return
}
self.logs.removeAll() self.logs.removeAll()
try? FileManager.default.removeItem(at: self.logFileURL) try? FileManager.default.removeItem(at: self.logFileURL)
continuation.resume() continuation.resume()
@@ -87,13 +103,9 @@ class Logger {
} }
private func saveLogToFile(_ log: LogEntry) { private func saveLogToFile(_ log: LogEntry) {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
let logString = "[\(dateFormatter.string(from: log.timestamp))] [\(log.type)] \(log.message)\n---\n" let logString = "[\(dateFormatter.string(from: log.timestamp))] [\(log.type)] \(log.message)\n---\n"
guard let data = logString.data(using: .utf8) else { guard let data = logString.data(using: .utf8) else {
print("Failed to encode log string to UTF-8")
return return
} }
@@ -115,7 +127,6 @@ class Logger {
try data.write(to: logFileURL) try data.write(to: logFileURL)
} }
} catch { } catch {
print("Error managing log file: \(error)")
try? data.write(to: logFileURL) try? data.write(to: logFileURL)
} }
} }
@@ -138,17 +149,14 @@ class Logger {
try truncatedData.write(to: logFileURL) try truncatedData.write(to: logFileURL)
} }
} catch { } catch {
print("Error truncating log file: \(error)")
try? FileManager.default.removeItem(at: logFileURL) try? FileManager.default.removeItem(at: logFileURL)
} }
} }
#if DEBUG
private func debugLog(_ entry: LogEntry) { private func debugLog(_ entry: LogEntry) {
#if DEBUG
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
let formattedMessage = "[\(dateFormatter.string(from: entry.timestamp))] [\(entry.type)] \(entry.message)" let formattedMessage = "[\(dateFormatter.string(from: entry.timestamp))] [\(entry.type)] \(entry.message)"
print(formattedMessage) NSLog("%@", formattedMessage)
#endif
} }
#endif
} }

View File

@@ -1,45 +1,41 @@
import UIKit import UIKit
import Metal
import Libmpv import Libmpv
import CoreMedia import CoreMedia
import CoreVideo import CoreVideo
import AVFoundation import AVFoundation
protocol MPVSoftwareRendererDelegate: AnyObject { protocol MPVMetalRendererDelegate: AnyObject {
func renderer(_ renderer: MPVSoftwareRenderer, didUpdatePosition position: Double, duration: Double) func renderer(_ renderer: MPVMetalRenderer, didUpdatePosition position: Double, duration: Double)
func renderer(_ renderer: MPVSoftwareRenderer, didChangePause isPaused: Bool) func renderer(_ renderer: MPVMetalRenderer, didChangePause isPaused: Bool)
func renderer(_ renderer: MPVSoftwareRenderer, didChangeLoading isLoading: Bool) func renderer(_ renderer: MPVMetalRenderer, didChangeLoading isLoading: Bool)
func renderer(_ renderer: MPVSoftwareRenderer, didBecomeReadyToSeek: Bool) func renderer(_ renderer: MPVMetalRenderer, didBecomeReadyToSeek: Bool)
func renderer(_ renderer: MPVSoftwareRenderer, didBecomeTracksReady: Bool) func renderer(_ renderer: MPVMetalRenderer, didBecomeTracksReady: Bool)
} }
final class MPVSoftwareRenderer { final class MPVMetalRenderer {
enum RendererError: Error { enum RendererError: Error {
case metalNotSupported
case mpvCreationFailed case mpvCreationFailed
case mpvInitialization(Int32) case mpvInitialization(Int32)
case renderContextCreation(Int32) case renderContextCreation(Int32)
} }
private let displayLayer: AVSampleBufferDisplayLayer private let displayLayer: AVSampleBufferDisplayLayer
private let renderQueue = DispatchQueue(label: "mpv.software.render", qos: .userInitiated) private let renderQueue = DispatchQueue(label: "mpv.metal.render", qos: .userInteractive)
private let eventQueue = DispatchQueue(label: "mpv.software.events", qos: .utility) private let eventQueue = DispatchQueue(label: "mpv.metal.events", qos: .utility)
private let stateQueue = DispatchQueue(label: "mpv.software.state", attributes: .concurrent) private let stateQueue = DispatchQueue(label: "mpv.metal.state", attributes: .concurrent)
private let eventQueueGroup = DispatchGroup() private let eventQueueGroup = DispatchGroup()
private let renderQueueKey = DispatchSpecificKey<Void>() private let renderQueueKey = DispatchSpecificKey<Void>()
private var dimensionsArray = [Int32](repeating: 0, count: 2) private var device: MTLDevice?
private var renderParams = [mpv_render_param](repeating: mpv_render_param(type: MPV_RENDER_PARAM_INVALID, data: nil), count: 5) private var commandQueue: MTLCommandQueue?
private var bufferPool: IOSurfaceBufferPool?
private var formatDescription: CMVideoFormatDescription?
private var mpv: OpaquePointer? private var mpv: OpaquePointer?
private var renderContext: OpaquePointer? private var renderContext: OpaquePointer?
private var videoSize: CGSize = .zero private var videoSize: CGSize = .zero
private var pixelBufferPool: CVPixelBufferPool?
private var pixelBufferPoolAuxAttributes: CFDictionary?
private var formatDescription: CMVideoFormatDescription?
private var didFlushForFormatChange = false
private var poolWidth: Int = 0
private var poolHeight: Int = 0
private var preAllocatedBuffers: [CVPixelBuffer] = []
private let maxPreAllocatedBuffers = 12
private var currentPreset: PlayerPreset? private var currentPreset: PlayerPreset?
private var currentURL: URL? private var currentURL: URL?
@@ -52,22 +48,17 @@ final class MPVSoftwareRenderer {
private var isRunning = false private var isRunning = false
private var isStopping = false private var isStopping = false
private var shouldClearPixelBuffer = false
private let bgraFormatCString: [CChar] = Array("bgra\0".utf8CString)
private let maxInFlightBuffers = 3
private var inFlightBufferCount = 0
private let inFlightLock = NSLock()
weak var delegate: MPVSoftwareRendererDelegate? weak var delegate: MPVMetalRendererDelegate?
// Thread-safe state for playback (uses existing stateQueue to prevent races causing stutter) // Thread-safe state
private var _cachedDuration: Double = 0 private var _cachedDuration: Double = 0
private var _cachedPosition: Double = 0 private var _cachedPosition: Double = 0
private var _isPaused: Bool = true private var _isPaused: Bool = true
private var _playbackSpeed: Double = 1.0 private var _playbackSpeed: Double = 1.0
private var _isSeeking: Bool = false private var _isSeeking: Bool = false
private var _positionUpdateTime: CFTimeInterval = 0 // Host time when position was last updated private var _positionUpdateTime: CFTimeInterval = 0
private var _lastPTS: Double = 0 // Last presentation timestamp (ensures monotonic increase) private var _lastPTS: Double = 0
// Thread-safe accessors // Thread-safe accessors
private var cachedDuration: Double { private var cachedDuration: Double {
@@ -99,33 +90,6 @@ final class MPVSoftwareRenderer {
set { stateQueue.async(flags: .barrier) { self._lastPTS = newValue } } set { stateQueue.async(flags: .barrier) { self._lastPTS = newValue } }
} }
/// Get next monotonically increasing PTS based on video position
/// This ensures frames always have increasing timestamps (prevents stutter from drops)
private func nextMonotonicPTS() -> Double {
let currentPos = interpolatedPosition()
let last = lastPTS
// Ensure PTS always increases (by at least 1ms) to prevent frame drops
let pts = max(currentPos, last + 0.001)
lastPTS = pts
return pts
}
/// Calculate smooth interpolated position based on last known position + elapsed time
private func interpolatedPosition() -> Double {
let basePosition = cachedPosition
let lastUpdate = positionUpdateTime
let paused = isPaused
let speed = playbackSpeed
guard !paused, lastUpdate > 0 else {
return basePosition
}
let elapsed = CACurrentMediaTime() - lastUpdate
return basePosition + (elapsed * speed)
}
private var isLoading: Bool = false private var isLoading: Bool = false
private var isRenderScheduled = false private var isRenderScheduled = false
private var lastRenderTime: CFTimeInterval = 0 private var lastRenderTime: CFTimeInterval = 0
@@ -137,15 +101,22 @@ final class MPVSoftwareRenderer {
return isPaused return isPaused
} }
init(displayLayer: AVSampleBufferDisplayLayer) { init(displayLayer: AVSampleBufferDisplayLayer) throws {
guard guard let device = MTLCreateSystemDefaultDevice() else {
let screen = UIApplication.shared.connectedScenes throw RendererError.metalNotSupported
.compactMap({ ($0 as? UIWindowScene)?.screen })
.first
else {
fatalError("⚠️ No active screen found — app may not have a visible window yet.")
} }
self.device = device
self.commandQueue = device.makeCommandQueue()
self.displayLayer = displayLayer self.displayLayer = displayLayer
self.bufferPool = IOSurfaceBufferPool(device: device, maxBufferCount: 6)
guard let screen = UIApplication.shared.connectedScenes
.compactMap({ ($0 as? UIWindowScene)?.screen })
.first
else {
throw RendererError.metalNotSupported
}
let maxFPS = screen.maximumFramesPerSecond let maxFPS = screen.maximumFramesPerSecond
let cappedFPS = min(maxFPS, 60) let cappedFPS = min(maxFPS, 60)
self.minRenderInterval = 1.0 / CFTimeInterval(cappedFPS) self.minRenderInterval = 1.0 / CFTimeInterval(cappedFPS)
@@ -162,27 +133,37 @@ final class MPVSoftwareRenderer {
throw RendererError.mpvCreationFailed throw RendererError.mpvCreationFailed
} }
mpv = handle mpv = handle
// Core options
setOption(name: "terminal", value: "yes") setOption(name: "terminal", value: "yes")
setOption(name: "msg-level", value: "status") setOption(name: "msg-level", value: "status")
setOption(name: "keep-open", value: "yes") setOption(name: "keep-open", value: "yes")
setOption(name: "idle", value: "yes") setOption(name: "idle", value: "yes")
setOption(name: "vo", value: "libmpv") setOption(name: "vo", value: "libmpv")
setOption(name: "hwdec", value: "videotoolbox-copy")
setOption(name: "gpu-api", value: "metal")
setOption(name: "gpu-context", value: "metal")
setOption(name: "demuxer-thread", value: "yes")
setOption(name: "ytdl", value: "yes")
setOption(name: "profile", value: "fast")
setOption(name: "vd-lavc-threads", value: "8")
setOption(name: "cache", value: "yes")
setOption(name: "demuxer-max-bytes", value: "150M")
setOption(name: "demuxer-readahead-secs", value: "20")
// Subtitle options - use vf=sub to burn subtitles into video frames // Hardware decoding - zero-copy for maximum GPU efficiency
// This happens at the filter level, BEFORE the software renderer setOption(name: "hwdec", value: "videotoolbox")
// Performance options
setOption(name: "demuxer-thread", value: "yes")
setOption(name: "profile", value: "fast")
setOption(name: "vd-lavc-threads", value: "0")
setOption(name: "cache", value: "yes")
setOption(name: "demuxer-max-bytes", value: "50M")
setOption(name: "demuxer-readahead-secs", value: "10")
// A/V sync options - prioritize audio sync and allow frame drops
setOption(name: "video-sync", value: "audio")
setOption(name: "framedrop", value: "vo")
setOption(name: "video-latency-hacks", value: "yes")
// Audio buffer to prevent underruns during heavy video load
setOption(name: "audio-buffer", value: "0.2")
// Subtitle options - burn into video frames
setOption(name: "vf", value: "sub") setOption(name: "vf", value: "sub")
setOption(name: "sub-visibility", value: "yes") setOption(name: "sub-visibility", value: "yes")
let initStatus = mpv_initialize(handle) let initStatus = mpv_initialize(handle)
guard initStatus >= 0 else { guard initStatus >= 0 else {
throw RendererError.mpvInitialization(initStatus) throw RendererError.mpvInitialization(initStatus)
@@ -221,11 +202,7 @@ final class MPVSoftwareRenderer {
} }
self.formatDescription = nil self.formatDescription = nil
self.preAllocatedBuffers.removeAll() self.bufferPool?.invalidate()
self.pixelBufferPool = nil
self.poolWidth = 0
self.poolHeight = 0
self.lastRenderDimensions = .zero
} }
eventQueueGroup.wait() eventQueueGroup.wait()
@@ -238,14 +215,6 @@ final class MPVSoftwareRenderer {
} }
self.mpv = nil self.mpv = nil
self.preAllocatedBuffers.removeAll()
self.pixelBufferPool = nil
self.pixelBufferPoolAuxAttributes = nil
self.formatDescription = nil
self.poolWidth = 0
self.poolHeight = 0
self.lastRenderDimensions = .zero
self.disposeBag.forEach { $0() } self.disposeBag.forEach { $0() }
self.disposeBag.removeAll() self.disposeBag.removeAll()
} }
@@ -257,6 +226,7 @@ final class MPVSoftwareRenderer {
} else { } else {
self.displayLayer.flushAndRemoveImage() self.displayLayer.flushAndRemoveImage()
} }
self.displayLayer.controlTimebase = nil
} }
isStopping = false isStopping = false
@@ -290,23 +260,19 @@ final class MPVSoftwareRenderer {
guard let handle = self.mpv else { return } guard let handle = self.mpv else { return }
self.apply(commands: preset.commands, on: handle) self.apply(commands: preset.commands, on: handle)
// Sync stop to ensure previous playback is stopped before loading new file
self.commandSync(handle, ["stop"]) self.commandSync(handle, ["stop"])
self.updateHTTPHeaders(headers) self.updateHTTPHeaders(headers)
// Set start position using property (setOption only works before mpv_initialize)
if let startPos = startPosition, startPos > 0 { if let startPos = startPosition, startPos > 0 {
self.setProperty(name: "start", value: String(format: "%.2f", startPos)) self.setProperty(name: "start", value: String(format: "%.2f", startPos))
} else { } else {
self.setProperty(name: "start", value: "0") self.setProperty(name: "start", value: "0")
} }
// Set initial audio track if specified
if let audioId = self.initialAudioId, audioId > 0 { if let audioId = self.initialAudioId, audioId > 0 {
self.setAudioTrack(audioId) self.setAudioTrack(audioId)
} }
// Set initial subtitle track if no external subs (external subs change track IDs)
if self.pendingExternalSubtitles.isEmpty { if self.pendingExternalSubtitles.isEmpty {
if let subId = self.initialSubtitleId { if let subId = self.initialSubtitleId {
self.setSubtitleTrack(subId) self.setSubtitleTrack(subId)
@@ -314,7 +280,6 @@ final class MPVSoftwareRenderer {
self.disableSubtitles() self.disableSubtitles()
} }
} else { } else {
// External subs will be added after file loads, set sid then
self.disableSubtitles() self.disableSubtitles()
} }
@@ -342,6 +307,8 @@ final class MPVSoftwareRenderer {
} }
} }
// MARK: - MPV Configuration
private func setOption(name: String, value: String) { private func setOption(name: String, value: String) {
guard let handle = mpv else { return } guard let handle = mpv else { return }
_ = value.withCString { valuePointer in _ = value.withCString { valuePointer in
@@ -380,16 +347,18 @@ final class MPVSoftwareRenderer {
} }
let headerString = headers let headerString = headers
.map { key, value in .map { key, value in "\(key): \(value)" }
"\(key): \(value)"
}
.joined(separator: "\r\n") .joined(separator: "\r\n")
setProperty(name: "http-header-fields", value: headerString) setProperty(name: "http-header-fields", value: headerString)
} }
// MARK: - Render Context
private func createRenderContext() throws { private func createRenderContext() throws {
guard let handle = mpv else { return } guard let handle = mpv else { return }
// Use software rendering API but with our IOSurface-backed Metal textures
// This gives us the frame data while still leveraging hardware decoding
var apiType = MPV_RENDER_API_TYPE_SW var apiType = MPV_RENDER_API_TYPE_SW
let status = withUnsafePointer(to: &apiType) { apiTypePtr in let status = withUnsafePointer(to: &apiType) { apiTypePtr in
var params = [ var params = [
@@ -410,7 +379,7 @@ final class MPVSoftwareRenderer {
mpv_render_context_set_update_callback(renderContext, { context in mpv_render_context_set_update_callback(renderContext, { context in
guard let context = context else { return } guard let context = context else { return }
let instance = Unmanaged<MPVSoftwareRenderer>.fromOpaque(context).takeUnretainedValue() let instance = Unmanaged<MPVMetalRenderer>.fromOpaque(context).takeUnretainedValue()
instance.scheduleRender() instance.scheduleRender()
}, Unmanaged.passUnretained(self).toOpaque()) }, Unmanaged.passUnretained(self).toOpaque())
} }
@@ -423,7 +392,7 @@ final class MPVSoftwareRenderer {
("duration", MPV_FORMAT_DOUBLE), ("duration", MPV_FORMAT_DOUBLE),
("time-pos", MPV_FORMAT_DOUBLE), ("time-pos", MPV_FORMAT_DOUBLE),
("pause", MPV_FORMAT_FLAG), ("pause", MPV_FORMAT_FLAG),
("track-list/count", MPV_FORMAT_INT64) // Notify when tracks are available ("track-list/count", MPV_FORMAT_INT64)
] ]
for (name, format) in properties { for (name, format) in properties {
@@ -437,7 +406,7 @@ final class MPVSoftwareRenderer {
guard let handle = mpv else { return } guard let handle = mpv else { return }
mpv_set_wakeup_callback(handle, { userdata in mpv_set_wakeup_callback(handle, { userdata in
guard let userdata else { return } guard let userdata else { return }
let instance = Unmanaged<MPVSoftwareRenderer>.fromOpaque(userdata).takeUnretainedValue() let instance = Unmanaged<MPVMetalRenderer>.fromOpaque(userdata).takeUnretainedValue()
instance.processEvents() instance.processEvents()
}, Unmanaged.passUnretained(self).toOpaque()) }, Unmanaged.passUnretained(self).toOpaque())
renderQueue.async { [weak self] in renderQueue.async { [weak self] in
@@ -449,6 +418,8 @@ final class MPVSoftwareRenderer {
} }
} }
// MARK: - Rendering
private func scheduleRender() { private func scheduleRender() {
renderQueue.async { [weak self] in renderQueue.async { [weak self] in
guard let self, self.isRunning, !self.isStopping else { return } guard let self, self.isRunning, !self.isStopping else { return }
@@ -491,82 +462,55 @@ final class MPVSoftwareRenderer {
} }
} }
private var dimensionsArray = [Int32](repeating: 0, count: 2)
private var renderParams = [mpv_render_param](repeating: mpv_render_param(type: MPV_RENDER_PARAM_INVALID, data: nil), count: 5)
private let bgraFormatCString: [CChar] = Array("bgra\0".utf8CString)
private func renderFrame() { private func renderFrame() {
guard let context = renderContext else { return } guard let context = renderContext, let bufferPool = bufferPool else { return }
let videoSize = currentVideoSize() let videoSize = currentVideoSize()
guard videoSize.width > 0, videoSize.height > 0 else { return } guard videoSize.width > 0, videoSize.height > 0 else { return }
let targetSize = targetRenderSize(for: videoSize) let width = Int(videoSize.width)
let width = Int(targetSize.width) let height = Int(videoSize.height)
let height = Int(targetSize.height)
guard width > 0, height > 0 else { return } guard width > 0, height > 0 else { return }
if lastRenderDimensions != targetSize {
lastRenderDimensions = targetSize // Configure buffer pool if needed
if targetSize != videoSize { if bufferPool.width != width || bufferPool.height != height {
Logger.shared.log("Rendering scaled output at \(width)x\(height) (source \(Int(videoSize.width))x\(Int(videoSize.height)))", type: "Info") if !bufferPool.configure(width: width, height: height) {
} else { Logger.shared.log("Failed to configure buffer pool for \(width)x\(height)", type: "Error")
Logger.shared.log("Rendering output at native size \(width)x\(height)", type: "Info") return
}
formatDescription = bufferPool.createFormatDescription()
// Flush display layer on format change
DispatchQueue.main.async { [weak self] in
guard let self else { return }
if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil)
} else {
self.displayLayer.flushAndRemoveImage()
}
} }
} }
if poolWidth != width || poolHeight != height { guard let pooledBuffer = bufferPool.dequeueBuffer() else {
recreatePixelBufferPool(width: width, height: height) Logger.shared.log("Failed to dequeue buffer from pool", type: "Error")
}
var pixelBuffer: CVPixelBuffer?
var status: CVReturn = kCVReturnError
if !preAllocatedBuffers.isEmpty {
pixelBuffer = preAllocatedBuffers.removeFirst()
status = kCVReturnSuccess
} else if let pool = pixelBufferPool {
status = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, pixelBufferPoolAuxAttributes, &pixelBuffer)
}
if status != kCVReturnSuccess || pixelBuffer == nil {
let attrs: [CFString: Any] = [
kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary,
kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferWidthKey: width,
kCVPixelBufferHeightKey: height,
kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA
]
status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, attrs as CFDictionary, &pixelBuffer)
}
guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
Logger.shared.log("Failed to create pixel buffer for rendering (status: \(status))", type: "Error")
return return
} }
let actualFormat = CVPixelBufferGetPixelFormatType(buffer) // Render to the IOSurface-backed pixel buffer
if actualFormat != kCVPixelFormatType_32BGRA { // The pixel buffer is Metal-compatible so this render goes through GPU when possible
Logger.shared.log("Pixel buffer format mismatch: expected BGRA (0x42475241), got \(actualFormat)", type: "Error") CVPixelBufferLockBaseAddress(pooledBuffer.pixelBuffer, [])
} guard let baseAddress = CVPixelBufferGetBaseAddress(pooledBuffer.pixelBuffer) else {
CVPixelBufferUnlockBaseAddress(pooledBuffer.pixelBuffer, [])
CVPixelBufferLockBaseAddress(buffer, []) bufferPool.enqueueBuffer(pooledBuffer)
guard let baseAddress = CVPixelBufferGetBaseAddress(buffer) else {
CVPixelBufferUnlockBaseAddress(buffer, [])
return return
} }
if shouldClearPixelBuffer {
let bufferDataSize = CVPixelBufferGetDataSize(buffer)
memset(baseAddress, 0, bufferDataSize)
shouldClearPixelBuffer = false
}
dimensionsArray[0] = Int32(width) dimensionsArray[0] = Int32(width)
dimensionsArray[1] = Int32(height) dimensionsArray[1] = Int32(height)
let stride = Int32(CVPixelBufferGetBytesPerRow(buffer)) let stride = Int32(CVPixelBufferGetBytesPerRow(pooledBuffer.pixelBuffer))
let expectedMinStride = Int32(width * 4)
if stride < expectedMinStride {
Logger.shared.log("Unexpected pixel buffer stride \(stride) < expected \(expectedMinStride) — skipping render to avoid memory corruption", type: "Error")
CVPixelBufferUnlockBaseAddress(buffer, [])
return
}
let pointerValue = baseAddress let pointerValue = baseAddress
dimensionsArray.withUnsafeMutableBufferPointer { dimsPointer in dimensionsArray.withUnsafeMutableBufferPointer { dimsPointer in
@@ -586,136 +530,35 @@ final class MPVSoftwareRenderer {
} }
} }
CVPixelBufferUnlockBaseAddress(buffer, []) CVPixelBufferUnlockBaseAddress(pooledBuffer.pixelBuffer, [])
enqueue(buffer: buffer) // Enqueue to display layer
enqueue(buffer: pooledBuffer)
if preAllocatedBuffers.count < 4 {
renderQueue.async { [weak self] in
self?.preAllocateBuffers()
}
}
} }
private func targetRenderSize(for videoSize: CGSize) -> CGSize { private func nextMonotonicPTS() -> Double {
guard videoSize.width > 0, videoSize.height > 0 else { return videoSize } let currentPos = interpolatedPosition()
guard let last = lastPTS
let screen = UIApplication.shared.connectedScenes let pts = max(currentPos, last + 0.001)
.compactMap({ ($0 as? UIWindowScene)?.screen }) lastPTS = pts
.first return pts
else {
fatalError("⚠️ No active screen found — app may not have a visible window yet.")
}
var scale = screen.scale
if scale <= 0 { scale = 1 }
let maxWidth = max(screen.bounds.width * scale, 1.0)
let maxHeight = max(screen.bounds.height * scale, 1.0)
if maxWidth <= 0 || maxHeight <= 0 {
return videoSize
}
let widthRatio = videoSize.width / maxWidth
let heightRatio = videoSize.height / maxHeight
let ratio = max(widthRatio, heightRatio, 1)
let targetWidth = max(1, Int(videoSize.width / ratio))
let targetHeight = max(1, Int(videoSize.height / ratio))
return CGSize(width: CGFloat(targetWidth), height: CGFloat(targetHeight))
} }
private func createPixelBufferPool(width: Int, height: Int) { private func interpolatedPosition() -> Double {
guard width > 0, height > 0 else { return } let basePosition = cachedPosition
let lastUpdate = positionUpdateTime
let paused = isPaused
let speed = playbackSpeed
let pixelFormat = kCVPixelFormatType_32BGRA guard !paused, lastUpdate > 0 else {
return basePosition
let attrs: [CFString: Any] = [
kCVPixelBufferPixelFormatTypeKey: pixelFormat,
kCVPixelBufferWidthKey: width,
kCVPixelBufferHeightKey: height,
kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary,
kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue!
]
let poolAttrs: [CFString: Any] = [
kCVPixelBufferPoolMinimumBufferCountKey: maxPreAllocatedBuffers,
kCVPixelBufferPoolMaximumBufferAgeKey: 0
]
let auxAttrs: [CFString: Any] = [
kCVPixelBufferPoolAllocationThresholdKey: 8
]
var pool: CVPixelBufferPool?
let status = CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttrs as CFDictionary, attrs as CFDictionary, &pool)
if status == kCVReturnSuccess, let pool {
renderQueueSync {
self.pixelBufferPool = pool
self.pixelBufferPoolAuxAttributes = auxAttrs as CFDictionary
self.poolWidth = width
self.poolHeight = height
}
renderQueue.async { [weak self] in
self?.preAllocateBuffers()
}
} else {
Logger.shared.log("Failed to create CVPixelBufferPool (status: \(status))", type: "Error")
} }
let elapsed = CACurrentMediaTime() - lastUpdate
return basePosition + (elapsed * speed)
} }
private func recreatePixelBufferPool(width: Int, height: Int) { private func enqueue(buffer: IOSurfaceBufferPool.PooledBuffer) {
renderQueueSync {
self.preAllocatedBuffers.removeAll()
self.pixelBufferPool = nil
self.formatDescription = nil
self.poolWidth = 0
self.poolHeight = 0
}
createPixelBufferPool(width: width, height: height)
}
private func preAllocateBuffers() {
guard DispatchQueue.getSpecific(key: renderQueueKey) != nil else {
renderQueue.async { [weak self] in
self?.preAllocateBuffers()
}
return
}
guard let pool = pixelBufferPool else { return }
let targetCount = min(maxPreAllocatedBuffers, 8)
let currentCount = preAllocatedBuffers.count
guard currentCount < targetCount else { return }
let bufferCount = targetCount - currentCount
for _ in 0..<bufferCount {
var buffer: CVPixelBuffer?
let status = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(
kCFAllocatorDefault,
pool,
pixelBufferPoolAuxAttributes,
&buffer
)
if status == kCVReturnSuccess, let buffer = buffer {
if preAllocatedBuffers.count < maxPreAllocatedBuffers {
preAllocatedBuffers.append(buffer)
}
} else {
if status != kCVReturnWouldExceedAllocationThreshold {
Logger.shared.log("Failed to pre-allocate buffer (status: \(status))", type: "Warn")
}
break
}
}
}
private func enqueue(buffer: CVPixelBuffer) {
let needsFlush = updateFormatDescriptionIfNeeded(for: buffer)
var shouldNotifyLoadingEnd = false var shouldNotifyLoadingEnd = false
renderQueueSync { renderQueueSync {
if self.isLoading { if self.isLoading {
@@ -723,45 +566,27 @@ final class MPVSoftwareRenderer {
shouldNotifyLoadingEnd = true shouldNotifyLoadingEnd = true
} }
} }
var capturedFormatDescription: CMVideoFormatDescription?
renderQueueSync {
capturedFormatDescription = self.formatDescription
}
guard let formatDescription = capturedFormatDescription else { guard let formatDescription = formatDescription else {
Logger.shared.log("Missing formatDescription when creating sample buffer — skipping frame", type: "Error") Logger.shared.log("Missing formatDescription when creating sample buffer", type: "Error")
bufferPool?.enqueueBuffer(buffer)
return return
} }
// Use interpolated position for smooth PTS (prevents jitter from discrete time-pos updates)
// Use monotonically increasing video position for smooth PTS + working PiP progress
let presentationTime = CMTime(seconds: nextMonotonicPTS(), preferredTimescale: 1000) let presentationTime = CMTime(seconds: nextMonotonicPTS(), preferredTimescale: 1000)
var timing = CMSampleTimingInfo(duration: .invalid, presentationTimeStamp: presentationTime, decodeTimeStamp: .invalid)
var sampleBuffer: CMSampleBuffer? guard let sampleBuffer = IOSurfaceBufferPool.createSampleBuffer(
let result = CMSampleBufferCreateForImageBuffer( from: buffer.pixelBuffer,
allocator: kCFAllocatorDefault,
imageBuffer: buffer,
dataReady: true,
makeDataReadyCallback: nil,
refcon: nil,
formatDescription: formatDescription, formatDescription: formatDescription,
sampleTiming: &timing, presentationTime: presentationTime
sampleBufferOut: &sampleBuffer ) else {
) bufferPool?.enqueueBuffer(buffer)
guard result == noErr, let sample = sampleBuffer else {
Logger.shared.log("Failed to create sample buffer (error: \(result), -12743 = invalid format)", type: "Error")
let width = CVPixelBufferGetWidth(buffer)
let height = CVPixelBufferGetHeight(buffer)
let pixelFormat = CVPixelBufferGetPixelFormatType(buffer)
Logger.shared.log("Buffer info: \(width)x\(height), format: \(pixelFormat)", type: "Error")
return return
} }
DispatchQueue.main.async { [weak self] in DispatchQueue.main.async { [weak self] in
guard let self else { return } guard let self else { return }
let (status, error): (AVQueuedSampleBufferRenderingStatus?, Error?) = { let (status, error): (AVQueuedSampleBufferRenderingStatus?, Error?) = {
if #available(iOS 18.0, *) { if #available(iOS 18.0, *) {
return ( return (
@@ -775,9 +600,10 @@ final class MPVSoftwareRenderer {
) )
} }
}() }()
if status == .failed { if status == .failed {
if let error = error { if let error = error {
Logger.shared.log("Display layer in failed state: \(error.localizedDescription)", type: "Error") Logger.shared.log("Display layer failed: \(error.localizedDescription)", type: "Error")
} }
if #available(iOS 18.0, *) { if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil) self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil)
@@ -786,86 +612,29 @@ final class MPVSoftwareRenderer {
} }
} }
if needsFlush {
if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil)
} else {
self.displayLayer.flushAndRemoveImage()
}
self.didFlushForFormatChange = true
} else if self.didFlushForFormatChange {
if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: false, completionHandler: nil)
} else {
self.displayLayer.flush()
}
self.didFlushForFormatChange = false
}
if self.displayLayer.controlTimebase == nil { if self.displayLayer.controlTimebase == nil {
var timebase: CMTimebase? var timebase: CMTimebase?
if CMTimebaseCreateWithSourceClock(allocator: kCFAllocatorDefault, sourceClock: CMClockGetHostTimeClock(), timebaseOut: &timebase) == noErr, let timebase { if CMTimebaseCreateWithSourceClock(allocator: kCFAllocatorDefault, sourceClock: CMClockGetHostTimeClock(), timebaseOut: &timebase) == noErr, let timebase {
// Set rate based on current pause state and playback speed
CMTimebaseSetRate(timebase, rate: self.isPaused ? 0 : self.playbackSpeed) CMTimebaseSetRate(timebase, rate: self.isPaused ? 0 : self.playbackSpeed)
CMTimebaseSetTime(timebase, time: presentationTime) CMTimebaseSetTime(timebase, time: presentationTime)
self.displayLayer.controlTimebase = timebase self.displayLayer.controlTimebase = timebase
} else {
Logger.shared.log("Failed to create control timebase", type: "Error")
} }
} }
if shouldNotifyLoadingEnd { if shouldNotifyLoadingEnd {
self.delegate?.renderer(self, didChangeLoading: false) self.delegate?.renderer(self, didChangeLoading: false)
} }
if #available(iOS 18.0, *) { if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.enqueue(sample) self.displayLayer.sampleBufferRenderer.enqueue(sampleBuffer)
} else { } else {
self.displayLayer.enqueue(sample) self.displayLayer.enqueue(sampleBuffer)
}
}
}
private func updateFormatDescriptionIfNeeded(for buffer: CVPixelBuffer) -> Bool {
var didChange = false
let width = Int32(CVPixelBufferGetWidth(buffer))
let height = Int32(CVPixelBufferGetHeight(buffer))
let pixelFormat = CVPixelBufferGetPixelFormatType(buffer)
renderQueueSync {
var needsRecreate = false
if let description = formatDescription {
let currentDimensions = CMVideoFormatDescriptionGetDimensions(description)
let currentPixelFormat = CMFormatDescriptionGetMediaSubType(description)
if currentDimensions.width != width ||
currentDimensions.height != height ||
currentPixelFormat != pixelFormat {
needsRecreate = true
}
} else {
needsRecreate = true
} }
if needsRecreate { DispatchQueue.main.asyncAfter(deadline: .now() + 0.032) { [weak self, weak bufferPool] in
var newDescription: CMVideoFormatDescription? bufferPool?.enqueueBuffer(buffer)
let status = CMVideoFormatDescriptionCreateForImageBuffer(
allocator: kCFAllocatorDefault,
imageBuffer: buffer,
formatDescriptionOut: &newDescription
)
if status == noErr, let newDescription = newDescription {
formatDescription = newDescription
didChange = true
Logger.shared.log("Created new format description: \(width)x\(height), format: \(pixelFormat)", type: "Info")
} else {
Logger.shared.log("Failed to create format description (status: \(status))", type: "Error")
}
} }
} }
return didChange
} }
private func renderQueueSync(_ block: () -> Void) { private func renderQueueSync(_ block: () -> Void) {
@@ -877,9 +646,7 @@ final class MPVSoftwareRenderer {
} }
private func currentVideoSize() -> CGSize { private func currentVideoSize() -> CGSize {
stateQueue.sync { stateQueue.sync { videoSize }
videoSize
}
} }
private func updateVideoSize(width: Int, height: Int) { private func updateVideoSize(width: Int, height: Int) {
@@ -887,15 +654,10 @@ final class MPVSoftwareRenderer {
stateQueue.async(flags: .barrier) { stateQueue.async(flags: .barrier) {
self.videoSize = size self.videoSize = size
} }
renderQueue.async { [weak self] in
guard let self else { return }
if self.poolWidth != width || self.poolHeight != height {
self.recreatePixelBufferPool(width: max(width, 0), height: max(height, 0))
}
}
} }
// MARK: - Commands
private func apply(commands: [[String]], on handle: OpaquePointer) { private func apply(commands: [[String]], on handle: OpaquePointer) {
for command in commands { for command in commands {
guard !command.isEmpty else { continue } guard !command.isEmpty else { continue }
@@ -903,7 +665,6 @@ final class MPVSoftwareRenderer {
} }
} }
/// Async command - returns immediately, mpv processes later
private func command(_ handle: OpaquePointer, _ args: [String]) { private func command(_ handle: OpaquePointer, _ args: [String]) {
guard !args.isEmpty else { return } guard !args.isEmpty else { return }
_ = withCStringArray(args) { pointer in _ = withCStringArray(args) { pointer in
@@ -911,7 +672,7 @@ final class MPVSoftwareRenderer {
} }
} }
/// Sync command - waits for mpv to process before returning @discardableResult
private func commandSync(_ handle: OpaquePointer, _ args: [String]) -> Int32 { private func commandSync(_ handle: OpaquePointer, _ args: [String]) -> Int32 {
guard !args.isEmpty else { return -1 } guard !args.isEmpty else { return -1 }
return withCStringArray(args) { pointer in return withCStringArray(args) { pointer in
@@ -919,6 +680,8 @@ final class MPVSoftwareRenderer {
} }
} }
// MARK: - Event Processing
private func processEvents() { private func processEvents() {
eventQueueGroup.enter() eventQueueGroup.enter()
let group = eventQueueGroup let group = eventQueueGroup
@@ -941,7 +704,6 @@ final class MPVSoftwareRenderer {
case MPV_EVENT_VIDEO_RECONFIG: case MPV_EVENT_VIDEO_RECONFIG:
refreshVideoState() refreshVideoState()
case MPV_EVENT_FILE_LOADED: case MPV_EVENT_FILE_LOADED:
// Add external subtitles now that the file is loaded
let hadExternalSubs = !pendingExternalSubtitles.isEmpty let hadExternalSubs = !pendingExternalSubtitles.isEmpty
if hadExternalSubs, let handle = mpv { if hadExternalSubs, let handle = mpv {
for subUrl in pendingExternalSubtitles { for subUrl in pendingExternalSubtitles {
@@ -949,7 +711,6 @@ final class MPVSoftwareRenderer {
} }
pendingExternalSubtitles = [] pendingExternalSubtitles = []
// Set subtitle after external subs are added (track IDs have changed)
if let subId = initialSubtitleId { if let subId = initialSubtitleId {
setSubtitleTrack(subId) setSubtitleTrack(subId)
} else { } else {
@@ -1007,13 +768,12 @@ final class MPVSoftwareRenderer {
delegate?.renderer(self, didUpdatePosition: cachedPosition, duration: cachedDuration) delegate?.renderer(self, didUpdatePosition: cachedPosition, duration: cachedDuration)
} }
case "time-pos": case "time-pos":
// Skip updates while seeking to prevent race condition
guard !isSeeking else { return } guard !isSeeking else { return }
var value = Double(0) var value = Double(0)
let status = getProperty(handle: handle, name: name, format: MPV_FORMAT_DOUBLE, value: &value) let status = getProperty(handle: handle, name: name, format: MPV_FORMAT_DOUBLE, value: &value)
if status >= 0 { if status >= 0 {
cachedPosition = value cachedPosition = value
positionUpdateTime = CACurrentMediaTime() // Record when we got this update positionUpdateTime = CACurrentMediaTime()
delegate?.renderer(self, didUpdatePosition: cachedPosition, duration: cachedDuration) delegate?.renderer(self, didUpdatePosition: cachedPosition, duration: cachedDuration)
} }
case "pause": case "pause":
@@ -1023,7 +783,6 @@ final class MPVSoftwareRenderer {
let newPaused = flag != 0 let newPaused = flag != 0
if newPaused != isPaused { if newPaused != isPaused {
isPaused = newPaused isPaused = newPaused
// Update timebase rate - use playbackSpeed when playing, 0 when paused
let speed = self.playbackSpeed let speed = self.playbackSpeed
DispatchQueue.main.async { [weak self] in DispatchQueue.main.async { [weak self] in
if let timebase = self?.displayLayer.controlTimebase { if let timebase = self?.displayLayer.controlTimebase {
@@ -1090,6 +849,7 @@ final class MPVSoftwareRenderer {
} }
// MARK: - Playback Controls // MARK: - Playback Controls
func play() { func play() {
setProperty(name: "pause", value: "no") setProperty(name: "pause", value: "no")
} }
@@ -1106,18 +866,13 @@ final class MPVSoftwareRenderer {
guard let handle = mpv else { return } guard let handle = mpv else { return }
let clamped = max(0, seconds) let clamped = max(0, seconds)
let wasPaused = isPaused let wasPaused = isPaused
// Prevent time-pos updates from overwriting during seek
isSeeking = true isSeeking = true
// Update cached position BEFORE seek so new frames get correct timestamp
cachedPosition = clamped cachedPosition = clamped
positionUpdateTime = CACurrentMediaTime() // Reset interpolation base positionUpdateTime = CACurrentMediaTime()
lastPTS = clamped // Reset monotonic PTS to new position lastPTS = clamped
// Update timebase to match new position (sets rate to 1 for frame display)
syncTimebase(to: clamped) syncTimebase(to: clamped)
// Sync seek for accurate positioning
commandSync(handle, ["seek", String(clamped), "absolute"]) commandSync(handle, ["seek", String(clamped), "absolute"])
isSeeking = false isSeeking = false
// Restore paused rate after seek completes
if wasPaused { if wasPaused {
restoreTimebaseRate() restoreTimebaseRate()
} }
@@ -1126,19 +881,14 @@ final class MPVSoftwareRenderer {
func seek(by seconds: Double) { func seek(by seconds: Double) {
guard let handle = mpv else { return } guard let handle = mpv else { return }
let wasPaused = isPaused let wasPaused = isPaused
// Prevent time-pos updates from overwriting during seek
isSeeking = true isSeeking = true
// Update cached position BEFORE seek
let newPosition = max(0, cachedPosition + seconds) let newPosition = max(0, cachedPosition + seconds)
cachedPosition = newPosition cachedPosition = newPosition
positionUpdateTime = CACurrentMediaTime() // Reset interpolation base positionUpdateTime = CACurrentMediaTime()
lastPTS = newPosition // Reset monotonic PTS to new position lastPTS = newPosition
// Update timebase to match new position (sets rate to 1 for frame display)
syncTimebase(to: newPosition) syncTimebase(to: newPosition)
// Sync seek for accurate positioning
commandSync(handle, ["seek", String(seconds), "relative"]) commandSync(handle, ["seek", String(seconds), "relative"])
isSeeking = false isSeeking = false
// Restore paused rate after seek completes
if wasPaused { if wasPaused {
restoreTimebaseRate() restoreTimebaseRate()
} }
@@ -1157,17 +907,13 @@ final class MPVSoftwareRenderer {
let speed = playbackSpeed let speed = playbackSpeed
let doWork = { [weak self] in let doWork = { [weak self] in
guard let self = self else { return } guard let self = self else { return }
// Flush old frames to avoid "old frames with new clock" mismatches
if #available(iOS 17.0, *) { if #available(iOS 17.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: false, completionHandler: nil) self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: false, completionHandler: nil)
} else { } else {
self.displayLayer.flush() self.displayLayer.flush()
} }
if let timebase = self.displayLayer.controlTimebase { if let timebase = self.displayLayer.controlTimebase {
// Update timebase to new position
CMTimebaseSetTime(timebase, time: CMTime(seconds: position, preferredTimescale: 1000)) CMTimebaseSetTime(timebase, time: CMTime(seconds: position, preferredTimescale: 1000))
// Set rate to playback speed during seek to ensure frame displays
// restoreTimebaseRate() will set it back to 0 if paused
CMTimebaseSetRate(timebase, rate: speed) CMTimebaseSetRate(timebase, rate: speed)
} }
} }
@@ -1179,7 +925,6 @@ final class MPVSoftwareRenderer {
} }
} }
/// Sync timebase with current position without flushing (for smooth PiP transitions)
func syncTimebase() { func syncTimebase() {
DispatchQueue.main.async { [weak self] in DispatchQueue.main.async { [weak self] in
guard let self = self else { return } guard let self = self else { return }
@@ -1193,7 +938,6 @@ final class MPVSoftwareRenderer {
func setSpeed(_ speed: Double) { func setSpeed(_ speed: Double) {
playbackSpeed = speed playbackSpeed = speed
setProperty(name: "speed", value: String(speed)) setProperty(name: "speed", value: String(speed))
// Sync timebase rate with playback speed
DispatchQueue.main.async { [weak self] in DispatchQueue.main.async { [weak self] in
guard let self = self, guard let self = self,
let timebase = self.displayLayer.controlTimebase else { return } let timebase = self.displayLayer.controlTimebase else { return }
@@ -1212,10 +956,7 @@ final class MPVSoftwareRenderer {
// MARK: - Subtitle Controls // MARK: - Subtitle Controls
func getSubtitleTracks() -> [[String: Any]] { func getSubtitleTracks() -> [[String: Any]] {
guard let handle = mpv else { guard let handle = mpv else { return [] }
Logger.shared.log("getSubtitleTracks: mpv handle is nil", type: "Warn")
return []
}
var tracks: [[String: Any]] = [] var tracks: [[String: Any]] = []
var trackCount: Int64 = 0 var trackCount: Int64 = 0
@@ -1246,24 +987,14 @@ final class MPVSoftwareRenderer {
getProperty(handle: handle, name: "track-list/\(i)/selected", format: MPV_FORMAT_FLAG, value: &selected) getProperty(handle: handle, name: "track-list/\(i)/selected", format: MPV_FORMAT_FLAG, value: &selected)
track["selected"] = selected != 0 track["selected"] = selected != 0
Logger.shared.log("getSubtitleTracks: found sub track id=\(trackId), title=\(track["title"] ?? "none"), lang=\(track["lang"] ?? "none")", type: "Info")
tracks.append(track) tracks.append(track)
} }
Logger.shared.log("getSubtitleTracks: returning \(tracks.count) subtitle tracks", type: "Info")
return tracks return tracks
} }
func setSubtitleTrack(_ trackId: Int) { func setSubtitleTrack(_ trackId: Int) {
Logger.shared.log("setSubtitleTrack: setting sid to \(trackId)", type: "Info")
guard let handle = mpv else {
Logger.shared.log("setSubtitleTrack: mpv handle is nil!", type: "Error")
return
}
// Use setProperty for synchronous behavior (command is async)
if trackId < 0 { if trackId < 0 {
// Disable subtitles
setProperty(name: "sid", value: "no") setProperty(name: "sid", value: "no")
} else { } else {
setProperty(name: "sid", value: String(trackId)) setProperty(name: "sid", value: String(trackId))
@@ -1283,7 +1014,6 @@ final class MPVSoftwareRenderer {
func addSubtitleFile(url: String, select: Bool = true) { func addSubtitleFile(url: String, select: Bool = true) {
guard let handle = mpv else { return } guard let handle = mpv else { return }
// "cached" adds without selecting, "select" adds and selects
let flag = select ? "select" : "cached" let flag = select ? "select" : "cached"
commandSync(handle, ["sub-add", url, flag]) commandSync(handle, ["sub-add", url, flag])
} }
@@ -1317,10 +1047,7 @@ final class MPVSoftwareRenderer {
// MARK: - Audio Track Controls // MARK: - Audio Track Controls
func getAudioTracks() -> [[String: Any]] { func getAudioTracks() -> [[String: Any]] {
guard let handle = mpv else { guard let handle = mpv else { return [] }
Logger.shared.log("getAudioTracks: mpv handle is nil", type: "Warn")
return []
}
var tracks: [[String: Any]] = [] var tracks: [[String: Any]] = []
var trackCount: Int64 = 0 var trackCount: Int64 = 0
@@ -1361,22 +1088,13 @@ final class MPVSoftwareRenderer {
getProperty(handle: handle, name: "track-list/\(i)/selected", format: MPV_FORMAT_FLAG, value: &selected) getProperty(handle: handle, name: "track-list/\(i)/selected", format: MPV_FORMAT_FLAG, value: &selected)
track["selected"] = selected != 0 track["selected"] = selected != 0
Logger.shared.log("getAudioTracks: found audio track id=\(trackId), title=\(track["title"] ?? "none"), lang=\(track["lang"] ?? "none")", type: "Info")
tracks.append(track) tracks.append(track)
} }
Logger.shared.log("getAudioTracks: returning \(tracks.count) audio tracks", type: "Info")
return tracks return tracks
} }
func setAudioTrack(_ trackId: Int) { func setAudioTrack(_ trackId: Int) {
guard let handle = mpv else {
Logger.shared.log("setAudioTrack: mpv handle is nil", type: "Warn")
return
}
Logger.shared.log("setAudioTrack: setting aid to \(trackId)", type: "Info")
// Use setProperty for synchronous behavior
setProperty(name: "aid", value: String(trackId)) setProperty(name: "aid", value: String(trackId))
} }

View File

@@ -4,25 +4,7 @@ public class MpvPlayerModule: Module {
public func definition() -> ModuleDefinition { public func definition() -> ModuleDefinition {
Name("MpvPlayer") Name("MpvPlayer")
// Defines event names that the module can send to JavaScript. // Enables the module to be used as a native view
Events("onChange")
// Defines a JavaScript synchronous function that runs the native code on the JavaScript thread.
Function("hello") {
return "Hello from MPV Player! 👋"
}
// Defines a JavaScript function that always returns a Promise and whose native code
// is by default dispatched on the different thread than the JavaScript runtime runs on.
AsyncFunction("setValueAsync") { (value: String) in
// Send an event to JavaScript.
self.sendEvent("onChange", [
"value": value
])
}
// Enables the module to be used as a native view. Definition components that are accepted as part of the
// view definition: Prop, Events.
View(MpvPlayerView.self) { View(MpvPlayerView.self) {
// All video load options are passed via a single "source" prop // All video load options are passed via a single "source" prop
Prop("source") { (view: MpvPlayerView, source: [String: Any]?) in Prop("source") { (view: MpvPlayerView, source: [String: Any]?) in
@@ -43,52 +25,44 @@ public class MpvPlayerModule: Module {
view.loadVideo(config: config) view.loadVideo(config: config)
} }
// Async function to play video // Playback controls
AsyncFunction("play") { (view: MpvPlayerView) in AsyncFunction("play") { (view: MpvPlayerView) in
view.play() view.play()
} }
// Async function to pause video
AsyncFunction("pause") { (view: MpvPlayerView) in AsyncFunction("pause") { (view: MpvPlayerView) in
view.pause() view.pause()
} }
// Async function to seek to position
AsyncFunction("seekTo") { (view: MpvPlayerView, position: Double) in AsyncFunction("seekTo") { (view: MpvPlayerView, position: Double) in
view.seekTo(position: position) view.seekTo(position: position)
} }
// Async function to seek by offset
AsyncFunction("seekBy") { (view: MpvPlayerView, offset: Double) in AsyncFunction("seekBy") { (view: MpvPlayerView, offset: Double) in
view.seekBy(offset: offset) view.seekBy(offset: offset)
} }
// Async function to set playback speed
AsyncFunction("setSpeed") { (view: MpvPlayerView, speed: Double) in AsyncFunction("setSpeed") { (view: MpvPlayerView, speed: Double) in
view.setSpeed(speed: speed) view.setSpeed(speed: speed)
} }
// Function to get current speed
AsyncFunction("getSpeed") { (view: MpvPlayerView) -> Double in AsyncFunction("getSpeed") { (view: MpvPlayerView) -> Double in
return view.getSpeed() return view.getSpeed()
} }
// Function to check if paused
AsyncFunction("isPaused") { (view: MpvPlayerView) -> Bool in AsyncFunction("isPaused") { (view: MpvPlayerView) -> Bool in
return view.isPaused() return view.isPaused()
} }
// Function to get current position
AsyncFunction("getCurrentPosition") { (view: MpvPlayerView) -> Double in AsyncFunction("getCurrentPosition") { (view: MpvPlayerView) -> Double in
return view.getCurrentPosition() return view.getCurrentPosition()
} }
// Function to get duration
AsyncFunction("getDuration") { (view: MpvPlayerView) -> Double in AsyncFunction("getDuration") { (view: MpvPlayerView) -> Double in
return view.getDuration() return view.getDuration()
} }
// Picture in Picture functions // Picture in Picture
AsyncFunction("startPictureInPicture") { (view: MpvPlayerView) in AsyncFunction("startPictureInPicture") { (view: MpvPlayerView) in
view.startPictureInPicture() view.startPictureInPicture()
} }
@@ -126,7 +100,7 @@ public class MpvPlayerModule: Module {
view.addSubtitleFile(url: url, select: select) view.addSubtitleFile(url: url, select: select)
} }
// Subtitle positioning functions // Subtitle positioning
AsyncFunction("setSubtitlePosition") { (view: MpvPlayerView, position: Int) in AsyncFunction("setSubtitlePosition") { (view: MpvPlayerView, position: Int) in
view.setSubtitlePosition(position) view.setSubtitlePosition(position)
} }
@@ -164,7 +138,7 @@ public class MpvPlayerModule: Module {
return view.getCurrentAudioTrack() return view.getCurrentAudioTrack()
} }
// Defines events that the view can send to JavaScript // Events that the view can send to JavaScript
Events("onLoad", "onPlaybackStateChange", "onProgress", "onError", "onTracksReady") Events("onLoad", "onPlaybackStateChange", "onProgress", "onError", "onTracksReady")
} }
} }

View File

@@ -5,393 +5,376 @@ import UIKit
/// Configuration for loading a video /// Configuration for loading a video
struct VideoLoadConfig { struct VideoLoadConfig {
let url: URL let url: URL
var headers: [String: String]? var headers: [String: String]?
var externalSubtitles: [String]? var externalSubtitles: [String]?
var startPosition: Double? var startPosition: Double?
var autoplay: Bool var autoplay: Bool
/// MPV subtitle track ID to select on start (1-based, -1 to disable, nil to use default) /// MPV subtitle track ID to select on start (1-based, -1 to disable, nil to use default)
var initialSubtitleId: Int? var initialSubtitleId: Int?
/// MPV audio track ID to select on start (1-based, nil to use default) /// MPV audio track ID to select on start (1-based, nil to use default)
var initialAudioId: Int? var initialAudioId: Int?
init( init(
url: URL, url: URL,
headers: [String: String]? = nil, headers: [String: String]? = nil,
externalSubtitles: [String]? = nil, externalSubtitles: [String]? = nil,
startPosition: Double? = nil, startPosition: Double? = nil,
autoplay: Bool = true, autoplay: Bool = true,
initialSubtitleId: Int? = nil, initialSubtitleId: Int? = nil,
initialAudioId: Int? = nil initialAudioId: Int? = nil
) { ) {
self.url = url self.url = url
self.headers = headers self.headers = headers
self.externalSubtitles = externalSubtitles self.externalSubtitles = externalSubtitles
self.startPosition = startPosition self.startPosition = startPosition
self.autoplay = autoplay self.autoplay = autoplay
self.initialSubtitleId = initialSubtitleId self.initialSubtitleId = initialSubtitleId
self.initialAudioId = initialAudioId self.initialAudioId = initialAudioId
} }
} }
// This view will be used as a native component. Make sure to inherit from `ExpoView` // This view will be used as a native component. Make sure to inherit from `ExpoView`
// to apply the proper styling (e.g. border radius and shadows). // to apply the proper styling (e.g. border radius and shadows).
class MpvPlayerView: ExpoView { class MpvPlayerView: ExpoView {
private let displayLayer = AVSampleBufferDisplayLayer() private let displayLayer = AVSampleBufferDisplayLayer()
private var renderer: MPVSoftwareRenderer? private var renderer: MPVMetalRenderer?
private var videoContainer: UIView! private var videoContainer: UIView!
private var pipController: PiPController? private var pipController: PiPController?
let onLoad = EventDispatcher() let onLoad = EventDispatcher()
let onPlaybackStateChange = EventDispatcher() let onPlaybackStateChange = EventDispatcher()
let onProgress = EventDispatcher() let onProgress = EventDispatcher()
let onError = EventDispatcher() let onError = EventDispatcher()
let onTracksReady = EventDispatcher() let onTracksReady = EventDispatcher()
private var currentURL: URL? private var currentURL: URL?
private var cachedPosition: Double = 0 private var cachedPosition: Double = 0
private var cachedDuration: Double = 0 private var cachedDuration: Double = 0
private var intendedPlayState: Bool = false // For PiP - ignores transient states during seek private var intendedPlayState: Bool = false
required init(appContext: AppContext? = nil) { required init(appContext: AppContext? = nil) {
super.init(appContext: appContext) super.init(appContext: appContext)
setupView() setupView()
} }
private func setupView() { private func setupView() {
clipsToBounds = true clipsToBounds = true
backgroundColor = .black backgroundColor = .black
videoContainer = UIView() videoContainer = UIView()
videoContainer.translatesAutoresizingMaskIntoConstraints = false videoContainer.translatesAutoresizingMaskIntoConstraints = false
videoContainer.backgroundColor = .black videoContainer.backgroundColor = .black
videoContainer.clipsToBounds = true videoContainer.clipsToBounds = true
addSubview(videoContainer) addSubview(videoContainer)
displayLayer.frame = bounds displayLayer.frame = bounds
displayLayer.videoGravity = .resizeAspect displayLayer.videoGravity = .resizeAspect
if #available(iOS 17.0, *) { if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true displayLayer.wantsExtendedDynamicRangeContent = true
} }
displayLayer.backgroundColor = UIColor.black.cgColor displayLayer.backgroundColor = UIColor.black.cgColor
videoContainer.layer.addSublayer(displayLayer) videoContainer.layer.addSublayer(displayLayer)
NSLayoutConstraint.activate([ NSLayoutConstraint.activate([
videoContainer.topAnchor.constraint(equalTo: topAnchor), videoContainer.topAnchor.constraint(equalTo: topAnchor),
videoContainer.leadingAnchor.constraint(equalTo: leadingAnchor), videoContainer.leadingAnchor.constraint(equalTo: leadingAnchor),
videoContainer.trailingAnchor.constraint(equalTo: trailingAnchor), videoContainer.trailingAnchor.constraint(equalTo: trailingAnchor),
videoContainer.bottomAnchor.constraint(equalTo: bottomAnchor) videoContainer.bottomAnchor.constraint(equalTo: bottomAnchor)
]) ])
renderer = MPVSoftwareRenderer(displayLayer: displayLayer) do {
renderer?.delegate = self renderer = try MPVMetalRenderer(displayLayer: displayLayer)
renderer?.delegate = self
try renderer?.start()
} catch MPVMetalRenderer.RendererError.metalNotSupported {
onError(["error": "Metal is not supported on this device"])
} catch {
onError(["error": "Failed to start renderer: \(error.localizedDescription)"])
}
// Setup PiP // Setup PiP
pipController = PiPController(sampleBufferDisplayLayer: displayLayer) pipController = PiPController(sampleBufferDisplayLayer: displayLayer)
pipController?.delegate = self pipController?.delegate = self
}
do { override func layoutSubviews() {
try renderer?.start() super.layoutSubviews()
} catch { CATransaction.begin()
onError(["error": "Failed to start renderer: \(error.localizedDescription)"]) CATransaction.setDisableActions(true)
} displayLayer.frame = videoContainer.bounds
} displayLayer.isHidden = false
displayLayer.opacity = 1.0
CATransaction.commit()
}
override func layoutSubviews() { func loadVideo(config: VideoLoadConfig) {
super.layoutSubviews() // Skip reload if same URL is already playing
CATransaction.begin() if currentURL == config.url {
CATransaction.setDisableActions(true) return
displayLayer.frame = videoContainer.bounds }
displayLayer.isHidden = false currentURL = config.url
displayLayer.opacity = 1.0
CATransaction.commit()
}
func loadVideo(config: VideoLoadConfig) { let preset = PlayerPreset(
// Skip reload if same URL is already playing id: .sdrRec709,
if currentURL == config.url { title: "Default",
return summary: "Default playback preset",
} stream: nil,
currentURL = config.url commands: []
)
let preset = PlayerPreset( // Pass everything to the renderer
id: .sdrRec709, renderer?.load(
title: "Default", url: config.url,
summary: "Default playback preset", with: preset,
stream: nil, headers: config.headers,
commands: [] startPosition: config.startPosition,
) externalSubtitles: config.externalSubtitles,
initialSubtitleId: config.initialSubtitleId,
initialAudioId: config.initialAudioId
)
if config.autoplay {
play()
}
onLoad(["url": config.url.absoluteString])
}
// Convenience method for simple loads
func loadVideo(url: URL, headers: [String: String]? = nil) {
loadVideo(config: VideoLoadConfig(url: url, headers: headers))
}
// Pass everything to the renderer - it handles start position and external subs func play() {
renderer?.load( intendedPlayState = true
url: config.url, renderer?.play()
with: preset, pipController?.updatePlaybackState()
headers: config.headers, }
startPosition: config.startPosition,
externalSubtitles: config.externalSubtitles,
initialSubtitleId: config.initialSubtitleId,
initialAudioId: config.initialAudioId
)
if config.autoplay {
play()
}
onLoad(["url": config.url.absoluteString])
}
// Convenience method for simple loads
func loadVideo(url: URL, headers: [String: String]? = nil) {
loadVideo(config: VideoLoadConfig(url: url, headers: headers))
}
func play() { func pause() {
intendedPlayState = true intendedPlayState = false
renderer?.play() renderer?.pausePlayback()
pipController?.updatePlaybackState() pipController?.updatePlaybackState()
} }
func pause() { func seekTo(position: Double) {
intendedPlayState = false renderer?.seek(to: position)
renderer?.pausePlayback() }
pipController?.updatePlaybackState()
}
func seekTo(position: Double) { func seekBy(offset: Double) {
renderer?.seek(to: position) renderer?.seek(by: offset)
} }
func seekBy(offset: Double) { func setSpeed(speed: Double) {
renderer?.seek(by: offset) renderer?.setSpeed(speed)
} }
func setSpeed(speed: Double) { func getSpeed() -> Double {
renderer?.setSpeed(speed) return renderer?.getSpeed() ?? 1.0
} }
func getSpeed() -> Double { func isPaused() -> Bool {
return renderer?.getSpeed() ?? 1.0 return renderer?.isPausedState ?? true
} }
func isPaused() -> Bool { func getCurrentPosition() -> Double {
return renderer?.isPausedState ?? true return cachedPosition
} }
func getCurrentPosition() -> Double { func getDuration() -> Double {
return cachedPosition return cachedDuration
} }
func getDuration() -> Double { // MARK: - Picture in Picture
return cachedDuration
}
// MARK: - Picture in Picture func startPictureInPicture() {
pipController?.startPictureInPicture()
}
func startPictureInPicture() { func stopPictureInPicture() {
print("🎬 MpvPlayerView: startPictureInPicture called") pipController?.stopPictureInPicture()
print("🎬 Duration: \(getDuration()), IsPlaying: \(!isPaused())") }
pipController?.startPictureInPicture()
}
func stopPictureInPicture() { func isPictureInPictureSupported() -> Bool {
pipController?.stopPictureInPicture() return pipController?.isPictureInPictureSupported ?? false
} }
func isPictureInPictureSupported() -> Bool { func isPictureInPictureActive() -> Bool {
return pipController?.isPictureInPictureSupported ?? false return pipController?.isPictureInPictureActive ?? false
} }
// MARK: - Subtitle Controls
func getSubtitleTracks() -> [[String: Any]] {
return renderer?.getSubtitleTracks() ?? []
}
func setSubtitleTrack(_ trackId: Int) {
renderer?.setSubtitleTrack(trackId)
}
func disableSubtitles() {
renderer?.disableSubtitles()
}
func getCurrentSubtitleTrack() -> Int {
return renderer?.getCurrentSubtitleTrack() ?? 0
}
func addSubtitleFile(url: String, select: Bool = true) {
renderer?.addSubtitleFile(url: url, select: select)
}
// MARK: - Audio Track Controls
func getAudioTracks() -> [[String: Any]] {
return renderer?.getAudioTracks() ?? []
}
func setAudioTrack(_ trackId: Int) {
renderer?.setAudioTrack(trackId)
}
func getCurrentAudioTrack() -> Int {
return renderer?.getCurrentAudioTrack() ?? 0
}
// MARK: - Subtitle Positioning
func setSubtitlePosition(_ position: Int) {
renderer?.setSubtitlePosition(position)
}
func setSubtitleScale(_ scale: Double) {
renderer?.setSubtitleScale(scale)
}
func setSubtitleMarginY(_ margin: Int) {
renderer?.setSubtitleMarginY(margin)
}
func setSubtitleAlignX(_ alignment: String) {
renderer?.setSubtitleAlignX(alignment)
}
func setSubtitleAlignY(_ alignment: String) {
renderer?.setSubtitleAlignY(alignment)
}
func setSubtitleFontSize(_ size: Int) {
renderer?.setSubtitleFontSize(size)
}
func isPictureInPictureActive() -> Bool { deinit {
return pipController?.isPictureInPictureActive ?? false pipController?.stopPictureInPicture()
} renderer?.stop()
displayLayer.controlTimebase = nil
// MARK: - Subtitle Controls displayLayer.removeFromSuperlayer()
}
func getSubtitleTracks() -> [[String: Any]] {
return renderer?.getSubtitleTracks() ?? []
}
func setSubtitleTrack(_ trackId: Int) {
renderer?.setSubtitleTrack(trackId)
}
func disableSubtitles() {
renderer?.disableSubtitles()
}
func getCurrentSubtitleTrack() -> Int {
return renderer?.getCurrentSubtitleTrack() ?? 0
}
func addSubtitleFile(url: String, select: Bool = true) {
renderer?.addSubtitleFile(url: url, select: select)
}
// MARK: - Audio Track Controls
func getAudioTracks() -> [[String: Any]] {
return renderer?.getAudioTracks() ?? []
}
func setAudioTrack(_ trackId: Int) {
renderer?.setAudioTrack(trackId)
}
func getCurrentAudioTrack() -> Int {
return renderer?.getCurrentAudioTrack() ?? 0
}
// MARK: - Subtitle Positioning
func setSubtitlePosition(_ position: Int) {
renderer?.setSubtitlePosition(position)
}
func setSubtitleScale(_ scale: Double) {
renderer?.setSubtitleScale(scale)
}
func setSubtitleMarginY(_ margin: Int) {
renderer?.setSubtitleMarginY(margin)
}
func setSubtitleAlignX(_ alignment: String) {
renderer?.setSubtitleAlignX(alignment)
}
func setSubtitleAlignY(_ alignment: String) {
renderer?.setSubtitleAlignY(alignment)
}
func setSubtitleFontSize(_ size: Int) {
renderer?.setSubtitleFontSize(size)
}
deinit {
pipController?.stopPictureInPicture()
renderer?.stop()
displayLayer.removeFromSuperlayer()
}
} }
// MARK: - MPVSoftwareRendererDelegate // MARK: - MPVMetalRendererDelegate
extension MpvPlayerView: MPVSoftwareRendererDelegate { extension MpvPlayerView: MPVMetalRendererDelegate {
func renderer(_: MPVSoftwareRenderer, didUpdatePosition position: Double, duration: Double) { func renderer(_: MPVMetalRenderer, didUpdatePosition position: Double, duration: Double) {
cachedPosition = position cachedPosition = position
cachedDuration = duration cachedDuration = duration
DispatchQueue.main.async { [weak self] in DispatchQueue.main.async { [weak self] in
guard let self else { return } guard let self else { return }
// Only update PiP state when PiP is active if self.pipController?.isPictureInPictureActive == true {
if self.pipController?.isPictureInPictureActive == true { self.pipController?.updatePlaybackState()
self.pipController?.updatePlaybackState() }
}
self.onProgress([
self.onProgress([ "position": position,
"position": position, "duration": duration,
"duration": duration, "progress": duration > 0 ? position / duration : 0,
"progress": duration > 0 ? position / duration : 0, ])
]) }
} }
}
func renderer(_: MPVSoftwareRenderer, didChangePause isPaused: Bool) { func renderer(_: MPVMetalRenderer, didChangePause isPaused: Bool) {
DispatchQueue.main.async { [weak self] in DispatchQueue.main.async { [weak self] in
guard let self else { return } guard let self else { return }
// Don't update intendedPlayState here - it's only set by user actions (play/pause) self.onPlaybackStateChange([
// This prevents PiP UI flicker during seeking "isPaused": isPaused,
self.onPlaybackStateChange([ "isPlaying": !isPaused,
"isPaused": isPaused, ])
"isPlaying": !isPaused, }
]) }
// Note: Don't call updatePlaybackState() here to avoid flicker
// PiP queries pipControllerIsPlaying when it needs the state
}
}
func renderer(_: MPVSoftwareRenderer, didChangeLoading isLoading: Bool) { func renderer(_: MPVMetalRenderer, didChangeLoading isLoading: Bool) {
DispatchQueue.main.async { [weak self] in DispatchQueue.main.async { [weak self] in
guard let self else { return } guard let self else { return }
self.onPlaybackStateChange([ self.onPlaybackStateChange([
"isLoading": isLoading, "isLoading": isLoading,
]) ])
} }
} }
func renderer(_: MPVSoftwareRenderer, didBecomeReadyToSeek: Bool) { func renderer(_: MPVMetalRenderer, didBecomeReadyToSeek: Bool) {
DispatchQueue.main.async { [weak self] in DispatchQueue.main.async { [weak self] in
guard let self else { return } guard let self else { return }
self.onPlaybackStateChange([ self.onPlaybackStateChange([
"isReadyToSeek": didBecomeReadyToSeek, "isReadyToSeek": didBecomeReadyToSeek,
]) ])
} }
} }
func renderer(_: MPVSoftwareRenderer, didBecomeTracksReady: Bool) { func renderer(_: MPVMetalRenderer, didBecomeTracksReady: Bool) {
DispatchQueue.main.async { [weak self] in DispatchQueue.main.async { [weak self] in
guard let self else { return } guard let self else { return }
self.onTracksReady([:]) self.onTracksReady([:])
} }
} }
} }
// MARK: - PiPControllerDelegate // MARK: - PiPControllerDelegate
extension MpvPlayerView: PiPControllerDelegate { extension MpvPlayerView: PiPControllerDelegate {
func pipController(_ controller: PiPController, willStartPictureInPicture: Bool) { func pipController(_ controller: PiPController, willStartPictureInPicture: Bool) {
print("PiP will start") renderer?.syncTimebase()
// Sync timebase before PiP starts for smooth transition pipController?.updatePlaybackState()
renderer?.syncTimebase() }
pipController?.updatePlaybackState()
} func pipController(_ controller: PiPController, didStartPictureInPicture: Bool) {
pipController?.updatePlaybackState()
func pipController(_ controller: PiPController, didStartPictureInPicture: Bool) { }
print("PiP did start: \(didStartPictureInPicture)")
pipController?.updatePlaybackState() func pipController(_ controller: PiPController, willStopPictureInPicture: Bool) {
} renderer?.syncTimebase()
}
func pipController(_ controller: PiPController, willStopPictureInPicture: Bool) {
print("PiP will stop") func pipController(_ controller: PiPController, didStopPictureInPicture: Bool) {
// Sync timebase before returning from PiP renderer?.syncTimebase()
renderer?.syncTimebase() pipController?.updatePlaybackState()
} }
func pipController(_ controller: PiPController, didStopPictureInPicture: Bool) { func pipController(_ controller: PiPController, restoreUserInterfaceForPictureInPictureStop completionHandler: @escaping (Bool) -> Void) {
print("PiP did stop") completionHandler(true)
// Ensure timebase is synced after PiP ends }
renderer?.syncTimebase()
pipController?.updatePlaybackState() func pipControllerPlay(_ controller: PiPController) {
} play()
}
func pipController(_ controller: PiPController, restoreUserInterfaceForPictureInPictureStop completionHandler: @escaping (Bool) -> Void) {
print("PiP restore user interface") func pipControllerPause(_ controller: PiPController) {
completionHandler(true) pause()
} }
func pipControllerPlay(_ controller: PiPController) { func pipController(_ controller: PiPController, skipByInterval interval: CMTime) {
print("PiP play requested") let seconds = CMTimeGetSeconds(interval)
play() let target = max(0, cachedPosition + seconds)
} seekTo(position: target)
}
func pipControllerPause(_ controller: PiPController) {
print("PiP pause requested") func pipControllerIsPlaying(_ controller: PiPController) -> Bool {
pause() return intendedPlayState
} }
func pipController(_ controller: PiPController, skipByInterval interval: CMTime) { func pipControllerDuration(_ controller: PiPController) -> Double {
let seconds = CMTimeGetSeconds(interval) return getDuration()
print("PiP skip by interval: \(seconds)") }
let target = max(0, cachedPosition + seconds)
seekTo(position: target)
}
func pipControllerIsPlaying(_ controller: PiPController) -> Bool {
// Use intended state to ignore transient pauses during seeking
return intendedPlayState
}
func pipControllerDuration(_ controller: PiPController) -> Double {
return getDuration()
}
} }

View File

@@ -103,7 +103,7 @@ extension PiPController: AVPictureInPictureControllerDelegate {
} }
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, failedToStartPictureInPictureWithError error: Error) { func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, failedToStartPictureInPictureWithError error: Error) {
print("Failed to start PiP: \(error)") Logger.shared.log("Failed to start PiP: \(error.localizedDescription)", type: "Error")
delegate?.pipController(self, didStartPictureInPicture: false) delegate?.pipController(self, didStartPictureInPicture: false)
} }
@@ -169,4 +169,4 @@ extension PiPController: AVPictureInPictureSampleBufferPlaybackDelegate {
} }
completion() completion()
} }
} }

View File

@@ -1,72 +0,0 @@
import UIKit
import AVFoundation
final class SampleBufferDisplayView: UIView {
override class var layerClass: AnyClass { AVSampleBufferDisplayLayer.self }
var displayLayer: AVSampleBufferDisplayLayer {
return layer as! AVSampleBufferDisplayLayer
}
private(set) var pipController: PiPController?
weak var pipDelegate: PiPControllerDelegate? {
didSet {
pipController?.delegate = pipDelegate
}
}
override init(frame: CGRect) {
super.init(frame: frame)
commonInit()
}
required init?(coder: NSCoder) {
super.init(coder: coder)
commonInit()
}
private func commonInit() {
backgroundColor = .black
displayLayer.videoGravity = .resizeAspect
#if !os(tvOS)
#if compiler(>=6.0)
if #available(iOS 26.0, *) {
displayLayer.preferredDynamicRange = .automatic
} else if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
#endif
if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
#endif
setupPictureInPicture()
}
private func setupPictureInPicture() {
pipController = PiPController(sampleBufferDisplayLayer: displayLayer)
}
// MARK: - PiP Control Methods
func startPictureInPicture() {
pipController?.startPictureInPicture()
}
func stopPictureInPicture() {
pipController?.stopPictureInPicture()
}
var isPictureInPictureSupported: Bool {
return pipController?.isPictureInPictureSupported ?? false
}
var isPictureInPictureActive: Bool {
return pipController?.isPictureInPictureActive ?? false
}
var isPictureInPicturePossible: Bool {
return pipController?.isPictureInPicturePossible ?? false
}
}

View File

@@ -23,14 +23,6 @@ export type OnErrorEventPayload = {
export type OnTracksReadyEventPayload = Record<string, never>; export type OnTracksReadyEventPayload = Record<string, never>;
export type MpvPlayerModuleEvents = {
onChange: (params: ChangeEventPayload) => void;
};
export type ChangeEventPayload = {
value: string;
};
export type VideoSource = { export type VideoSource = {
url: string; url: string;
headers?: Record<string, string>; headers?: Record<string, string>;

View File

@@ -1,11 +0,0 @@
import { NativeModule, requireNativeModule } from "expo";
import { MpvPlayerModuleEvents } from "./MpvPlayer.types";
declare class MpvPlayerModule extends NativeModule<MpvPlayerModuleEvents> {
hello(): string;
setValueAsync(value: string): Promise<void>;
}
// This call loads the native module object from the JSI.
export default requireNativeModule<MpvPlayerModule>("MpvPlayer");

View File

@@ -1,19 +0,0 @@
import { NativeModule, registerWebModule } from "expo";
import { ChangeEventPayload } from "./MpvPlayer.types";
type MpvPlayerModuleEvents = {
onChange: (params: ChangeEventPayload) => void;
};
class MpvPlayerModule extends NativeModule<MpvPlayerModuleEvents> {
PI = Math.PI;
async setValueAsync(value: string): Promise<void> {
this.emit("onChange", { value });
}
hello() {
return "Hello world! 👋";
}
}
export default registerWebModule(MpvPlayerModule, "MpvPlayerModule");

View File

@@ -28,16 +28,16 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
await nativeRef.current?.setSpeed(speed); await nativeRef.current?.setSpeed(speed);
}, },
getSpeed: async () => { getSpeed: async () => {
return await nativeRef.current?.getSpeed(); return (await nativeRef.current?.getSpeed()) ?? 1.0;
}, },
isPaused: async () => { isPaused: async () => {
return await nativeRef.current?.isPaused(); return (await nativeRef.current?.isPaused()) ?? true;
}, },
getCurrentPosition: async () => { getCurrentPosition: async () => {
return await nativeRef.current?.getCurrentPosition(); return (await nativeRef.current?.getCurrentPosition()) ?? 0;
}, },
getDuration: async () => { getDuration: async () => {
return await nativeRef.current?.getDuration(); return (await nativeRef.current?.getDuration()) ?? 0;
}, },
startPictureInPicture: async () => { startPictureInPicture: async () => {
await nativeRef.current?.startPictureInPicture(); await nativeRef.current?.startPictureInPicture();
@@ -46,13 +46,15 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
await nativeRef.current?.stopPictureInPicture(); await nativeRef.current?.stopPictureInPicture();
}, },
isPictureInPictureSupported: async () => { isPictureInPictureSupported: async () => {
return await nativeRef.current?.isPictureInPictureSupported(); return (
(await nativeRef.current?.isPictureInPictureSupported()) ?? false
);
}, },
isPictureInPictureActive: async () => { isPictureInPictureActive: async () => {
return await nativeRef.current?.isPictureInPictureActive(); return (await nativeRef.current?.isPictureInPictureActive()) ?? false;
}, },
getSubtitleTracks: async () => { getSubtitleTracks: async () => {
return await nativeRef.current?.getSubtitleTracks(); return (await nativeRef.current?.getSubtitleTracks()) ?? [];
}, },
setSubtitleTrack: async (trackId: number) => { setSubtitleTrack: async (trackId: number) => {
await nativeRef.current?.setSubtitleTrack(trackId); await nativeRef.current?.setSubtitleTrack(trackId);
@@ -61,7 +63,7 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
await nativeRef.current?.disableSubtitles(); await nativeRef.current?.disableSubtitles();
}, },
getCurrentSubtitleTrack: async () => { getCurrentSubtitleTrack: async () => {
return await nativeRef.current?.getCurrentSubtitleTrack(); return (await nativeRef.current?.getCurrentSubtitleTrack()) ?? 0;
}, },
addSubtitleFile: async (url: string, select = true) => { addSubtitleFile: async (url: string, select = true) => {
await nativeRef.current?.addSubtitleFile(url, select); await nativeRef.current?.addSubtitleFile(url, select);
@@ -84,15 +86,14 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
setSubtitleFontSize: async (size: number) => { setSubtitleFontSize: async (size: number) => {
await nativeRef.current?.setSubtitleFontSize(size); await nativeRef.current?.setSubtitleFontSize(size);
}, },
// Audio controls
getAudioTracks: async () => { getAudioTracks: async () => {
return await nativeRef.current?.getAudioTracks(); return (await nativeRef.current?.getAudioTracks()) ?? [];
}, },
setAudioTrack: async (trackId: number) => { setAudioTrack: async (trackId: number) => {
await nativeRef.current?.setAudioTrack(trackId); await nativeRef.current?.setAudioTrack(trackId);
}, },
getCurrentAudioTrack: async () => { getCurrentAudioTrack: async () => {
return await nativeRef.current?.getCurrentAudioTrack(); return (await nativeRef.current?.getCurrentAudioTrack()) ?? 0;
}, },
})); }));

View File

@@ -1,14 +0,0 @@
import { MpvPlayerViewProps } from "./MpvPlayer.types";
export default function MpvPlayerView(props: MpvPlayerViewProps) {
return (
<div>
<iframe
title='MPV Player'
style={{ flex: 1 }}
src={props.url}
onLoad={() => props.onLoad({ nativeEvent: { url: props.url } })}
/>
</div>
);
}

View File

@@ -1,3 +1,2 @@
export * from "./MpvPlayer.types"; export * from "./MpvPlayer.types";
export { default as MpvPlayerModule } from "./MpvPlayerModule";
export { default as MpvPlayerView } from "./MpvPlayerView"; export { default as MpvPlayerView } from "./MpvPlayerView";

View File

@@ -96,7 +96,6 @@ export const WebSocketProvider = ({ children }: WebSocketProviderProps) => {
newWebSocket.onmessage = (e) => { newWebSocket.onmessage = (e) => {
try { try {
const message = JSON.parse(e.data); const message = JSON.parse(e.data);
console.log("[WS] Received message:", message);
setLastMessage(message); // Store the last message in context setLastMessage(message); // Store the last message in context
} catch (error) { } catch (error) {
console.error("Error parsing WebSocket message:", error); console.error("Error parsing WebSocket message:", error);
@@ -124,12 +123,10 @@ export const WebSocketProvider = ({ children }: WebSocketProviderProps) => {
const handlePlayCommand = useCallback( const handlePlayCommand = useCallback(
(data: any) => { (data: any) => {
if (!data || !data.ItemIds || !data.ItemIds.length) { if (!data || !data.ItemIds || !data.ItemIds.length) {
console.warn("[WS] Received Play command with no items");
return; return;
} }
const itemId = data.ItemIds[0]; const itemId = data.ItemIds[0];
console.log(`[WS] Handling Play command for item: ${itemId}`);
router.push({ router.push({
pathname: "/(auth)/player/direct-player", pathname: "/(auth)/player/direct-player",