Mirror of https://github.com/streamyfin/streamyfin.git (synced 2026-01-15 15:48:05 +00:00)
wip
@@ -1,9 +1,6 @@
{
  "platforms": ["apple", "android", "web"],
  "platforms": ["apple"],
  "apple": {
    "modules": ["MpvPlayerModule"]
  },
  "android": {
    "modules": ["expo.modules.mpvplayer.MpvPlayerModule"]
  }
}

@@ -1,6 +1,2 @@
// Reexport the native module. On web, it will be resolved to MpvPlayerModule.web.ts
// and on native platforms to MpvPlayerModule.ts

export * from "./src/MpvPlayer.types";
export { default } from "./src/MpvPlayerModule";
export { default as MpvPlayerView } from "./src/MpvPlayerView";

247 modules/mpv-player/ios/IOSurfaceBufferPool.swift Normal file
@@ -0,0 +1,247 @@
import Foundation
import CoreVideo
import Metal
import CoreMedia
import AVFoundation

/// Manages a pool of IOSurface-backed CVPixelBuffers that can be shared between Metal and AVFoundation
/// This enables zero-copy rendering where mpv renders to Metal textures that are directly usable by AVSampleBufferDisplayLayer
final class IOSurfaceBufferPool {

    struct PooledBuffer {
        let pixelBuffer: CVPixelBuffer
        let texture: MTLTexture
        let ioSurface: IOSurfaceRef
    }

    private let device: MTLDevice
    private var pool: CVPixelBufferPool?
    private var buffers: [PooledBuffer] = []
    private var availableBuffers: [PooledBuffer] = []
    private let lock = NSLock()

    private(set) var width: Int = 0
    private(set) var height: Int = 0
    private(set) var pixelFormat: OSType = kCVPixelFormatType_32BGRA

    private let maxBufferCount: Int

    init(device: MTLDevice, maxBufferCount: Int = 3) {
        self.device = device
        self.maxBufferCount = maxBufferCount
    }

    deinit {
        invalidate()
    }

    /// Configure the pool for a specific video size and format
    func configure(width: Int, height: Int, pixelFormat: OSType = kCVPixelFormatType_32BGRA) -> Bool {
        lock.lock()
        defer { lock.unlock() }

        guard width > 0, height > 0 else { return false }

        // Skip if already configured for this size
        if self.width == width && self.height == height && self.pixelFormat == pixelFormat && pool != nil {
            return true
        }

        // Clear existing buffers
        buffers.removeAll()
        availableBuffers.removeAll()
        pool = nil

        self.width = width
        self.height = height
        self.pixelFormat = pixelFormat

        // Create pixel buffer pool with IOSurface and Metal compatibility
        let pixelBufferAttributes: [CFString: Any] = [
            kCVPixelBufferPixelFormatTypeKey: pixelFormat,
            kCVPixelBufferWidthKey: width,
            kCVPixelBufferHeightKey: height,
            kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary,
            kCVPixelBufferMetalCompatibilityKey: true,
            kCVPixelBufferCGImageCompatibilityKey: true,
            kCVPixelBufferCGBitmapContextCompatibilityKey: true
        ]

        let poolAttributes: [CFString: Any] = [
            kCVPixelBufferPoolMinimumBufferCountKey: maxBufferCount
        ]

        var newPool: CVPixelBufferPool?
        let status = CVPixelBufferPoolCreate(
            kCFAllocatorDefault,
            poolAttributes as CFDictionary,
            pixelBufferAttributes as CFDictionary,
            &newPool
        )

        guard status == kCVReturnSuccess, let createdPool = newPool else {
            Logger.shared.log("Failed to create IOSurface buffer pool: \(status)", type: "Error")
            return false
        }

        pool = createdPool

        // Pre-allocate buffers
        for _ in 0..<maxBufferCount {
            if let buffer = createPooledBuffer() {
                buffers.append(buffer)
                availableBuffers.append(buffer)
            }
        }

        Logger.shared.log("IOSurfaceBufferPool configured: \(width)x\(height), \(buffers.count) buffers", type: "Info")
        return true
    }

    /// Get an available buffer for rendering
    func dequeueBuffer() -> PooledBuffer? {
        lock.lock()
        defer { lock.unlock() }

        if let buffer = availableBuffers.popLast() {
            return buffer
        }

        // Try to create a new buffer if under limit
        if buffers.count < maxBufferCount, let buffer = createPooledBuffer() {
            buffers.append(buffer)
            return buffer
        }

        // All buffers in use - create temporary one
        return createPooledBuffer()
    }

    /// Return a buffer to the pool after use
    func enqueueBuffer(_ buffer: PooledBuffer) {
        lock.lock()
        defer { lock.unlock() }

        // Only return to available pool if it's one of our managed buffers
        if buffers.contains(where: { $0.pixelBuffer == buffer.pixelBuffer }) {
            availableBuffers.append(buffer)
        }
    }

    /// Clear all buffers and reset the pool
    func invalidate() {
        lock.lock()
        defer { lock.unlock() }

        buffers.removeAll()
        availableBuffers.removeAll()
        pool = nil
        width = 0
        height = 0
    }

    private func createPooledBuffer() -> PooledBuffer? {
        guard let pool = pool else { return nil }

        var pixelBuffer: CVPixelBuffer?
        let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pixelBuffer)

        guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
            Logger.shared.log("Failed to create pixel buffer from pool: \(status)", type: "Error")
            return nil
        }

        // Get IOSurface from pixel buffer
        guard let ioSurface = CVPixelBufferGetIOSurface(buffer)?.takeUnretainedValue() else {
            Logger.shared.log("Failed to get IOSurface from pixel buffer", type: "Error")
            return nil
        }

        // Create Metal texture from IOSurface
        let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
            pixelFormat: metalPixelFormat(for: pixelFormat),
            width: width,
            height: height,
            mipmapped: false
        )
        textureDescriptor.usage = [.renderTarget, .shaderRead, .shaderWrite]
        textureDescriptor.storageMode = .shared

        guard let texture = device.makeTexture(descriptor: textureDescriptor, iosurface: ioSurface, plane: 0) else {
            Logger.shared.log("Failed to create Metal texture from IOSurface", type: "Error")
            return nil
        }

        return PooledBuffer(pixelBuffer: buffer, texture: texture, ioSurface: ioSurface)
    }

    private func metalPixelFormat(for cvFormat: OSType) -> MTLPixelFormat {
        switch cvFormat {
        case kCVPixelFormatType_32BGRA:
            return .bgra8Unorm
        case kCVPixelFormatType_32RGBA:
            return .rgba8Unorm
        case kCVPixelFormatType_64RGBAHalf:
            return .rgba16Float
        default:
            return .bgra8Unorm
        }
    }
}

// MARK: - CMSampleBuffer Creation

extension IOSurfaceBufferPool {

    /// Create a CMSampleBuffer from a pooled buffer for AVSampleBufferDisplayLayer
    static func createSampleBuffer(
        from pixelBuffer: CVPixelBuffer,
        formatDescription: CMVideoFormatDescription,
        presentationTime: CMTime
    ) -> CMSampleBuffer? {
        var timing = CMSampleTimingInfo(
            duration: .invalid,
            presentationTimeStamp: presentationTime,
            decodeTimeStamp: .invalid
        )

        var sampleBuffer: CMSampleBuffer?
        let status = CMSampleBufferCreateForImageBuffer(
            allocator: kCFAllocatorDefault,
            imageBuffer: pixelBuffer,
            dataReady: true,
            makeDataReadyCallback: nil,
            refcon: nil,
            formatDescription: formatDescription,
            sampleTiming: &timing,
            sampleBufferOut: &sampleBuffer
        )

        guard status == noErr else {
            Logger.shared.log("Failed to create sample buffer: \(status)", type: "Error")
            return nil
        }

        return sampleBuffer
    }

    /// Create a format description for the current pool configuration
    func createFormatDescription() -> CMVideoFormatDescription? {
        guard let buffer = dequeueBuffer() else { return nil }
        defer { enqueueBuffer(buffer) }

        var formatDescription: CMVideoFormatDescription?
        let status = CMVideoFormatDescriptionCreateForImageBuffer(
            allocator: kCFAllocatorDefault,
            imageBuffer: buffer.pixelBuffer,
            formatDescriptionOut: &formatDescription
        )

        guard status == noErr else {
            Logger.shared.log("Failed to create format description: \(status)", type: "Error")
            return nil
        }

        return formatDescription
    }
}

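The pool above is meant to be driven from a render loop: configure for the current video size, dequeue a buffer, let the producer fill it, wrap it in a CMSampleBuffer, enqueue it to the display layer, then hand the buffer back. A rough sketch under those assumptions (the helper function and the fixed 1920x1080 size are illustrative only, not part of the commit):

import AVFoundation
import CoreMedia
import Metal

// Illustrative helper (not in the commit): push one frame through the pool
// into an AVSampleBufferDisplayLayer.
func displayOneFrame(pool: IOSurfaceBufferPool,
                     layer: AVSampleBufferDisplayLayer,
                     atSeconds seconds: Double) {
    // Size the pool for the current video; 1920x1080 is a placeholder.
    guard pool.configure(width: 1920, height: 1080),
          let pooled = pool.dequeueBuffer(),
          let format = pool.createFormatDescription() else { return }

    // ... a producer (e.g. mpv's software render path) fills pooled.pixelBuffer here ...

    // Wrap the IOSurface-backed pixel buffer and hand it to the layer.
    let pts = CMTime(seconds: seconds, preferredTimescale: 1000)
    if let sample = IOSurfaceBufferPool.createSampleBuffer(from: pooled.pixelBuffer,
                                                           formatDescription: format,
                                                           presentationTime: pts) {
        layer.enqueue(sample)
    }

    // Return the buffer so the pool can recycle it.
    pool.enqueueBuffer(pooled)
}
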
@@ -1,6 +1,6 @@
import Foundation

class Logger {
final class Logger {
static let shared = Logger()

struct LogEntry {
@@ -12,6 +12,7 @@ class Logger {
private let queue = DispatchQueue(label: "mpvkit.logger", attributes: .concurrent)
private var logs: [LogEntry] = []
private let logFileURL: URL
private let dateFormatter: DateFormatter

private let maxFileSize = 1024 * 512
private let maxLogEntries = 1000
@@ -19,12 +20,17 @@ class Logger {
private init() {
let tmpDir = URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
logFileURL = tmpDir.appendingPathComponent("logs.txt")

dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
}

func log(_ message: String, type: String = "General") {
let entry = LogEntry(message: message, type: type, timestamp: Date())

queue.async(flags: .barrier) {
queue.async(flags: .barrier) { [weak self] in
guard let self else { return }

self.logs.append(entry)

if self.logs.count > self.maxLogEntries {
@@ -32,15 +38,20 @@ class Logger {
}

self.saveLogToFile(entry)

#if DEBUG
self.debugLog(entry)
#endif

DispatchQueue.main.async {
NotificationCenter.default.post(name: NSNotification.Name("LoggerNotification"), object: nil,
userInfo: [
"message": message,
"type": type,
"timestamp": entry.timestamp
]
NotificationCenter.default.post(
name: NSNotification.Name("LoggerNotification"),
object: nil,
userInfo: [
"message": message,
"type": type,
"timestamp": entry.timestamp
]
)
}
}
@@ -49,8 +60,6 @@ class Logger {
func getLogs() -> String {
var result = ""
queue.sync {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
result = logs.map { "[\(dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" }
.joined(separator: "\n----\n")
}
@@ -59,10 +68,12 @@ class Logger {

func getLogsAsync() async -> String {
return await withCheckedContinuation { continuation in
queue.async {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
let result = self.logs.map { "[\(dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" }
queue.async { [weak self] in
guard let self else {
continuation.resume(returning: "")
return
}
let result = self.logs.map { "[\(self.dateFormatter.string(from: $0.timestamp))] [\($0.type)] \($0.message)" }
.joined(separator: "\n----\n")
continuation.resume(returning: result)
}
@@ -70,7 +81,8 @@ class Logger {
}

func clearLogs() {
queue.async(flags: .barrier) {
queue.async(flags: .barrier) { [weak self] in
guard let self else { return }
self.logs.removeAll()
try? FileManager.default.removeItem(at: self.logFileURL)
}
@@ -78,7 +90,11 @@ class Logger {

func clearLogsAsync() async {
await withCheckedContinuation { continuation in
queue.async(flags: .barrier) {
queue.async(flags: .barrier) { [weak self] in
guard let self else {
continuation.resume()
return
}
self.logs.removeAll()
try? FileManager.default.removeItem(at: self.logFileURL)
continuation.resume()
@@ -87,13 +103,9 @@ class Logger {
}

private func saveLogToFile(_ log: LogEntry) {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"

let logString = "[\(dateFormatter.string(from: log.timestamp))] [\(log.type)] \(log.message)\n---\n"

guard let data = logString.data(using: .utf8) else {
print("Failed to encode log string to UTF-8")
return
}

@@ -115,7 +127,6 @@ class Logger {
try data.write(to: logFileURL)
}
} catch {
print("Error managing log file: \(error)")
try? data.write(to: logFileURL)
}
}
@@ -138,17 +149,14 @@ class Logger {
try truncatedData.write(to: logFileURL)
}
} catch {
print("Error truncating log file: \(error)")
try? FileManager.default.removeItem(at: logFileURL)
}
}

#if DEBUG
private func debugLog(_ entry: LogEntry) {
#if DEBUG
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "dd-MM HH:mm:ss"
let formattedMessage = "[\(dateFormatter.string(from: entry.timestamp))] [\(entry.type)] \(entry.message)"
print(formattedMessage)
#endif
NSLog("%@", formattedMessage)
}
#endif
}

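The Logger changes above move the writes to barrier blocks with [weak self] and reuse one DateFormatter instance. Stripped of the Logger specifics, the underlying concurrent-queue reader/writer pattern looks roughly like this (illustrative names, not the Logger API):

import Foundation

final class SharedList {
    private let queue = DispatchQueue(label: "shared.list", attributes: .concurrent)
    private var items: [String] = []

    // Writes go through a barrier, so they are exclusive with all reads.
    func append(_ item: String) {
        queue.async(flags: .barrier) { [weak self] in
            self?.items.append(item)
        }
    }

    // Reads run concurrently with each other, but never overlap a write.
    func snapshot() -> [String] {
        queue.sync { items }
    }
}
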
@@ -1,45 +1,41 @@
import UIKit
import Metal
import Libmpv
import CoreMedia
import CoreVideo
import AVFoundation

protocol MPVSoftwareRendererDelegate: AnyObject {
func renderer(_ renderer: MPVSoftwareRenderer, didUpdatePosition position: Double, duration: Double)
func renderer(_ renderer: MPVSoftwareRenderer, didChangePause isPaused: Bool)
func renderer(_ renderer: MPVSoftwareRenderer, didChangeLoading isLoading: Bool)
func renderer(_ renderer: MPVSoftwareRenderer, didBecomeReadyToSeek: Bool)
func renderer(_ renderer: MPVSoftwareRenderer, didBecomeTracksReady: Bool)
protocol MPVMetalRendererDelegate: AnyObject {
func renderer(_ renderer: MPVMetalRenderer, didUpdatePosition position: Double, duration: Double)
func renderer(_ renderer: MPVMetalRenderer, didChangePause isPaused: Bool)
func renderer(_ renderer: MPVMetalRenderer, didChangeLoading isLoading: Bool)
func renderer(_ renderer: MPVMetalRenderer, didBecomeReadyToSeek: Bool)
func renderer(_ renderer: MPVMetalRenderer, didBecomeTracksReady: Bool)
}

final class MPVSoftwareRenderer {
final class MPVMetalRenderer {
enum RendererError: Error {
case metalNotSupported
case mpvCreationFailed
case mpvInitialization(Int32)
case renderContextCreation(Int32)
}

private let displayLayer: AVSampleBufferDisplayLayer
private let renderQueue = DispatchQueue(label: "mpv.software.render", qos: .userInitiated)
private let eventQueue = DispatchQueue(label: "mpv.software.events", qos: .utility)
private let stateQueue = DispatchQueue(label: "mpv.software.state", attributes: .concurrent)
private let renderQueue = DispatchQueue(label: "mpv.metal.render", qos: .userInteractive)
private let eventQueue = DispatchQueue(label: "mpv.metal.events", qos: .utility)
private let stateQueue = DispatchQueue(label: "mpv.metal.state", attributes: .concurrent)
private let eventQueueGroup = DispatchGroup()
private let renderQueueKey = DispatchSpecificKey<Void>()

private var dimensionsArray = [Int32](repeating: 0, count: 2)
private var renderParams = [mpv_render_param](repeating: mpv_render_param(type: MPV_RENDER_PARAM_INVALID, data: nil), count: 5)
private var device: MTLDevice?
private var commandQueue: MTLCommandQueue?
private var bufferPool: IOSurfaceBufferPool?
private var formatDescription: CMVideoFormatDescription?

private var mpv: OpaquePointer?
private var renderContext: OpaquePointer?
private var videoSize: CGSize = .zero
private var pixelBufferPool: CVPixelBufferPool?
private var pixelBufferPoolAuxAttributes: CFDictionary?
private var formatDescription: CMVideoFormatDescription?
private var didFlushForFormatChange = false
private var poolWidth: Int = 0
private var poolHeight: Int = 0
private var preAllocatedBuffers: [CVPixelBuffer] = []
private let maxPreAllocatedBuffers = 12

private var currentPreset: PlayerPreset?
private var currentURL: URL?
@@ -52,22 +48,17 @@ final class MPVSoftwareRenderer {

private var isRunning = false
private var isStopping = false
private var shouldClearPixelBuffer = false
private let bgraFormatCString: [CChar] = Array("bgra\0".utf8CString)
private let maxInFlightBuffers = 3
private var inFlightBufferCount = 0
private let inFlightLock = NSLock()

weak var delegate: MPVSoftwareRendererDelegate?
weak var delegate: MPVMetalRendererDelegate?

// Thread-safe state for playback (uses existing stateQueue to prevent races causing stutter)
// Thread-safe state
private var _cachedDuration: Double = 0
private var _cachedPosition: Double = 0
private var _isPaused: Bool = true
private var _playbackSpeed: Double = 1.0
private var _isSeeking: Bool = false
private var _positionUpdateTime: CFTimeInterval = 0 // Host time when position was last updated
private var _lastPTS: Double = 0 // Last presentation timestamp (ensures monotonic increase)
private var _positionUpdateTime: CFTimeInterval = 0
private var _lastPTS: Double = 0

// Thread-safe accessors
private var cachedDuration: Double {
@@ -99,33 +90,6 @@ final class MPVSoftwareRenderer {
set { stateQueue.async(flags: .barrier) { self._lastPTS = newValue } }
}

/// Get next monotonically increasing PTS based on video position
/// This ensures frames always have increasing timestamps (prevents stutter from drops)
private func nextMonotonicPTS() -> Double {
let currentPos = interpolatedPosition()
let last = lastPTS

// Ensure PTS always increases (by at least 1ms) to prevent frame drops
let pts = max(currentPos, last + 0.001)
lastPTS = pts
return pts
}

/// Calculate smooth interpolated position based on last known position + elapsed time
private func interpolatedPosition() -> Double {
let basePosition = cachedPosition
let lastUpdate = positionUpdateTime
let paused = isPaused
let speed = playbackSpeed

guard !paused, lastUpdate > 0 else {
return basePosition
}

let elapsed = CACurrentMediaTime() - lastUpdate
return basePosition + (elapsed * speed)
}

private var isLoading: Bool = false
private var isRenderScheduled = false
private var lastRenderTime: CFTimeInterval = 0
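The interpolation above reduces to position = lastKnownPosition + elapsed * speed, with the PTS clamped so it never moves backwards. A small numeric illustration (all values made up):

// Illustrative values: the last mpv "time-pos" update said 10.0 s, it arrived 0.5 s ago,
// playback runs at 1.5x, and the last emitted PTS was 10.70 s.
let basePosition = 10.0
let elapsed = 0.5
let speed = 1.5
let lastPTS = 10.70

let interpolated = basePosition + elapsed * speed   // 10.75
let pts = max(interpolated, lastPTS + 0.001)        // 10.75, still strictly increasing
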
@@ -137,15 +101,22 @@ final class MPVSoftwareRenderer {
return isPaused
}

init(displayLayer: AVSampleBufferDisplayLayer) {
guard
let screen = UIApplication.shared.connectedScenes
.compactMap({ ($0 as? UIWindowScene)?.screen })
.first
else {
fatalError("⚠️ No active screen found — app may not have a visible window yet.")
init(displayLayer: AVSampleBufferDisplayLayer) throws {
guard let device = MTLCreateSystemDefaultDevice() else {
throw RendererError.metalNotSupported
}
self.device = device
self.commandQueue = device.makeCommandQueue()
self.displayLayer = displayLayer
self.bufferPool = IOSurfaceBufferPool(device: device, maxBufferCount: 3)

guard let screen = UIApplication.shared.connectedScenes
.compactMap({ ($0 as? UIWindowScene)?.screen })
.first
else {
throw RendererError.metalNotSupported
}

let maxFPS = screen.maximumFramesPerSecond
let cappedFPS = min(maxFPS, 60)
self.minRenderInterval = 1.0 / CFTimeInterval(cappedFPS)
@@ -162,27 +133,29 @@ final class MPVSoftwareRenderer {
throw RendererError.mpvCreationFailed
}
mpv = handle

// Core options
setOption(name: "terminal", value: "yes")
setOption(name: "msg-level", value: "status")
setOption(name: "keep-open", value: "yes")
setOption(name: "idle", value: "yes")
setOption(name: "vo", value: "libmpv")
setOption(name: "hwdec", value: "videotoolbox-copy")
setOption(name: "gpu-api", value: "metal")
setOption(name: "gpu-context", value: "metal")

// Hardware decoding - zero-copy for maximum GPU efficiency
setOption(name: "hwdec", value: "videotoolbox")

// Performance options
setOption(name: "demuxer-thread", value: "yes")
setOption(name: "ytdl", value: "yes")
setOption(name: "profile", value: "fast")
setOption(name: "vd-lavc-threads", value: "8")
setOption(name: "vd-lavc-threads", value: "0") // Auto-detect
setOption(name: "cache", value: "yes")
setOption(name: "demuxer-max-bytes", value: "150M")
setOption(name: "demuxer-readahead-secs", value: "20")

// Subtitle options - use vf=sub to burn subtitles into video frames
// This happens at the filter level, BEFORE the software renderer
// Subtitle options - burn into video frames
setOption(name: "vf", value: "sub")
setOption(name: "sub-visibility", value: "yes")

let initStatus = mpv_initialize(handle)
guard initStatus >= 0 else {
throw RendererError.mpvInitialization(initStatus)
@@ -221,11 +194,7 @@ final class MPVSoftwareRenderer {
}

self.formatDescription = nil
self.preAllocatedBuffers.removeAll()
self.pixelBufferPool = nil
self.poolWidth = 0
self.poolHeight = 0
self.lastRenderDimensions = .zero
self.bufferPool?.invalidate()
}

eventQueueGroup.wait()
@@ -238,14 +207,6 @@ final class MPVSoftwareRenderer {
}
self.mpv = nil

self.preAllocatedBuffers.removeAll()
self.pixelBufferPool = nil
self.pixelBufferPoolAuxAttributes = nil
self.formatDescription = nil
self.poolWidth = 0
self.poolHeight = 0
self.lastRenderDimensions = .zero

self.disposeBag.forEach { $0() }
self.disposeBag.removeAll()
}
@@ -290,23 +251,19 @@ final class MPVSoftwareRenderer {
guard let handle = self.mpv else { return }

self.apply(commands: preset.commands, on: handle)
// Sync stop to ensure previous playback is stopped before loading new file
self.commandSync(handle, ["stop"])
self.updateHTTPHeaders(headers)

// Set start position using property (setOption only works before mpv_initialize)
if let startPos = startPosition, startPos > 0 {
self.setProperty(name: "start", value: String(format: "%.2f", startPos))
} else {
self.setProperty(name: "start", value: "0")
}

// Set initial audio track if specified
if let audioId = self.initialAudioId, audioId > 0 {
self.setAudioTrack(audioId)
}

// Set initial subtitle track if no external subs (external subs change track IDs)
if self.pendingExternalSubtitles.isEmpty {
if let subId = self.initialSubtitleId {
self.setSubtitleTrack(subId)
@@ -314,7 +271,6 @@ final class MPVSoftwareRenderer {
self.disableSubtitles()
}
} else {
// External subs will be added after file loads, set sid then
self.disableSubtitles()
}

@@ -342,6 +298,8 @@ final class MPVSoftwareRenderer {
}
}

// MARK: - MPV Configuration

private func setOption(name: String, value: String) {
guard let handle = mpv else { return }
_ = value.withCString { valuePointer in
@@ -380,16 +338,18 @@ final class MPVSoftwareRenderer {
}

let headerString = headers
.map { key, value in
"\(key): \(value)"
}
.map { key, value in "\(key): \(value)" }
.joined(separator: "\r\n")
setProperty(name: "http-header-fields", value: headerString)
}

// MARK: - Render Context

private func createRenderContext() throws {
guard let handle = mpv else { return }

// Use software rendering API but with our IOSurface-backed Metal textures
// This gives us the frame data while still leveraging hardware decoding
var apiType = MPV_RENDER_API_TYPE_SW
let status = withUnsafePointer(to: &apiType) { apiTypePtr in
var params = [
@@ -410,7 +370,7 @@ final class MPVSoftwareRenderer {

mpv_render_context_set_update_callback(renderContext, { context in
guard let context = context else { return }
let instance = Unmanaged<MPVSoftwareRenderer>.fromOpaque(context).takeUnretainedValue()
let instance = Unmanaged<MPVMetalRenderer>.fromOpaque(context).takeUnretainedValue()
instance.scheduleRender()
}, Unmanaged.passUnretained(self).toOpaque())
}
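Both libmpv callbacks in this file recover self across the C boundary with the same Unmanaged round-trip. Stripped of the mpv specifics, the pattern looks like this (illustrative type, not from the commit):

import Foundation

final class Owner {
    func poke() { print("poked") }
}

// Pass an unretained pointer into a C-style callback context...
let owner = Owner()
let context = Unmanaged.passUnretained(owner).toOpaque()

// ...and recover the instance inside the callback without touching its retain count.
let recovered = Unmanaged<Owner>.fromOpaque(context).takeUnretainedValue()
recovered.poke()
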
@@ -423,7 +383,7 @@ final class MPVSoftwareRenderer {
("duration", MPV_FORMAT_DOUBLE),
("time-pos", MPV_FORMAT_DOUBLE),
("pause", MPV_FORMAT_FLAG),
("track-list/count", MPV_FORMAT_INT64) // Notify when tracks are available
("track-list/count", MPV_FORMAT_INT64)
]

for (name, format) in properties {
@@ -437,7 +397,7 @@ final class MPVSoftwareRenderer {
guard let handle = mpv else { return }
mpv_set_wakeup_callback(handle, { userdata in
guard let userdata else { return }
let instance = Unmanaged<MPVSoftwareRenderer>.fromOpaque(userdata).takeUnretainedValue()
let instance = Unmanaged<MPVMetalRenderer>.fromOpaque(userdata).takeUnretainedValue()
instance.processEvents()
}, Unmanaged.passUnretained(self).toOpaque())
renderQueue.async { [weak self] in
@@ -449,6 +409,8 @@ final class MPVSoftwareRenderer {
}
}

// MARK: - Rendering

private func scheduleRender() {
renderQueue.async { [weak self] in
guard let self, self.isRunning, !self.isStopping else { return }
@@ -491,82 +453,55 @@ final class MPVSoftwareRenderer {
}
}

private var dimensionsArray = [Int32](repeating: 0, count: 2)
private var renderParams = [mpv_render_param](repeating: mpv_render_param(type: MPV_RENDER_PARAM_INVALID, data: nil), count: 5)
private let bgraFormatCString: [CChar] = Array("bgra\0".utf8CString)

private func renderFrame() {
guard let context = renderContext else { return }
guard let context = renderContext, let bufferPool = bufferPool else { return }
let videoSize = currentVideoSize()
guard videoSize.width > 0, videoSize.height > 0 else { return }

let targetSize = targetRenderSize(for: videoSize)
let width = Int(targetSize.width)
let height = Int(targetSize.height)
let width = Int(videoSize.width)
let height = Int(videoSize.height)
guard width > 0, height > 0 else { return }
if lastRenderDimensions != targetSize {
lastRenderDimensions = targetSize
if targetSize != videoSize {
Logger.shared.log("Rendering scaled output at \(width)x\(height) (source \(Int(videoSize.width))x\(Int(videoSize.height)))", type: "Info")
} else {
Logger.shared.log("Rendering output at native size \(width)x\(height)", type: "Info")

// Configure buffer pool if needed
if bufferPool.width != width || bufferPool.height != height {
if !bufferPool.configure(width: width, height: height) {
Logger.shared.log("Failed to configure buffer pool for \(width)x\(height)", type: "Error")
return
}
formatDescription = bufferPool.createFormatDescription()

// Flush display layer on format change
DispatchQueue.main.async { [weak self] in
guard let self else { return }
if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil)
} else {
self.displayLayer.flushAndRemoveImage()
}
}
}

if poolWidth != width || poolHeight != height {
recreatePixelBufferPool(width: width, height: height)
}

var pixelBuffer: CVPixelBuffer?
var status: CVReturn = kCVReturnError

if !preAllocatedBuffers.isEmpty {
pixelBuffer = preAllocatedBuffers.removeFirst()
status = kCVReturnSuccess
} else if let pool = pixelBufferPool {
status = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, pixelBufferPoolAuxAttributes, &pixelBuffer)
}

if status != kCVReturnSuccess || pixelBuffer == nil {
let attrs: [CFString: Any] = [
kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary,
kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferWidthKey: width,
kCVPixelBufferHeightKey: height,
kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA
]
status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, attrs as CFDictionary, &pixelBuffer)
}

guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
Logger.shared.log("Failed to create pixel buffer for rendering (status: \(status))", type: "Error")
guard let pooledBuffer = bufferPool.dequeueBuffer() else {
Logger.shared.log("Failed to dequeue buffer from pool", type: "Error")
return
}

let actualFormat = CVPixelBufferGetPixelFormatType(buffer)
if actualFormat != kCVPixelFormatType_32BGRA {
Logger.shared.log("Pixel buffer format mismatch: expected BGRA (0x42475241), got \(actualFormat)", type: "Error")
}

CVPixelBufferLockBaseAddress(buffer, [])
guard let baseAddress = CVPixelBufferGetBaseAddress(buffer) else {
CVPixelBufferUnlockBaseAddress(buffer, [])
// Render to the IOSurface-backed pixel buffer
// The pixel buffer is Metal-compatible so this render goes through GPU when possible
CVPixelBufferLockBaseAddress(pooledBuffer.pixelBuffer, [])
guard let baseAddress = CVPixelBufferGetBaseAddress(pooledBuffer.pixelBuffer) else {
CVPixelBufferUnlockBaseAddress(pooledBuffer.pixelBuffer, [])
bufferPool.enqueueBuffer(pooledBuffer)
return
}

if shouldClearPixelBuffer {
let bufferDataSize = CVPixelBufferGetDataSize(buffer)
memset(baseAddress, 0, bufferDataSize)
shouldClearPixelBuffer = false
}

dimensionsArray[0] = Int32(width)
dimensionsArray[1] = Int32(height)
let stride = Int32(CVPixelBufferGetBytesPerRow(buffer))
let expectedMinStride = Int32(width * 4)
if stride < expectedMinStride {
Logger.shared.log("Unexpected pixel buffer stride \(stride) < expected \(expectedMinStride) — skipping render to avoid memory corruption", type: "Error")
CVPixelBufferUnlockBaseAddress(buffer, [])
return
}
let stride = Int32(CVPixelBufferGetBytesPerRow(pooledBuffer.pixelBuffer))

let pointerValue = baseAddress
dimensionsArray.withUnsafeMutableBufferPointer { dimsPointer in
@@ -586,136 +521,35 @@ final class MPVSoftwareRenderer {
}
}

CVPixelBufferUnlockBaseAddress(buffer, [])
CVPixelBufferUnlockBaseAddress(pooledBuffer.pixelBuffer, [])

enqueue(buffer: buffer)

if preAllocatedBuffers.count < 4 {
renderQueue.async { [weak self] in
self?.preAllocateBuffers()
}
}
// Enqueue to display layer
enqueue(buffer: pooledBuffer)
}

private func targetRenderSize(for videoSize: CGSize) -> CGSize {
guard videoSize.width > 0, videoSize.height > 0 else { return videoSize }
guard
let screen = UIApplication.shared.connectedScenes
.compactMap({ ($0 as? UIWindowScene)?.screen })
.first
else {
fatalError("⚠️ No active screen found — app may not have a visible window yet.")
}
var scale = screen.scale
if scale <= 0 { scale = 1 }
let maxWidth = max(screen.bounds.width * scale, 1.0)
let maxHeight = max(screen.bounds.height * scale, 1.0)
if maxWidth <= 0 || maxHeight <= 0 {
return videoSize
}
let widthRatio = videoSize.width / maxWidth
let heightRatio = videoSize.height / maxHeight
let ratio = max(widthRatio, heightRatio, 1)
let targetWidth = max(1, Int(videoSize.width / ratio))
let targetHeight = max(1, Int(videoSize.height / ratio))
return CGSize(width: CGFloat(targetWidth), height: CGFloat(targetHeight))
private func nextMonotonicPTS() -> Double {
let currentPos = interpolatedPosition()
let last = lastPTS
let pts = max(currentPos, last + 0.001)
lastPTS = pts
return pts
}

private func createPixelBufferPool(width: Int, height: Int) {
guard width > 0, height > 0 else { return }
private func interpolatedPosition() -> Double {
let basePosition = cachedPosition
let lastUpdate = positionUpdateTime
let paused = isPaused
let speed = playbackSpeed

let pixelFormat = kCVPixelFormatType_32BGRA

let attrs: [CFString: Any] = [
kCVPixelBufferPixelFormatTypeKey: pixelFormat,
kCVPixelBufferWidthKey: width,
kCVPixelBufferHeightKey: height,
kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary,
kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue!,
kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue!
]

let poolAttrs: [CFString: Any] = [
kCVPixelBufferPoolMinimumBufferCountKey: maxPreAllocatedBuffers,
kCVPixelBufferPoolMaximumBufferAgeKey: 0
]

let auxAttrs: [CFString: Any] = [
kCVPixelBufferPoolAllocationThresholdKey: 8
]

var pool: CVPixelBufferPool?
let status = CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttrs as CFDictionary, attrs as CFDictionary, &pool)
if status == kCVReturnSuccess, let pool {
renderQueueSync {
self.pixelBufferPool = pool
self.pixelBufferPoolAuxAttributes = auxAttrs as CFDictionary
self.poolWidth = width
self.poolHeight = height
}

renderQueue.async { [weak self] in
self?.preAllocateBuffers()
}
} else {
Logger.shared.log("Failed to create CVPixelBufferPool (status: \(status))", type: "Error")
guard !paused, lastUpdate > 0 else {
return basePosition
}

let elapsed = CACurrentMediaTime() - lastUpdate
return basePosition + (elapsed * speed)
}

private func recreatePixelBufferPool(width: Int, height: Int) {
renderQueueSync {
self.preAllocatedBuffers.removeAll()
self.pixelBufferPool = nil
self.formatDescription = nil
self.poolWidth = 0
self.poolHeight = 0
}

createPixelBufferPool(width: width, height: height)
}

private func preAllocateBuffers() {
guard DispatchQueue.getSpecific(key: renderQueueKey) != nil else {
renderQueue.async { [weak self] in
self?.preAllocateBuffers()
}
return
}

guard let pool = pixelBufferPool else { return }

let targetCount = min(maxPreAllocatedBuffers, 8)
let currentCount = preAllocatedBuffers.count

guard currentCount < targetCount else { return }

let bufferCount = targetCount - currentCount

for _ in 0..<bufferCount {
var buffer: CVPixelBuffer?
let status = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(
kCFAllocatorDefault,
pool,
pixelBufferPoolAuxAttributes,
&buffer
)

if status == kCVReturnSuccess, let buffer = buffer {
if preAllocatedBuffers.count < maxPreAllocatedBuffers {
preAllocatedBuffers.append(buffer)
}
} else {
if status != kCVReturnWouldExceedAllocationThreshold {
Logger.shared.log("Failed to pre-allocate buffer (status: \(status))", type: "Warn")
}
break
}
}
}

private func enqueue(buffer: CVPixelBuffer) {
let needsFlush = updateFormatDescriptionIfNeeded(for: buffer)
private func enqueue(buffer: IOSurfaceBufferPool.PooledBuffer) {
var shouldNotifyLoadingEnd = false
renderQueueSync {
if self.isLoading {
@@ -723,45 +557,27 @@ final class MPVSoftwareRenderer {
shouldNotifyLoadingEnd = true
}
}
var capturedFormatDescription: CMVideoFormatDescription?
renderQueueSync {
capturedFormatDescription = self.formatDescription
}

guard let formatDescription = capturedFormatDescription else {
Logger.shared.log("Missing formatDescription when creating sample buffer — skipping frame", type: "Error")
guard let formatDescription = formatDescription else {
Logger.shared.log("Missing formatDescription when creating sample buffer", type: "Error")
bufferPool?.enqueueBuffer(buffer)
return
}

// Use interpolated position for smooth PTS (prevents jitter from discrete time-pos updates)
// Use monotonically increasing video position for smooth PTS + working PiP progress
let presentationTime = CMTime(seconds: nextMonotonicPTS(), preferredTimescale: 1000)
var timing = CMSampleTimingInfo(duration: .invalid, presentationTimeStamp: presentationTime, decodeTimeStamp: .invalid)

var sampleBuffer: CMSampleBuffer?
let result = CMSampleBufferCreateForImageBuffer(
allocator: kCFAllocatorDefault,
imageBuffer: buffer,
dataReady: true,
makeDataReadyCallback: nil,
refcon: nil,
guard let sampleBuffer = IOSurfaceBufferPool.createSampleBuffer(
from: buffer.pixelBuffer,
formatDescription: formatDescription,
sampleTiming: &timing,
sampleBufferOut: &sampleBuffer
)

guard result == noErr, let sample = sampleBuffer else {
Logger.shared.log("Failed to create sample buffer (error: \(result), -12743 = invalid format)", type: "Error")

let width = CVPixelBufferGetWidth(buffer)
let height = CVPixelBufferGetHeight(buffer)
let pixelFormat = CVPixelBufferGetPixelFormatType(buffer)
Logger.shared.log("Buffer info: \(width)x\(height), format: \(pixelFormat)", type: "Error")
presentationTime: presentationTime
) else {
bufferPool?.enqueueBuffer(buffer)
return
}

DispatchQueue.main.async { [weak self] in
guard let self else { return }

let (status, error): (AVQueuedSampleBufferRenderingStatus?, Error?) = {
if #available(iOS 18.0, *) {
return (
@@ -775,9 +591,10 @@ final class MPVSoftwareRenderer {
)
}
}()

if status == .failed {
if let error = error {
Logger.shared.log("Display layer in failed state: \(error.localizedDescription)", type: "Error")
Logger.shared.log("Display layer failed: \(error.localizedDescription)", type: "Error")
}
if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil)
@@ -786,86 +603,30 @@ final class MPVSoftwareRenderer {
}
}

if needsFlush {
if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil)
} else {
self.displayLayer.flushAndRemoveImage()
}
self.didFlushForFormatChange = true
} else if self.didFlushForFormatChange {
if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: false, completionHandler: nil)
} else {
self.displayLayer.flush()
}
self.didFlushForFormatChange = false
}

if self.displayLayer.controlTimebase == nil {
var timebase: CMTimebase?
if CMTimebaseCreateWithSourceClock(allocator: kCFAllocatorDefault, sourceClock: CMClockGetHostTimeClock(), timebaseOut: &timebase) == noErr, let timebase {
// Set rate based on current pause state and playback speed
CMTimebaseSetRate(timebase, rate: self.isPaused ? 0 : self.playbackSpeed)
CMTimebaseSetTime(timebase, time: presentationTime)
self.displayLayer.controlTimebase = timebase
} else {
Logger.shared.log("Failed to create control timebase", type: "Error")
}
}

if shouldNotifyLoadingEnd {
self.delegate?.renderer(self, didChangeLoading: false)
}

if #available(iOS 18.0, *) {
self.displayLayer.sampleBufferRenderer.enqueue(sample)
self.displayLayer.sampleBufferRenderer.enqueue(sampleBuffer)
} else {
self.displayLayer.enqueue(sample)
}
}
}

private func updateFormatDescriptionIfNeeded(for buffer: CVPixelBuffer) -> Bool {
var didChange = false
let width = Int32(CVPixelBufferGetWidth(buffer))
let height = Int32(CVPixelBufferGetHeight(buffer))
let pixelFormat = CVPixelBufferGetPixelFormatType(buffer)

renderQueueSync {
var needsRecreate = false

if let description = formatDescription {
let currentDimensions = CMVideoFormatDescriptionGetDimensions(description)
let currentPixelFormat = CMFormatDescriptionGetMediaSubType(description)

if currentDimensions.width != width ||
currentDimensions.height != height ||
currentPixelFormat != pixelFormat {
needsRecreate = true
}
} else {
needsRecreate = true
self.displayLayer.enqueue(sampleBuffer)
}

if needsRecreate {
var newDescription: CMVideoFormatDescription?

let status = CMVideoFormatDescriptionCreateForImageBuffer(
allocator: kCFAllocatorDefault,
imageBuffer: buffer,
formatDescriptionOut: &newDescription
)

if status == noErr, let newDescription = newDescription {
formatDescription = newDescription
didChange = true
Logger.shared.log("Created new format description: \(width)x\(height), format: \(pixelFormat)", type: "Info")
} else {
Logger.shared.log("Failed to create format description (status: \(status))", type: "Error")
}
// Return buffer to pool after a short delay to ensure it's been displayed
DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { [weak self] in
self?.bufferPool?.enqueueBuffer(buffer)
}
}
return didChange
}

private func renderQueueSync(_ block: () -> Void) {
@@ -877,9 +638,7 @@ final class MPVSoftwareRenderer {
}

private func currentVideoSize() -> CGSize {
stateQueue.sync {
videoSize
}
stateQueue.sync { videoSize }
}

private func updateVideoSize(width: Int, height: Int) {
@@ -887,15 +646,10 @@ final class MPVSoftwareRenderer {
stateQueue.async(flags: .barrier) {
self.videoSize = size
}
renderQueue.async { [weak self] in
guard let self else { return }

if self.poolWidth != width || self.poolHeight != height {
self.recreatePixelBufferPool(width: max(width, 0), height: max(height, 0))
}
}
}

// MARK: - Commands

private func apply(commands: [[String]], on handle: OpaquePointer) {
for command in commands {
guard !command.isEmpty else { continue }
@@ -903,7 +657,6 @@ final class MPVSoftwareRenderer {
}
}

/// Async command - returns immediately, mpv processes later
private func command(_ handle: OpaquePointer, _ args: [String]) {
guard !args.isEmpty else { return }
_ = withCStringArray(args) { pointer in
@@ -911,7 +664,7 @@ final class MPVSoftwareRenderer {
}
}

/// Sync command - waits for mpv to process before returning
@discardableResult
private func commandSync(_ handle: OpaquePointer, _ args: [String]) -> Int32 {
guard !args.isEmpty else { return -1 }
return withCStringArray(args) { pointer in
@@ -919,6 +672,8 @@ final class MPVSoftwareRenderer {
}
}

// MARK: - Event Processing

private func processEvents() {
eventQueueGroup.enter()
let group = eventQueueGroup
@@ -941,7 +696,6 @@ final class MPVSoftwareRenderer {
case MPV_EVENT_VIDEO_RECONFIG:
refreshVideoState()
case MPV_EVENT_FILE_LOADED:
// Add external subtitles now that the file is loaded
let hadExternalSubs = !pendingExternalSubtitles.isEmpty
if hadExternalSubs, let handle = mpv {
for subUrl in pendingExternalSubtitles {
@@ -949,7 +703,6 @@ final class MPVSoftwareRenderer {
}
pendingExternalSubtitles = []

// Set subtitle after external subs are added (track IDs have changed)
if let subId = initialSubtitleId {
setSubtitleTrack(subId)
} else {
@@ -1007,13 +760,12 @@ final class MPVSoftwareRenderer {
delegate?.renderer(self, didUpdatePosition: cachedPosition, duration: cachedDuration)
}
case "time-pos":
// Skip updates while seeking to prevent race condition
guard !isSeeking else { return }
var value = Double(0)
let status = getProperty(handle: handle, name: name, format: MPV_FORMAT_DOUBLE, value: &value)
if status >= 0 {
cachedPosition = value
positionUpdateTime = CACurrentMediaTime() // Record when we got this update
positionUpdateTime = CACurrentMediaTime()
delegate?.renderer(self, didUpdatePosition: cachedPosition, duration: cachedDuration)
}
case "pause":
@@ -1023,7 +775,6 @@ final class MPVSoftwareRenderer {
let newPaused = flag != 0
if newPaused != isPaused {
isPaused = newPaused
// Update timebase rate - use playbackSpeed when playing, 0 when paused
let speed = self.playbackSpeed
DispatchQueue.main.async { [weak self] in
if let timebase = self?.displayLayer.controlTimebase {
@@ -1090,6 +841,7 @@ final class MPVSoftwareRenderer {
}

// MARK: - Playback Controls

func play() {
setProperty(name: "pause", value: "no")
}
@@ -1106,18 +858,13 @@ final class MPVSoftwareRenderer {
guard let handle = mpv else { return }
let clamped = max(0, seconds)
let wasPaused = isPaused
// Prevent time-pos updates from overwriting during seek
isSeeking = true
// Update cached position BEFORE seek so new frames get correct timestamp
cachedPosition = clamped
positionUpdateTime = CACurrentMediaTime() // Reset interpolation base
lastPTS = clamped // Reset monotonic PTS to new position
// Update timebase to match new position (sets rate to 1 for frame display)
positionUpdateTime = CACurrentMediaTime()
lastPTS = clamped
syncTimebase(to: clamped)
// Sync seek for accurate positioning
commandSync(handle, ["seek", String(clamped), "absolute"])
isSeeking = false
// Restore paused rate after seek completes
if wasPaused {
restoreTimebaseRate()
}
@@ -1126,19 +873,14 @@ final class MPVSoftwareRenderer {
func seek(by seconds: Double) {
guard let handle = mpv else { return }
let wasPaused = isPaused
// Prevent time-pos updates from overwriting during seek
isSeeking = true
// Update cached position BEFORE seek
let newPosition = max(0, cachedPosition + seconds)
cachedPosition = newPosition
positionUpdateTime = CACurrentMediaTime() // Reset interpolation base
lastPTS = newPosition // Reset monotonic PTS to new position
// Update timebase to match new position (sets rate to 1 for frame display)
positionUpdateTime = CACurrentMediaTime()
lastPTS = newPosition
syncTimebase(to: newPosition)
// Sync seek for accurate positioning
commandSync(handle, ["seek", String(seconds), "relative"])
isSeeking = false
// Restore paused rate after seek completes
if wasPaused {
restoreTimebaseRate()
}
@@ -1157,17 +899,13 @@ final class MPVSoftwareRenderer {
let speed = playbackSpeed
let doWork = { [weak self] in
guard let self = self else { return }
// Flush old frames to avoid "old frames with new clock" mismatches
if #available(iOS 17.0, *) {
self.displayLayer.sampleBufferRenderer.flush(removingDisplayedImage: false, completionHandler: nil)
} else {
self.displayLayer.flush()
}
if let timebase = self.displayLayer.controlTimebase {
// Update timebase to new position
CMTimebaseSetTime(timebase, time: CMTime(seconds: position, preferredTimescale: 1000))
// Set rate to playback speed during seek to ensure frame displays
// restoreTimebaseRate() will set it back to 0 if paused
CMTimebaseSetRate(timebase, rate: speed)
}
}
@@ -1179,7 +917,6 @@ final class MPVSoftwareRenderer {
}
}

/// Sync timebase with current position without flushing (for smooth PiP transitions)
func syncTimebase() {
DispatchQueue.main.async { [weak self] in
guard let self = self else { return }
@@ -1193,7 +930,6 @@ final class MPVSoftwareRenderer {
func setSpeed(_ speed: Double) {
playbackSpeed = speed
setProperty(name: "speed", value: String(speed))
// Sync timebase rate with playback speed
DispatchQueue.main.async { [weak self] in
guard let self = self,
let timebase = self.displayLayer.controlTimebase else { return }
@@ -1212,10 +948,7 @@ final class MPVSoftwareRenderer {
// MARK: - Subtitle Controls

func getSubtitleTracks() -> [[String: Any]] {
guard let handle = mpv else {
Logger.shared.log("getSubtitleTracks: mpv handle is nil", type: "Warn")
return []
}
guard let handle = mpv else { return [] }
var tracks: [[String: Any]] = []

var trackCount: Int64 = 0
@@ -1246,24 +979,14 @@ final class MPVSoftwareRenderer {
getProperty(handle: handle, name: "track-list/\(i)/selected", format: MPV_FORMAT_FLAG, value: &selected)
track["selected"] = selected != 0

Logger.shared.log("getSubtitleTracks: found sub track id=\(trackId), title=\(track["title"] ?? "none"), lang=\(track["lang"] ?? "none")", type: "Info")
tracks.append(track)
}

Logger.shared.log("getSubtitleTracks: returning \(tracks.count) subtitle tracks", type: "Info")
return tracks
}

func setSubtitleTrack(_ trackId: Int) {
Logger.shared.log("setSubtitleTrack: setting sid to \(trackId)", type: "Info")
guard let handle = mpv else {
Logger.shared.log("setSubtitleTrack: mpv handle is nil!", type: "Error")
return
}

// Use setProperty for synchronous behavior (command is async)
if trackId < 0 {
// Disable subtitles
setProperty(name: "sid", value: "no")
} else {
setProperty(name: "sid", value: String(trackId))
@@ -1283,7 +1006,6 @@ final class MPVSoftwareRenderer {

func addSubtitleFile(url: String, select: Bool = true) {
guard let handle = mpv else { return }
// "cached" adds without selecting, "select" adds and selects
let flag = select ? "select" : "cached"
commandSync(handle, ["sub-add", url, flag])
}
@@ -1317,10 +1039,7 @@ final class MPVSoftwareRenderer {
// MARK: - Audio Track Controls

func getAudioTracks() -> [[String: Any]] {
guard let handle = mpv else {
Logger.shared.log("getAudioTracks: mpv handle is nil", type: "Warn")
return []
}
guard let handle = mpv else { return [] }
var tracks: [[String: Any]] = []

var trackCount: Int64 = 0
@@ -1361,22 +1080,13 @@ final class MPVSoftwareRenderer {
getProperty(handle: handle, name: "track-list/\(i)/selected", format: MPV_FORMAT_FLAG, value: &selected)
track["selected"] = selected != 0

Logger.shared.log("getAudioTracks: found audio track id=\(trackId), title=\(track["title"] ?? "none"), lang=\(track["lang"] ?? "none")", type: "Info")
tracks.append(track)
}

Logger.shared.log("getAudioTracks: returning \(tracks.count) audio tracks", type: "Info")
return tracks
}

func setAudioTrack(_ trackId: Int) {
guard let handle = mpv else {
Logger.shared.log("setAudioTrack: mpv handle is nil", type: "Warn")
return
}
Logger.shared.log("setAudioTrack: setting aid to \(trackId)", type: "Info")

// Use setProperty for synchronous behavior
setProperty(name: "aid", value: String(trackId))
}

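Throughout the renderer, the display layer's clock is driven through its control timebase: rate 0 while paused, the playback speed while playing, and a time jump on seek. A minimal sketch of that pattern, assuming a layer whose controlTimebase has already been created and attached (the helper below is illustrative, not from the commit):

import AVFoundation
import CoreMedia

// Illustrative helper (not part of the commit).
func updateClock(on layer: AVSampleBufferDisplayLayer,
                 position: Double, speed: Double, isPaused: Bool) {
    guard let timebase = layer.controlTimebase else { return }
    // Jump the clock to the new position (e.g. after a seek)...
    CMTimebaseSetTime(timebase, time: CMTime(seconds: position, preferredTimescale: 1000))
    // ...and let it run at the playback speed, or freeze it while paused.
    CMTimebaseSetRate(timebase, rate: isPaused ? 0 : speed)
}
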
@@ -4,25 +4,7 @@ public class MpvPlayerModule: Module {
public func definition() -> ModuleDefinition {
Name("MpvPlayer")

// Defines event names that the module can send to JavaScript.
Events("onChange")

// Defines a JavaScript synchronous function that runs the native code on the JavaScript thread.
Function("hello") {
return "Hello from MPV Player! 👋"
}

// Defines a JavaScript function that always returns a Promise and whose native code
// is by default dispatched on the different thread than the JavaScript runtime runs on.
AsyncFunction("setValueAsync") { (value: String) in
// Send an event to JavaScript.
self.sendEvent("onChange", [
"value": value
])
}

// Enables the module to be used as a native view. Definition components that are accepted as part of the
// view definition: Prop, Events.
// Enables the module to be used as a native view
View(MpvPlayerView.self) {
// All video load options are passed via a single "source" prop
Prop("source") { (view: MpvPlayerView, source: [String: Any]?) in
@@ -43,52 +25,44 @@ public class MpvPlayerModule: Module {
view.loadVideo(config: config)
}

// Async function to play video
// Playback controls
AsyncFunction("play") { (view: MpvPlayerView) in
view.play()
}

// Async function to pause video
AsyncFunction("pause") { (view: MpvPlayerView) in
view.pause()
}

// Async function to seek to position
AsyncFunction("seekTo") { (view: MpvPlayerView, position: Double) in
view.seekTo(position: position)
}

// Async function to seek by offset
AsyncFunction("seekBy") { (view: MpvPlayerView, offset: Double) in
view.seekBy(offset: offset)
}

// Async function to set playback speed
AsyncFunction("setSpeed") { (view: MpvPlayerView, speed: Double) in
view.setSpeed(speed: speed)
}

// Function to get current speed
AsyncFunction("getSpeed") { (view: MpvPlayerView) -> Double in
return view.getSpeed()
}

// Function to check if paused
AsyncFunction("isPaused") { (view: MpvPlayerView) -> Bool in
return view.isPaused()
}

// Function to get current position
AsyncFunction("getCurrentPosition") { (view: MpvPlayerView) -> Double in
return view.getCurrentPosition()
}

// Function to get duration
AsyncFunction("getDuration") { (view: MpvPlayerView) -> Double in
return view.getDuration()
}

// Picture in Picture functions
// Picture in Picture
AsyncFunction("startPictureInPicture") { (view: MpvPlayerView) in
view.startPictureInPicture()
}
@@ -126,7 +100,7 @@ public class MpvPlayerModule: Module {
view.addSubtitleFile(url: url, select: select)
}

// Subtitle positioning functions
// Subtitle positioning
AsyncFunction("setSubtitlePosition") { (view: MpvPlayerView, position: Int) in
view.setSubtitlePosition(position)
}
@@ -164,7 +138,7 @@ public class MpvPlayerModule: Module {
return view.getCurrentAudioTrack()
}

// Defines events that the view can send to JavaScript
// Events that the view can send to JavaScript
Events("onLoad", "onPlaybackStateChange", "onProgress", "onError", "onTracksReady")
}
}

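For orientation, the view functions defined above map one-to-one onto the imperative API the JS wrapper exposes. A minimal sketch of driving playback from application code (not part of this commit; the import path, the seconds unit, and the assumption that the ref forwards `play`/`seekTo` like the native functions above are mine):

```typescript
// Sketch only — not part of this commit.
import type { MpvPlayerViewRef } from "../modules/mpv-player";

export async function skipIntro(player: MpvPlayerViewRef, introEndSeconds = 90) {
  const duration = await player.getDuration(); // 0 until a file has loaded
  if (duration > introEndSeconds) {
    await player.seekTo(introEndSeconds);      // absolute position, assumed to be seconds
    await player.play();
  }
}
```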
@@ -5,393 +5,375 @@ import UIKit

/// Configuration for loading a video
struct VideoLoadConfig {
let url: URL
var headers: [String: String]?
var externalSubtitles: [String]?
var startPosition: Double?
var autoplay: Bool
/// MPV subtitle track ID to select on start (1-based, -1 to disable, nil to use default)
var initialSubtitleId: Int?
/// MPV audio track ID to select on start (1-based, nil to use default)
var initialAudioId: Int?

init(
url: URL,
headers: [String: String]? = nil,
externalSubtitles: [String]? = nil,
startPosition: Double? = nil,
autoplay: Bool = true,
initialSubtitleId: Int? = nil,
initialAudioId: Int? = nil
) {
self.url = url
self.headers = headers
self.externalSubtitles = externalSubtitles
self.startPosition = startPosition
self.autoplay = autoplay
self.initialSubtitleId = initialSubtitleId
self.initialAudioId = initialAudioId
}
let url: URL
var headers: [String: String]?
var externalSubtitles: [String]?
var startPosition: Double?
var autoplay: Bool
/// MPV subtitle track ID to select on start (1-based, -1 to disable, nil to use default)
var initialSubtitleId: Int?
/// MPV audio track ID to select on start (1-based, nil to use default)
var initialAudioId: Int?

init(
url: URL,
headers: [String: String]? = nil,
externalSubtitles: [String]? = nil,
startPosition: Double? = nil,
autoplay: Bool = true,
initialSubtitleId: Int? = nil,
initialAudioId: Int? = nil
) {
self.url = url
self.headers = headers
self.externalSubtitles = externalSubtitles
self.startPosition = startPosition
self.autoplay = autoplay
self.initialSubtitleId = initialSubtitleId
self.initialAudioId = initialAudioId
}
}

// This view will be used as a native component. Make sure to inherit from `ExpoView`
// to apply the proper styling (e.g. border radius and shadows).
class MpvPlayerView: ExpoView {
private let displayLayer = AVSampleBufferDisplayLayer()
private var renderer: MPVSoftwareRenderer?
private var videoContainer: UIView!
private var pipController: PiPController?
private let displayLayer = AVSampleBufferDisplayLayer()
private var renderer: MPVMetalRenderer?
private var videoContainer: UIView!
private var pipController: PiPController?

let onLoad = EventDispatcher()
let onPlaybackStateChange = EventDispatcher()
let onProgress = EventDispatcher()
let onError = EventDispatcher()
let onTracksReady = EventDispatcher()
let onLoad = EventDispatcher()
let onPlaybackStateChange = EventDispatcher()
let onProgress = EventDispatcher()
let onError = EventDispatcher()
let onTracksReady = EventDispatcher()

private var currentURL: URL?
private var cachedPosition: Double = 0
private var cachedDuration: Double = 0
private var intendedPlayState: Bool = false // For PiP - ignores transient states during seek
private var currentURL: URL?
private var cachedPosition: Double = 0
private var cachedDuration: Double = 0
private var intendedPlayState: Bool = false

required init(appContext: AppContext? = nil) {
super.init(appContext: appContext)
setupView()
}
required init(appContext: AppContext? = nil) {
super.init(appContext: appContext)
setupView()
}

private func setupView() {
clipsToBounds = true
backgroundColor = .black
private func setupView() {
clipsToBounds = true
backgroundColor = .black

videoContainer = UIView()
videoContainer.translatesAutoresizingMaskIntoConstraints = false
videoContainer.backgroundColor = .black
videoContainer.clipsToBounds = true
addSubview(videoContainer)
videoContainer = UIView()
videoContainer.translatesAutoresizingMaskIntoConstraints = false
videoContainer.backgroundColor = .black
videoContainer.clipsToBounds = true
addSubview(videoContainer)

displayLayer.frame = bounds
displayLayer.videoGravity = .resizeAspect
if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
displayLayer.backgroundColor = UIColor.black.cgColor
videoContainer.layer.addSublayer(displayLayer)
displayLayer.frame = bounds
displayLayer.videoGravity = .resizeAspect
if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
displayLayer.backgroundColor = UIColor.black.cgColor
videoContainer.layer.addSublayer(displayLayer)

NSLayoutConstraint.activate([
videoContainer.topAnchor.constraint(equalTo: topAnchor),
videoContainer.leadingAnchor.constraint(equalTo: leadingAnchor),
videoContainer.trailingAnchor.constraint(equalTo: trailingAnchor),
videoContainer.bottomAnchor.constraint(equalTo: bottomAnchor)
])
NSLayoutConstraint.activate([
videoContainer.topAnchor.constraint(equalTo: topAnchor),
videoContainer.leadingAnchor.constraint(equalTo: leadingAnchor),
videoContainer.trailingAnchor.constraint(equalTo: trailingAnchor),
videoContainer.bottomAnchor.constraint(equalTo: bottomAnchor)
])

renderer = MPVSoftwareRenderer(displayLayer: displayLayer)
renderer?.delegate = self
do {
renderer = try MPVMetalRenderer(displayLayer: displayLayer)
renderer?.delegate = self
try renderer?.start()
} catch MPVMetalRenderer.RendererError.metalNotSupported {
onError(["error": "Metal is not supported on this device"])
} catch {
onError(["error": "Failed to start renderer: \(error.localizedDescription)"])
}

// Setup PiP
pipController = PiPController(sampleBufferDisplayLayer: displayLayer)
pipController?.delegate = self
// Setup PiP
pipController = PiPController(sampleBufferDisplayLayer: displayLayer)
pipController?.delegate = self
}

do {
try renderer?.start()
} catch {
onError(["error": "Failed to start renderer: \(error.localizedDescription)"])
}
}
override func layoutSubviews() {
super.layoutSubviews()
CATransaction.begin()
CATransaction.setDisableActions(true)
displayLayer.frame = videoContainer.bounds
displayLayer.isHidden = false
displayLayer.opacity = 1.0
CATransaction.commit()
}

override func layoutSubviews() {
super.layoutSubviews()
CATransaction.begin()
CATransaction.setDisableActions(true)
displayLayer.frame = videoContainer.bounds
displayLayer.isHidden = false
displayLayer.opacity = 1.0
CATransaction.commit()
}
func loadVideo(config: VideoLoadConfig) {
// Skip reload if same URL is already playing
if currentURL == config.url {
return
}
currentURL = config.url

func loadVideo(config: VideoLoadConfig) {
// Skip reload if same URL is already playing
if currentURL == config.url {
return
}
currentURL = config.url
let preset = PlayerPreset(
id: .sdrRec709,
title: "Default",
summary: "Default playback preset",
stream: nil,
commands: []
)

let preset = PlayerPreset(
id: .sdrRec709,
title: "Default",
summary: "Default playback preset",
stream: nil,
commands: []
)
// Pass everything to the renderer
renderer?.load(
url: config.url,
with: preset,
headers: config.headers,
startPosition: config.startPosition,
externalSubtitles: config.externalSubtitles,
initialSubtitleId: config.initialSubtitleId,
initialAudioId: config.initialAudioId
)

if config.autoplay {
play()
}

onLoad(["url": config.url.absoluteString])
}

// Convenience method for simple loads
func loadVideo(url: URL, headers: [String: String]? = nil) {
loadVideo(config: VideoLoadConfig(url: url, headers: headers))
}

// Pass everything to the renderer - it handles start position and external subs
renderer?.load(
url: config.url,
with: preset,
headers: config.headers,
startPosition: config.startPosition,
externalSubtitles: config.externalSubtitles,
initialSubtitleId: config.initialSubtitleId,
initialAudioId: config.initialAudioId
)

if config.autoplay {
play()
}

onLoad(["url": config.url.absoluteString])
}

// Convenience method for simple loads
func loadVideo(url: URL, headers: [String: String]? = nil) {
loadVideo(config: VideoLoadConfig(url: url, headers: headers))
}
func play() {
intendedPlayState = true
renderer?.play()
pipController?.updatePlaybackState()
}

func play() {
intendedPlayState = true
renderer?.play()
pipController?.updatePlaybackState()
}
func pause() {
intendedPlayState = false
renderer?.pausePlayback()
pipController?.updatePlaybackState()
}

func pause() {
intendedPlayState = false
renderer?.pausePlayback()
pipController?.updatePlaybackState()
}
func seekTo(position: Double) {
renderer?.seek(to: position)
}

func seekTo(position: Double) {
renderer?.seek(to: position)
}
func seekBy(offset: Double) {
renderer?.seek(by: offset)
}

func seekBy(offset: Double) {
renderer?.seek(by: offset)
}
func setSpeed(speed: Double) {
renderer?.setSpeed(speed)
}

func setSpeed(speed: Double) {
renderer?.setSpeed(speed)
}
func getSpeed() -> Double {
return renderer?.getSpeed() ?? 1.0
}

func getSpeed() -> Double {
return renderer?.getSpeed() ?? 1.0
}
func isPaused() -> Bool {
return renderer?.isPausedState ?? true
}

func isPaused() -> Bool {
return renderer?.isPausedState ?? true
}
func getCurrentPosition() -> Double {
return cachedPosition
}

func getCurrentPosition() -> Double {
return cachedPosition
}
func getDuration() -> Double {
return cachedDuration
}

func getDuration() -> Double {
return cachedDuration
}
// MARK: - Picture in Picture

// MARK: - Picture in Picture
func startPictureInPicture() {
pipController?.startPictureInPicture()
}

func startPictureInPicture() {
print("🎬 MpvPlayerView: startPictureInPicture called")
print("🎬 Duration: \(getDuration()), IsPlaying: \(!isPaused())")
pipController?.startPictureInPicture()
}
func stopPictureInPicture() {
pipController?.stopPictureInPicture()
}

func stopPictureInPicture() {
pipController?.stopPictureInPicture()
}
func isPictureInPictureSupported() -> Bool {
return pipController?.isPictureInPictureSupported ?? false
}

func isPictureInPictureSupported() -> Bool {
return pipController?.isPictureInPictureSupported ?? false
}
func isPictureInPictureActive() -> Bool {
return pipController?.isPictureInPictureActive ?? false
}

// MARK: - Subtitle Controls

func getSubtitleTracks() -> [[String: Any]] {
return renderer?.getSubtitleTracks() ?? []
}

func setSubtitleTrack(_ trackId: Int) {
renderer?.setSubtitleTrack(trackId)
}

func disableSubtitles() {
renderer?.disableSubtitles()
}

func getCurrentSubtitleTrack() -> Int {
return renderer?.getCurrentSubtitleTrack() ?? 0
}

func addSubtitleFile(url: String, select: Bool = true) {
renderer?.addSubtitleFile(url: url, select: select)
}

// MARK: - Audio Track Controls

func getAudioTracks() -> [[String: Any]] {
return renderer?.getAudioTracks() ?? []
}

func setAudioTrack(_ trackId: Int) {
renderer?.setAudioTrack(trackId)
}

func getCurrentAudioTrack() -> Int {
return renderer?.getCurrentAudioTrack() ?? 0
}

// MARK: - Subtitle Positioning

func setSubtitlePosition(_ position: Int) {
renderer?.setSubtitlePosition(position)
}

func setSubtitleScale(_ scale: Double) {
renderer?.setSubtitleScale(scale)
}

func setSubtitleMarginY(_ margin: Int) {
renderer?.setSubtitleMarginY(margin)
}

func setSubtitleAlignX(_ alignment: String) {
renderer?.setSubtitleAlignX(alignment)
}

func setSubtitleAlignY(_ alignment: String) {
renderer?.setSubtitleAlignY(alignment)
}

func setSubtitleFontSize(_ size: Int) {
renderer?.setSubtitleFontSize(size)
}

func isPictureInPictureActive() -> Bool {
return pipController?.isPictureInPictureActive ?? false
}

// MARK: - Subtitle Controls

func getSubtitleTracks() -> [[String: Any]] {
return renderer?.getSubtitleTracks() ?? []
}

func setSubtitleTrack(_ trackId: Int) {
renderer?.setSubtitleTrack(trackId)
}

func disableSubtitles() {
renderer?.disableSubtitles()
}

func getCurrentSubtitleTrack() -> Int {
return renderer?.getCurrentSubtitleTrack() ?? 0
}

func addSubtitleFile(url: String, select: Bool = true) {
renderer?.addSubtitleFile(url: url, select: select)
}

// MARK: - Audio Track Controls

func getAudioTracks() -> [[String: Any]] {
return renderer?.getAudioTracks() ?? []
}

func setAudioTrack(_ trackId: Int) {
renderer?.setAudioTrack(trackId)
}

func getCurrentAudioTrack() -> Int {
return renderer?.getCurrentAudioTrack() ?? 0
}

// MARK: - Subtitle Positioning

func setSubtitlePosition(_ position: Int) {
renderer?.setSubtitlePosition(position)
}

func setSubtitleScale(_ scale: Double) {
renderer?.setSubtitleScale(scale)
}

func setSubtitleMarginY(_ margin: Int) {
renderer?.setSubtitleMarginY(margin)
}

func setSubtitleAlignX(_ alignment: String) {
renderer?.setSubtitleAlignX(alignment)
}

func setSubtitleAlignY(_ alignment: String) {
renderer?.setSubtitleAlignY(alignment)
}

func setSubtitleFontSize(_ size: Int) {
renderer?.setSubtitleFontSize(size)
}

deinit {
pipController?.stopPictureInPicture()
renderer?.stop()
displayLayer.removeFromSuperlayer()
}
deinit {
pipController?.stopPictureInPicture()
renderer?.stop()
displayLayer.removeFromSuperlayer()
}
}

// MARK: - MPVSoftwareRendererDelegate
// MARK: - MPVMetalRendererDelegate

extension MpvPlayerView: MPVSoftwareRendererDelegate {
func renderer(_: MPVSoftwareRenderer, didUpdatePosition position: Double, duration: Double) {
cachedPosition = position
cachedDuration = duration

DispatchQueue.main.async { [weak self] in
guard let self else { return }
// Only update PiP state when PiP is active
if self.pipController?.isPictureInPictureActive == true {
self.pipController?.updatePlaybackState()
}

self.onProgress([
"position": position,
"duration": duration,
"progress": duration > 0 ? position / duration : 0,
])
}
}
extension MpvPlayerView: MPVMetalRendererDelegate {
func renderer(_: MPVMetalRenderer, didUpdatePosition position: Double, duration: Double) {
cachedPosition = position
cachedDuration = duration

DispatchQueue.main.async { [weak self] in
guard let self else { return }
if self.pipController?.isPictureInPictureActive == true {
self.pipController?.updatePlaybackState()
}

self.onProgress([
"position": position,
"duration": duration,
"progress": duration > 0 ? position / duration : 0,
])
}
}

func renderer(_: MPVSoftwareRenderer, didChangePause isPaused: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
// Don't update intendedPlayState here - it's only set by user actions (play/pause)
// This prevents PiP UI flicker during seeking
self.onPlaybackStateChange([
"isPaused": isPaused,
"isPlaying": !isPaused,
])
// Note: Don't call updatePlaybackState() here to avoid flicker
// PiP queries pipControllerIsPlaying when it needs the state
}
}
func renderer(_: MPVMetalRenderer, didChangePause isPaused: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isPaused": isPaused,
"isPlaying": !isPaused,
])
}
}

func renderer(_: MPVSoftwareRenderer, didChangeLoading isLoading: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isLoading": isLoading,
])
}
}
func renderer(_: MPVMetalRenderer, didChangeLoading isLoading: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isLoading": isLoading,
])
}
}

func renderer(_: MPVSoftwareRenderer, didBecomeReadyToSeek: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isReadyToSeek": didBecomeReadyToSeek,
])
}
}

func renderer(_: MPVSoftwareRenderer, didBecomeTracksReady: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onTracksReady([:])
}
}
func renderer(_: MPVMetalRenderer, didBecomeReadyToSeek: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onPlaybackStateChange([
"isReadyToSeek": didBecomeReadyToSeek,
])
}
}

func renderer(_: MPVMetalRenderer, didBecomeTracksReady: Bool) {
DispatchQueue.main.async { [weak self] in
guard let self else { return }
self.onTracksReady([:])
}
}
}

// MARK: - PiPControllerDelegate

extension MpvPlayerView: PiPControllerDelegate {
func pipController(_ controller: PiPController, willStartPictureInPicture: Bool) {
print("PiP will start")
// Sync timebase before PiP starts for smooth transition
renderer?.syncTimebase()
pipController?.updatePlaybackState()
}

func pipController(_ controller: PiPController, didStartPictureInPicture: Bool) {
print("PiP did start: \(didStartPictureInPicture)")
pipController?.updatePlaybackState()
}

func pipController(_ controller: PiPController, willStopPictureInPicture: Bool) {
print("PiP will stop")
// Sync timebase before returning from PiP
renderer?.syncTimebase()
}

func pipController(_ controller: PiPController, didStopPictureInPicture: Bool) {
print("PiP did stop")
// Ensure timebase is synced after PiP ends
renderer?.syncTimebase()
pipController?.updatePlaybackState()
}

func pipController(_ controller: PiPController, restoreUserInterfaceForPictureInPictureStop completionHandler: @escaping (Bool) -> Void) {
print("PiP restore user interface")
completionHandler(true)
}

func pipControllerPlay(_ controller: PiPController) {
print("PiP play requested")
play()
}

func pipControllerPause(_ controller: PiPController) {
print("PiP pause requested")
pause()
}

func pipController(_ controller: PiPController, skipByInterval interval: CMTime) {
let seconds = CMTimeGetSeconds(interval)
print("PiP skip by interval: \(seconds)")
let target = max(0, cachedPosition + seconds)
seekTo(position: target)
}

func pipControllerIsPlaying(_ controller: PiPController) -> Bool {
// Use intended state to ignore transient pauses during seeking
return intendedPlayState
}

func pipControllerDuration(_ controller: PiPController) -> Double {
return getDuration()
}
func pipController(_ controller: PiPController, willStartPictureInPicture: Bool) {
renderer?.syncTimebase()
pipController?.updatePlaybackState()
}

func pipController(_ controller: PiPController, didStartPictureInPicture: Bool) {
pipController?.updatePlaybackState()
}

func pipController(_ controller: PiPController, willStopPictureInPicture: Bool) {
renderer?.syncTimebase()
}

func pipController(_ controller: PiPController, didStopPictureInPicture: Bool) {
renderer?.syncTimebase()
pipController?.updatePlaybackState()
}

func pipController(_ controller: PiPController, restoreUserInterfaceForPictureInPictureStop completionHandler: @escaping (Bool) -> Void) {
completionHandler(true)
}

func pipControllerPlay(_ controller: PiPController) {
play()
}

func pipControllerPause(_ controller: PiPController) {
pause()
}

func pipController(_ controller: PiPController, skipByInterval interval: CMTime) {
let seconds = CMTimeGetSeconds(interval)
let target = max(0, cachedPosition + seconds)
seekTo(position: target)
}

func pipControllerIsPlaying(_ controller: PiPController) -> Bool {
return intendedPlayState
}

func pipControllerDuration(_ controller: PiPController) -> Double {
return getDuration()
}
}

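The module exposes both a PiP support check and start/stop calls to JavaScript. A typical guard on the JS side might look like the following sketch (not part of this commit; the import path is an assumption):

```typescript
// Sketch only — not part of this commit.
import type { MpvPlayerViewRef } from "../modules/mpv-player";

export async function enterPictureInPicture(player: MpvPlayerViewRef): Promise<boolean> {
  // isPictureInPictureSupported() resolves to false when the view or device cannot do PiP.
  if (!(await player.isPictureInPictureSupported())) return false;
  await player.startPictureInPicture();
  return true;
}
```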
@@ -103,7 +103,7 @@ extension PiPController: AVPictureInPictureControllerDelegate {
}

func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, failedToStartPictureInPictureWithError error: Error) {
print("Failed to start PiP: \(error)")
Logger.shared.log("Failed to start PiP: \(error.localizedDescription)", type: "Error")
delegate?.pipController(self, didStartPictureInPicture: false)
}

@@ -169,4 +169,4 @@ extension PiPController: AVPictureInPictureSampleBufferPlaybackDelegate {
}
completion()
}
}
}

@@ -1,72 +0,0 @@
import UIKit
import AVFoundation

final class SampleBufferDisplayView: UIView {
override class var layerClass: AnyClass { AVSampleBufferDisplayLayer.self }

var displayLayer: AVSampleBufferDisplayLayer {
return layer as! AVSampleBufferDisplayLayer
}

private(set) var pipController: PiPController?

weak var pipDelegate: PiPControllerDelegate? {
didSet {
pipController?.delegate = pipDelegate
}
}

override init(frame: CGRect) {
super.init(frame: frame)
commonInit()
}

required init?(coder: NSCoder) {
super.init(coder: coder)
commonInit()
}

private func commonInit() {
backgroundColor = .black
displayLayer.videoGravity = .resizeAspect
#if !os(tvOS)
#if compiler(>=6.0)
if #available(iOS 26.0, *) {
displayLayer.preferredDynamicRange = .automatic
} else if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
#endif
if #available(iOS 17.0, *) {
displayLayer.wantsExtendedDynamicRangeContent = true
}
#endif
setupPictureInPicture()
}

private func setupPictureInPicture() {
pipController = PiPController(sampleBufferDisplayLayer: displayLayer)
}

// MARK: - PiP Control Methods

func startPictureInPicture() {
pipController?.startPictureInPicture()
}

func stopPictureInPicture() {
pipController?.stopPictureInPicture()
}

var isPictureInPictureSupported: Bool {
return pipController?.isPictureInPictureSupported ?? false
}

var isPictureInPictureActive: Bool {
return pipController?.isPictureInPictureActive ?? false
}

var isPictureInPicturePossible: Bool {
return pipController?.isPictureInPicturePossible ?? false
}
}
@@ -23,14 +23,6 @@ export type OnErrorEventPayload = {

export type OnTracksReadyEventPayload = Record<string, never>;

export type MpvPlayerModuleEvents = {
onChange: (params: ChangeEventPayload) => void;
};

export type ChangeEventPayload = {
value: string;
};

export type VideoSource = {
url: string;
headers?: Record<string, string>;

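The `VideoSource` shape above is what the single `source` prop carries. A minimal example value is sketched below (not part of this commit); it assumes `url` and `headers` are the only fields needed here, and the server URL and auth header are placeholders. Further options such as a start position or initial track ids mirror the Swift VideoLoadConfig, but their exact JS key names are not shown in this diff.

```typescript
// Sketch only — not part of this commit.
import type { VideoSource } from "../modules/mpv-player";

const source: VideoSource = {
  url: "https://jellyfin.example.local/Videos/item/stream.mkv", // placeholder URL
  headers: { Authorization: "MediaBrowser Token=..." },          // placeholder auth header
};

export default source;
```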
@@ -1,11 +0,0 @@
import { NativeModule, requireNativeModule } from "expo";

import { MpvPlayerModuleEvents } from "./MpvPlayer.types";

declare class MpvPlayerModule extends NativeModule<MpvPlayerModuleEvents> {
hello(): string;
setValueAsync(value: string): Promise<void>;
}

// This call loads the native module object from the JSI.
export default requireNativeModule<MpvPlayerModule>("MpvPlayer");
@@ -1,19 +0,0 @@
import { NativeModule, registerWebModule } from "expo";

import { ChangeEventPayload } from "./MpvPlayer.types";

type MpvPlayerModuleEvents = {
onChange: (params: ChangeEventPayload) => void;
};

class MpvPlayerModule extends NativeModule<MpvPlayerModuleEvents> {
PI = Math.PI;
async setValueAsync(value: string): Promise<void> {
this.emit("onChange", { value });
}
hello() {
return "Hello world! 👋";
}
}

export default registerWebModule(MpvPlayerModule, "MpvPlayerModule");
@@ -28,16 +28,16 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
await nativeRef.current?.setSpeed(speed);
},
getSpeed: async () => {
return await nativeRef.current?.getSpeed();
return (await nativeRef.current?.getSpeed()) ?? 1.0;
},
isPaused: async () => {
return await nativeRef.current?.isPaused();
return (await nativeRef.current?.isPaused()) ?? true;
},
getCurrentPosition: async () => {
return await nativeRef.current?.getCurrentPosition();
return (await nativeRef.current?.getCurrentPosition()) ?? 0;
},
getDuration: async () => {
return await nativeRef.current?.getDuration();
return (await nativeRef.current?.getDuration()) ?? 0;
},
startPictureInPicture: async () => {
await nativeRef.current?.startPictureInPicture();
@@ -46,13 +46,15 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
await nativeRef.current?.stopPictureInPicture();
},
isPictureInPictureSupported: async () => {
return await nativeRef.current?.isPictureInPictureSupported();
return (
(await nativeRef.current?.isPictureInPictureSupported()) ?? false
);
},
isPictureInPictureActive: async () => {
return await nativeRef.current?.isPictureInPictureActive();
return (await nativeRef.current?.isPictureInPictureActive()) ?? false;
},
getSubtitleTracks: async () => {
return await nativeRef.current?.getSubtitleTracks();
return (await nativeRef.current?.getSubtitleTracks()) ?? [];
},
setSubtitleTrack: async (trackId: number) => {
await nativeRef.current?.setSubtitleTrack(trackId);
@@ -61,7 +63,7 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
await nativeRef.current?.disableSubtitles();
},
getCurrentSubtitleTrack: async () => {
return await nativeRef.current?.getCurrentSubtitleTrack();
return (await nativeRef.current?.getCurrentSubtitleTrack()) ?? 0;
},
addSubtitleFile: async (url: string, select = true) => {
await nativeRef.current?.addSubtitleFile(url, select);
@@ -84,15 +86,14 @@ export default React.forwardRef<MpvPlayerViewRef, MpvPlayerViewProps>(
setSubtitleFontSize: async (size: number) => {
await nativeRef.current?.setSubtitleFontSize(size);
},
// Audio controls
getAudioTracks: async () => {
return await nativeRef.current?.getAudioTracks();
return (await nativeRef.current?.getAudioTracks()) ?? [];
},
setAudioTrack: async (trackId: number) => {
await nativeRef.current?.setAudioTrack(trackId);
},
getCurrentAudioTrack: async () => {
return await nativeRef.current?.getCurrentAudioTrack();
return (await nativeRef.current?.getCurrentAudioTrack()) ?? 0;
},
}));

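One effect of the nullish-coalescing fallbacks added above is that the getters become safe to call before the native view has mounted (or after it has unmounted), since each one now resolves to a sensible default instead of undefined. A small sketch (not part of this commit; import path assumed):

```typescript
// Sketch only — not part of this commit.
import type { MpvPlayerViewRef } from "../modules/mpv-player";

export async function readProgress(player: MpvPlayerViewRef): Promise<number> {
  const position = await player.getCurrentPosition(); // falls back to 0
  const duration = await player.getDuration();        // falls back to 0
  return duration > 0 ? position / duration : 0;      // mirrors the native onProgress payload
}
```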
@@ -1,14 +0,0 @@
import { MpvPlayerViewProps } from "./MpvPlayer.types";

export default function MpvPlayerView(props: MpvPlayerViewProps) {
return (
<div>
<iframe
title='MPV Player'
style={{ flex: 1 }}
src={props.url}
onLoad={() => props.onLoad({ nativeEvent: { url: props.url } })}
/>
</div>
);
}
@@ -1,3 +1,2 @@
export * from "./MpvPlayer.types";
export { default as MpvPlayerModule } from "./MpvPlayerModule";
export { default as MpvPlayerView } from "./MpvPlayerView";
