-
Display HDR video in EDR with AVFoundation and Metal
Learn how you can take advantage of AVFoundation and Metal to build an efficient EDR pipeline. Follow along as we demonstrate how you can use AVPlayer to display HDR video as EDR, add playback into an app view, render it with Metal, and use Core Image or custom Metal shaders to add video effects such as keying or color management. Whether you develop games or pro apps, we'll help you decide which frameworks to use and share best practices for selecting transports, color spaces, and pixel buffer formats.
Resources
Related Videos
WWDC23
Tech Talks
WWDC22
WWDC21
WWDC20
-
Search in this video…
-
-
6:58 - Playing media using AVPlayerViewController
// Playing media using AVPlayerViewController

// Create a player for the media at videoURL.
// Swift 3+ API: the initializer label is `url:`, not `URL:`.
let player = AVPlayer(url: videoURL)

// Create a player view controller and attach the player.
let playerViewController = AVPlayerViewController()
playerViewController.player = player

// Present the player UI, then start playback once presentation completes.
// `present(_:animated:completion:)` replaces the pre-Swift-3
// `presentViewController(_:animated:)`; calling `player.play()` directly
// also avoids the original's force-unwrap of `playerViewController.player!`.
self.present(playerViewController, animated: true) {
    player.play()
}
-
7:38 - Playing media using AVPlayer and AVPlayerLayer
// Playing media using AVPlayer and AVPlayerLayer

// Create a player for the media at videoURL.
// Swift 3+ API: the initializer label is `url:`, not `URL:`.
let player = AVPlayer(url: videoURL)

// Host the player's video output in a layer sized to fill the view.
// `let` instead of `var` — the reference is never reassigned.
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.view.bounds
self.view.layer.addSublayer(playerLayer)

// Start playback.
player.play()
-
9:28 - CAMetalLayer Properties
// Opt into using EDR.
// (Slide-style snippet: `layer` stands in for the CAMetalLayer backing
// your view — obtain it from the view before setting these properties.)
let layer: CAMetalLayer
layer.wantsExtendedDynamicRangeContent = true

// Use a half-float pixel format. In Swift the enum case is
// `.rgba16Float`; `MTLPixelFormatRGBA16Float` is the Obj-C spelling.
layer.pixelFormat = .rgba16Float

// Use the extended linear Display P3 color space. The `colorspace`
// property takes a CGColorSpace, so create one from the named
// color-space constant rather than assigning the CFString name itself.
layer.colorspace = CGColorSpace(name: CGColorSpace.extendedLinearDisplayP3)
-
11:33 - Create an AVPlayerItemVideoOutput
// Request EDR-friendly frames from the player item: Display P3 primaries
// with a linear transfer function and the BT.2020 YCbCr matrix,
// delivered as 64-bit RGBA half-float pixel buffers.
let videoColorProperties = [
    AVVideoColorPrimariesKey: AVVideoColorPrimaries_P3_D65,
    AVVideoTransferFunctionKey: AVVideoTransferFunction_Linear,
    AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_2020
]

// Heterogeneous values, so the dictionary is typed [String: Any].
let outputVideoSettings: [String: Any] = [
    AVVideoAllowWideColorKey: true,
    AVVideoColorPropertiesKey: videoColorProperties,
    kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_64RGBAHalf)
]

// Create a player item video output configured with those settings.
let videoPlayerItemOutput = AVPlayerItemVideoOutput(outputSettings: outputVideoSettings)
-
13:02 - Create a display link
// Create a display link that fires once per display refresh and pulls
// decoded pixel buffers via displayLinkCopyPixelBuffers(link:).
lazy var displayLink: CADisplayLink = CADisplayLink(target: self,
                                                    selector: #selector(displayLinkCopyPixelBuffers(link:)))

// Observe the player item's status; once it is ready to play, attach the
// video output, start the display link on the main run loop, and begin
// playback.
var statusObserver: NSKeyValueObservation?
statusObserver = videoPlayerItem.observe(\.status, options: [.new, .old],
                                         changeHandler: { playerItem, change in
    if playerItem.status == .readyToPlay {
        playerItem.add(videoPlayerItemOutput)
        displayLink.add(to: .main, forMode: .common)
        videoPlayer?.play()
    }
})
// NOTE(review): the original listing ended with an extra unmatched `}`
// (left over from an enclosing scope not shown) — removed here.
-
14:16 - Run DisplayLink to get pixel buffers
// Display-link callback: copy the newest pixel buffer for the current
// time and run it through a Core Image filter.
@objc func displayLinkCopyPixelBuffers(link: CADisplayLink) {
    // Map the display link's host time onto the player item's timeline.
    let currentTime = videoPlayerItemOutput.itemTime(forHostTime: CACurrentMediaTime())

    // Only do work when a fresh frame is available for this time.
    if videoPlayerItemOutput.hasNewPixelBuffer(forItemTime: currentTime) {
        if let buffer = videoPlayerItemOutput.copyPixelBuffer(forItemTime: currentTime,
                                                              itemTimeForDisplay: nil) {
            // `buffer` is already non-optional here thanks to the `if let`
            // binding; the original's `buffer!` would not compile.
            let image = CIImage(cvPixelBuffer: buffer)
            let filter = CIFilter.sepiaTone()
            filter.inputImage = image
            output = filter.outputImage ?? CIImage.empty()
            // use context to render to your CIRenderDestination
        }
    }
}
-
15:53 - Integrate Core Image
// Display-link callback: fetch the newest pixel buffer for the current
// time and apply a Core Image sepia filter to it.
@objc func displayLinkCopyPixelBuffers(link: CADisplayLink) {
    // Convert the display link's host time into the item's timeline.
    let itemTime = videoPlayerItemOutput.itemTime(forHostTime: CACurrentMediaTime())

    // Bail out early unless a fresh frame exists and can be copied.
    guard videoPlayerItemOutput.hasNewPixelBuffer(forItemTime: itemTime),
          let pixelBuffer = videoPlayerItemOutput.copyPixelBuffer(forItemTime: itemTime,
                                                                  itemTimeForDisplay: nil)
    else { return }

    // Wrap the pixel buffer in a CIImage and run it through the filter.
    let sourceImage = CIImage(cvPixelBuffer: pixelBuffer)
    let sepia = CIFilter.sepiaTone()
    sepia.inputImage = sourceImage
    output = sepia.outputImage ?? CIImage.empty()
    // use context to render to your CIRenderDestination
}
-
19:13 - Using CVMetalTextureCache
// Create a CVMetalTextureCacheRef backed by the default Metal device.
// MTLCreateSystemDefaultDevice() returns an optional, so unwrap it first;
// the Core Video C functions are imported into Swift WITHOUT argument
// labels (the labeled calls in the original slide do not compile).
guard let mtlDevice = MTLCreateSystemDefaultDevice() else { return }
var mtlTextureCache: CVMetalTextureCache? = nil
CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, mtlDevice, nil, &mtlTextureCache)
guard let textureCache = mtlTextureCache else { return }

// Create a CVMetalTextureRef from the cache and our pixelBuffer,
// matching the buffer's dimensions and the EDR half-float format
// (Swift spelling: `.rgba16Float`).
let width = CVPixelBufferGetWidth(pixelBuffer)
let height = CVPixelBufferGetHeight(pixelBuffer)
var cvTexture: CVMetalTexture? = nil
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                          textureCache,
                                          pixelBuffer,
                                          nil,
                                          .rgba16Float,
                                          width,
                                          height,
                                          0,          // planeIndex
                                          &cvTexture)

// Extract the MTLTexture; nil if texture creation failed.
guard let cvTexture, let texture = CVMetalTextureGetTexture(cvTexture) else { return }

// In Obj-C, release the CVMetalTextureRef in a Metal command buffer
// completion handler so the GPU has finished reading it first.
-