What objects manage AVAssetWriterInputMediaDataRequester? I get a crash after markCurrentPassAsFinished because the call to its delegate property references a deallocated instance.

My goal is to write out a video file with custom-drawn content in each frame, using CGContext. I'm having an issue where after I hit input.markCurrentPassAsFinished(), I get a crash, because [AVAssetWriterInputMediaDataRequester delegate]: message sent to deallocated instance 0x60c00025fa40

But AVAssetWriterInputMediaDataRequester is not a documented class, so I don't know what object isn't being kept around long enough.



Code:

import Foundation

import AVFoundation

import CoreMedia

import Cocoa



// Renders a short (2.0 s) demo movie of solid blue 1280x720 frames to an MP4,
// drawing each frame via CGContext and encoding through AVAssetWriter's
// multi-pass API (respondToEachPassDescription / requestMediaDataWhenReady).
// NOTE(review): the reported crash ("AVAssetWriterInputMediaDataRequester
// delegate ... message sent to deallocated instance" after
// markCurrentPassAsFinished()) involves a private AVFoundation class that is
// not visible in this code, so the over-released owner cannot be identified
// from this file alone — TODO confirm which object owns it.
class VideoRenderer {

//my demo data model
let title:String

// Destination file URL supplied by the caller; the writer is created on it in init.
let outputUrl:URL

// Created empty in init; never updated by this class as written.
var progress:Progress

// Owns the export session; configured for .mp4 output in init.
let writer : AVAssetWriter

// Success-flag callback: invoked from didComplete() with
// (writer.status == .completed), or with false if startWriting() fails.
let completion:(Bool)->()

// Throws if AVAssetWriter cannot be created for outputUrl.
init(title:String, outputUrl:URL, completion:@escaping(Bool)->()) throws {
self.title = title
self.outputUrl = outputUrl
self.completion = completion
progress = Progress(parent: nil, userInfo: nil)
writer = try AVAssetWriter(outputURL: outputUrl, fileType: .mp4)
/* guard let pixelPool = CVPixelBufferPool.create(width: 1280, height: 720)
else {
throw PoolCreatorError.cantEven
}
self.pixelPool = pixelPool */
}

// Configures a 1280x720 H.264 input, starts the writer, and drives the
// multi-pass render loop; frames are drawn by pixelBufferDrawing(_:) and
// appended until the 2.0-second mark, at which point the pass is finished.
func export() {
//must specify AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey
//AVVideoCodecKey = AVVideoCodecTypeH264
let input = AVAssetWriterInput(mediaType: .video, outputSettings: [
AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey:1280,
AVVideoHeightKey:720,
AVVideoCompressionPropertiesKey:[
AVVideoAverageBitRateKey:2600000 as NSNumber
,AVVideoExpectedSourceFrameRateKey:30.0 as NSNumber
// ,AVVideoAverageNonDroppableFrameRateKey:30 as NSNumber //not supported for H.264
]
//lots of additional keys related to colors
])
input.mediaTimeScale = 30000 //to support 29.97 for NTSC
writer.add(input)
// Stored in a property so the input outlives this method's scope while the
// asynchronous callbacks below are still in flight.
inputWriter = input

// Adapter vends CVPixelBuffers (from its pool) sized and formatted so a
// CGBitmapContext can draw directly into their memory.
let bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input, sourcePixelBufferAttributes: [
kCVPixelBufferCGBitmapContextCompatibilityKey as String:true,
kCVPixelBufferWidthKey as String:1280 as CFNumber,
kCVPixelBufferHeightKey as String:720 as CFNumber,
kCVPixelBufferPixelFormatTypeKey as String:kCVPixelFormatType_32ARGB
] )
// Retained so the adapter (and its pixel-buffer pool) is not deallocated
// before the render loop finishes.
pixelBufferAdapter = bufferAdapter
if !writer.startWriting() {
completion(false)
return
}
writer.startSession(atSourceTime:CMTime(seconds: 0.0, preferredTimescale: 30) /*CMTime(value: 0, timescale: 30)*/)
// Multi-pass: this block runs once per pass. A pass description with no
// source time range means all passes are done and the file can be finalized.
input.respondToEachPassDescription(on: DispatchQueue.global(qos: .userInitiated)) {
guard let timeRange:CMTimeRange = input.currentPassDescription?.sourceTimeRanges.first?.timeRangeValue else {
//we're done
self.writer.finishWriting(completionHandler: {
self.didComplete()
//self.pixelBufferAdapter = nil
})
return
}
// Restart frame generation at the start of the range the encoder requested.
self.lastGeneratedTime = timeRange.start.seconds
input.requestMediaDataWhenReady(on: DispatchQueue.global(qos: .userInitiated)) {
while input.isReadyForMoreMediaData {
//get time
let thisTimeStamp = CMTime(seconds: self.lastGeneratedTime, preferredTimescale: input.mediaTimeScale)
if thisTimeStamp.seconds >= 2.0 { //compare to end of timeRange
// NOTE(review): the reported crash occurs after this call — the
// private requester object's delegate is messaged post-dealloc.
input.markCurrentPassAsFinished()
return
}
let nextTime:Double = self.lastGeneratedTime + (1/30.0)
//print("thisTime = \(thisTimeStamp.seconds)")
//if over, cancel render

// Draw one solid-blue full-frame rect; on pool/context failure the loop
// simply retries while the input remains ready.
guard let buffer:CVPixelBuffer = bufferAdapter.pixelBufferDrawing({ context in
//TODO: draw frame
context.setFillColor(NSColor.blue.cgColor)
context.fill(CGRect(x: 0.0, y: 0.0, width: 1280.0, height: 720.0))
}) else {
continue
}

if !bufferAdapter.append(buffer, withPresentationTime: thisTimeStamp) {
print("didn't append")
}
self.lastGeneratedTime = nextTime
}
}
}

//endsession?
}

//var pixelPool:CVPixelBufferPool

// Called from finishWriting's completion handler: logs the writer's error
// (if the export did not complete) and reports overall success to `completion`.
func didComplete() {
if writer.status != .completed
,let error = writer.error {
print("writer error = \(error)")
}
completion(writer.status == .completed)
}

// Presentation time (in seconds) of the next frame to generate.
var lastGeneratedTime:Double = 0.0

// Held as properties so these objects survive beyond export()'s scope while
// the asynchronous pass/data callbacks are still running.
var pixelBufferAdapter:AVAssetWriterInputPixelBufferAdaptor?

var inputWriter:AVAssetWriterInput?

}


extension AVAssetWriterInputPixelBufferAdaptor {

/// Vends a pixel buffer from the adaptor's pool, wraps its backing memory in
/// a CGContext, and passes that context to `work` so the caller can draw the
/// frame directly into the buffer.
/// - Parameter work: Closure that draws the frame content into the context.
/// - Returns: The drawn buffer ready to append, or nil when the pool is not
///   yet available (it is nil until the writer has started) or when
///   buffer/context creation fails.
func pixelBufferDrawing(_ work:(CGContext)->())->CVPixelBuffer? {
    var bufferOrNil:CVPixelBuffer?
    guard let pool = pixelBufferPool
        ,CVPixelBufferPoolCreatePixelBuffer(nil, pool, &bufferOrNil) == kCVReturnSuccess
        ,let buffer = bufferOrNil
        else {
            return nil
    }

    let width:Int = CVPixelBufferGetWidth(buffer)
    let height:Int = CVPixelBufferGetHeight(buffer)
    CVPixelBufferLockBaseAddress(buffer, [])
    // FIX: unlock on every exit path. The original returned nil on CGContext
    // creation failure while the base address was still locked, leaking the
    // lock on that buffer.
    defer { CVPixelBufferUnlockBaseAddress(buffer, []) }

    //create a cg graphics context using the pixel buffer's data bytes
    let data = CVPixelBufferGetBaseAddress(buffer)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    // 32ARGB buffer + premultipliedFirst alpha matches the pool attributes
    // requested in export().
    guard let context = CGContext(data: data, width: width, height: height,
        bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(buffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
    else {
        return nil
    }
    work(context)

    return buffer
}

}

I have no solution, but I do have some questions (I hope they will help).


In the following :


self.lastGeneratedTime = timeRange.start.seconds
  input.requestMediaDataWhenReady(on: DispatchQueue.global(qos: .userInitiated)) {
  while input.isReadyForMoreMediaData {
  //get time
  let thisTimeStamp = CMTime(seconds: self.lastGeneratedTime, preferredTimescale: input.mediaTimeScale)
  if thisTimeStamp.seconds >= 2.0 { //compare to end of timeRange
  input.markCurrentPassAsFinished()
  return
  }


Doc for requestMediaDataWhenReady(on:using:) says :

Discussion

The block should append media data to the input either until the input’s

isReadyForMoreMediaData
property becomes
false
or until there is no more media data to supply (at which point it may choose to mark the input as finished using
markAsFinished()
).


It seems you are not appending any media data there — you are only reading the time.


Is that intentional ?

In my simple test, I intend for the final movie to be 2.0 seconds long, so at or after 2.0 seconds there is no more media.


I can make the export succeed if I ignore all the multiple-pass stuff and use this instead:

if thisTimeStamp.seconds >= 2.0 { //compare to end of timeRange
  input.markAsFinished()
  self.writer.finishWriting(completionHandler: {     //AVAssetWriter
     self.didComplete()     //reports completion back to rest of app
  })
  return
}

However, that leaves me unable to make multi-pass exports. I'm building an app for production-quality video export, so I'd like multi-pass encoding if possible.

What objects manage AVAssetWriterInputMediaDataRequester? I get a crash after markCurrentPassAsFinished because the call to its delegate property references a deallocated instance.
 
 
Q