I am trying to capture the screen, the app audio, and the mic audio with ReplayKit and write them to a single file with an AVAssetWriter. App audio and mic audio each work fine on their own, but as soon as both are captured together the writer fails with an unknown error. Below is the method that sets up the writer and its inputs, followed by my processSampleBuffer implementation.
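To get more detail than "unknown error", the writer's status and error can be inspected like this (a minimal sketch; `_assetWriter` is the property assigned in startCapture below):

if _assetWriter?.status == .failed, let error = _assetWriter?.error as NSError? {
    // The domain/code/userInfo usually narrow down which input the writer rejected
    print("writer failed: \(error.domain) code \(error.code) info \(error.userInfo)")
}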
The method that sets up the writer and its inputs:
func startCapture() {
    _filename = UUID().uuidString
    let videoPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(_filename).mp4")
    let writer = try! AVAssetWriter(outputURL: videoPath, fileType: .mp4)

    // Video input. Note: UIScreen.main.bounds is in points, while ReplayKit
    // delivers pixel buffers in pixels, so these dimensions may not match the
    // incoming frames exactly.
    let screen = UIScreen.main.bounds
    let screenBounds = screen.size
    let videoCompressionProperties = [
        AVVideoAverageBitRateKey: screenBounds.width * screenBounds.height * 10.1
    ]
    let videoSettings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: screenBounds.width,
        AVVideoHeightKey: screenBounds.height,
        AVVideoCompressionPropertiesKey: videoCompressionProperties
    ]
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
    input.expectsMediaDataInRealTime = true
    if writer.canAdd(input) {
        writer.add(input)
    }

    // App audio input: mono AAC at 44.1 kHz.
    var acl = AudioChannelLayout()
    memset(&acl, 0, MemoryLayout<AudioChannelLayout>.size)
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
    let audioOutputSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVSampleRateKey: 44100,
        AVNumberOfChannelsKey: 1,
        AVEncoderBitRateKey: 128000,
        AVChannelLayoutKey: Data(bytes: &acl, count: MemoryLayout<AudioChannelLayout>.size)
    ]
    let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
    audioInput.expectsMediaDataInRealTime = true
    if writer.canAdd(audioInput) {
        writer.add(audioInput)
    }

    // Mic audio input: mono AAC at 24 kHz (a different rate than the app audio track).
    let micAudioOutputSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVSampleRateKey: 24000,
        AVNumberOfChannelsKey: 1,
        AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
    ]
    let micAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: micAudioOutputSettings)
    micAudioInput.expectsMediaDataInRealTime = true
    if writer.canAdd(micAudioInput) {
        writer.add(micAudioInput)
    }

    writer.startWriting()
    _audioAssetWriterInput = audioInput
    _micAssetWriterInput = micAudioInput
    _assetWriterInput = input
    _assetWriter = writer
}
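One thing I am not sure about: the sample rates above are hardcoded, and I don't know whether they match what ReplayKit actually delivers. As a sanity check, the real format of an incoming audio buffer can be read from its format description, for example at the top of processSampleBuffer (a sketch using the Core Media accessors):

if let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer),
   let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription)?.pointee {
    // Actual rate and channel count of the buffer ReplayKit handed us
    print("incoming audio: \(asbd.mSampleRate) Hz, \(asbd.mChannelsPerFrame) ch")
}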
And the processSampleBuffer override:
override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
    if startTime == nil {
        startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        // The session starts at .zero even though the buffers carry
        // host-clock presentation timestamps
        _assetWriter!.startSession(atSourceTime: CMTime.zero)
    }

    if sampleBufferType == RPSampleBufferType.video {
        if let input = _assetWriterInput, input.isReadyForMoreMediaData {
            if !input.append(sampleBuffer) {
                print("cannot append video: status \(String(describing: _assetWriter?.status.rawValue)), error \(String(describing: _assetWriter?.error))")
            }
        }
    }

    if sampleBufferType == RPSampleBufferType.audioApp {
        if let input = _audioAssetWriterInput, input.isReadyForMoreMediaData {
            if !input.append(sampleBuffer) {
                print("cannot append app audio: status \(String(describing: _assetWriter?.status.rawValue)), error \(String(describing: _assetWriter?.error))")
            }
        }
    }

    if sampleBufferType == RPSampleBufferType.audioMic {
        if let input = _micAssetWriterInput, input.isReadyForMoreMediaData {
            if !input.append(sampleBuffer) {
                print("cannot append mic audio: status \(String(describing: _assetWriter?.status.rawValue)), error \(String(describing: _assetWriter?.error))")
            }
        }
    }

    if shouldEnd {
        _finishWriters()
    }
}
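_finishWriters() is not shown above; a minimal sketch of that finishing step, assuming it only has to close the inputs and the file, would be:

func _finishWriters() {
    // Sketch: stop accepting data on each input, then close the file.
    _assetWriterInput?.markAsFinished()
    _audioAssetWriterInput?.markAsFinished()
    _micAssetWriterInput?.markAsFinished()
    _assetWriter?.finishWriting {
        // Any failure accumulated during the session surfaces here
        print("finished: status \(String(describing: self._assetWriter?.status.rawValue)), error \(String(describing: self._assetWriter?.error))")
    }
}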