I'm trying to take a local m4a file and compress/down-sample this file (For the purposes of making a smaller file). I getting error in @objc func nextTapped().
Error code: assetWriter_finishBuildingAudioTrackWithSourceFormatDescription signalled err=-12413 (kFigAssetWriterError_InappropriateSourceFormat) (AssetWriter can only compress LPCM audio) at /Library/Caches/com.apple.xbs/Sources/EmbeddedCoreMedia_Sim/EmbeddedCoreMedia-2765.6/Prototypes/Export/FigAssetWriter.c:636
*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] Cannot append sample buffer: Input buffer must be in an uncompressed format when outputSettings is not nil'
here some code:
import UIKit
import AVFoundation
class RecordWhistleViewController: UIViewController, AVAudioRecorderDelegate {
var stackView: UIStackView!
var recordButton: UIButton!
var recordingSession: AVAudioSession!
var whistleRecorder: AVAudioRecorder!
var asetWriter: AVAssetWriter!
var asetWriterInput: AVAssetWriterInput!
override func viewDidLoad() {
super.viewDidLoad()
title = "Record your whistle"
navigationItem.backBarButtonItem = UIBarButtonItem(title: "Record", style: .plain, target: nil, action: nil)
recordingSession = AVAudioSession.sharedInstance()
do {
try recordingSession.setCategory(.playAndRecord, mode: .default)
try recordingSession.setActive(true)
recordingSession.requestRecordPermission() { [unowned self] allowed in
DispatchQueue.main.async {
if allowed {
self.loadRecordingUI()
} else {
self.loadFailUI()
}
}
}
} catch {
self.loadFailUI()
}
}
class func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
class func getWhistleURL() -> URL {
return getDocumentsDirectory().appendingPathComponent("whistle.m4a")
}
func startRecording() {
view.backgroundColor = UIColor(red: 0.6, green: 0, blue: 0, alpha: 1)
recordButton.setTitle("Tap to Stop", for: .normal)
let audioURL = RecordWhistleViewController.getWhistleURL()
print(audioURL.absoluteString)
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 12000,
AVNumberOfChannelsKey: 2,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]
do {
whistleRecorder = try AVAudioRecorder(url: audioURL, settings: settings)
whistleRecorder.delegate = self
whistleRecorder.record()
} catch {
finishRecording(success: false)
}
}
func finishRecording(success: Bool) {
view.backgroundColor = UIColor(red: 0, green: 0.6, blue: 0, alpha: 1)
whistleRecorder.stop()
whistleRecorder = nil
if success {
recordButton.setTitle("Tap to Re-record", for: .normal)
navigationItem.rightBarButtonItem = UIBarButtonItem(title: "Next", style: .plain, target: self, action: #selector(nextTapped))
} else {
recordButton.setTitle("Tap to Record", for: .normal)
let ac = UIAlertController(title: "Record failed", message: "There was a problem recording your whistle; please try again.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
}
}
@objc func nextTapped() {
let audioURL = RecordWhistleViewController.getWhistleURL()
var asset = AVAsset.init(url: audioURL)
let exportPath = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("out.m4a").path
print("export PATH IS \(exportPath)")
let exportURL = URL(fileURLWithPath: exportPath)
var readerError: Error? = nil
var reader: AVAssetReader? = nil
do {
reader = try AVAssetReader(asset: asset)
} catch {
print("error in reader \(error)")
}
let track = asset.tracks(withMediaType: .audio)[0]
let readerOutput = AVAssetReaderTrackOutput(track: track, outputSettings: nil)
reader?.add(readerOutput)
var writerError: Error? = nil
var writer: AVAssetWriter? = nil
do {
writer = try AVAssetWriter(outputURL: exportURL, fileType: .m4a)
} catch {
print("ERROR IN writer \(error)")
}
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo
let outputSettings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 12000,
AVNumberOfChannelsKey: 2,
AVEncoderBitRateKey: 128000,
AVChannelLayoutKey: Data(bytes: &channelLayout, count: MemoryLayout<AudioChannelLayout>.size)
] as [String : Any]
let writerInput = AVAssetWriterInput(mediaType: .audio, outputSettings: outputSettings as? [String: Any])
writerInput.expectsMediaDataInRealTime = false
writer?.add(writerInput)
writer?.startWriting()
writer?.startSession(atSourceTime: .zero)
reader?.startReading()
let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")
writerInput.requestMediaDataWhenReady(on: mediaInputQueue) {
print("Asset writer ready: \(writerInput.isReadyForMoreMediaData)")
while writerInput.isReadyForMoreMediaData {
var nextBuffer: CMSampleBuffer?
nextBuffer = readerOutput.copyNextSampleBuffer()
if nextBuffer != nil {
if let nextBuffer = nextBuffer {
print("adding buffer")
writerInput.append(nextBuffer)
}
} else {
writerInput.markAsFinished()
reader?.cancelReading()
writer?.finishWriting {
print("Asset writer finished writing")
}
break
}
}
}
}