How to save video asynchronously?

Hello! I'm trying to save videos asynchronously. I've already tried performChanges without the completionHandler, but it didn't work. Could you give me an example? Assume the variable holding the file URL is named fileURL. What would this look like asynchronously?

Replies

Could you post a snippet of the exact code you're referencing? Have you looked at the "Concurrency Note" on this page, which details the ins and outs of async calls? https://developer.apple.com/documentation/photokit/phphotolibrary/1620743-performchanges
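For reference, the async overload described in that Concurrency Note boils down to something like the sketch below. saveVideoToLibrary is a hypothetical wrapper, and it assumes fileURL already points at the finished video file, the app has add-access to the photo library, and iOS 15 or later:

import Photos

func saveVideoToLibrary(fileURL: URL) async {
    do {
        // The async overload suspends until the change block has been
        // committed, instead of blocking the calling thread.
        try await PHPhotoLibrary.shared().performChanges {
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: fileURL)
        }
        print("Video saved.")
    } catch {
        print("Could not save video: \(error.localizedDescription)")
    }
}

Calling it from a Task keeps the main thread free while the save runs.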

class SayTest: NSObject {

    // …

    init(_ texts: [String]) {
        // …
        DispatchQueue.global().async {
            do {
                try self.nextText(texts)
                msgErro("Video created.")
            } catch {
                // …
            }
        }
    }

    func nextText(_ texts: [String]) throws {
        // …
        let assetWriter = try AVAssetWriter(outputURL: url, fileType: .mp4)
        // …
        assetWriter.finishWriting {
            switch assetWriter.status {
            case .completed:
                msgRelatos("Operation completed.")
                UISaveVideoAtPathToSavedPhotosAlbum(url.path, nil, nil, nil)
            case .failed:
                // …
            }
        }
    }
}
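If the goal is to drop UISaveVideoAtPathToSavedPhotosAlbum, the finishWriting completion handler is a natural place to hand the file to PHPhotoLibrary instead. A sketch using the completion-handler variant, which returns immediately rather than blocking (msgRelatos and msgErro are the poster's own logging helpers):

assetWriter.finishWriting {
    guard assetWriter.status == .completed else { return }
    // Asynchronous: performChanges returns right away and invokes the
    // completion handler on an arbitrary queue once the save finishes.
    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
    }, completionHandler: { success, error in
        if success {
            msgRelatos("Operation completed.")
        } else {
            msgErro("Error: \(error?.localizedDescription ?? "unknown")")
        }
    })
}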


Hello,

I'm combining audio from AVSpeechSynthesizer.write() with some photos into a video.

I tried a very long text, long enough to produce a video of around 50 minutes. While the video was being saved to the gallery, the app would freeze until the save finished. In other cases, the app would crash and I would have to build and run it again.

I tried using PHPhotoLibrary.shared().performChanges() instead of UISaveVideoAtPathToSavedPhotosAlbum, but the app still froze until the video was saved to the gallery, or it crashed and never recovered.

Here's the code:

import AVFoundation
import UIKit

class TesteFala: NSObject {
    private let synthesizer = AVSpeechSynthesizer()
    private var counterImage = 0

    let semaphore = DispatchSemaphore(value: 0)

    init(_ texts: [String]) {
        Misc.obj.lData.removeAll()
        Misc.obj.selectedPhotos.append(createBlueImage(CGSize(width: 100, height: 100)))
        Misc.obj.selectedPhotos.append(createBlueImage(CGSize(width: 100, height: 100)))
        super.init()
        synthesizer.delegate = self

        // Build and save the video off the main thread.
        DispatchQueue.global().async {
            do {
                try self.nextText(texts)
                msgErro("Completed.")
            } catch {
                msgErro(error.localizedDescription)
            }
        }
    }

    func nextText(_ texts: [String]) throws {
        var audioBuffers = [CMSampleBuffer]()
        var videoBuffers = [CVPixelBuffer]()
        var lTime = [0.0]

        for text in texts {
            var time = Double.zero
            var duration = AVAudioFrameCount.zero

            let utterance = AVSpeechUtterance(string: text)
            utterance.voice = AVSpeechSynthesisVoice(language: "pt-BR")
            utterance.rate = 0.2

            // Collect the synthesized audio as sample buffers.
            synthesizer.write(utterance) { buffer in
                if let buffer = buffer as? AVAudioPCMBuffer,
                   let sampleBuffer = buffer.toCMSampleBuffer(presentationTime: .zero) {
                    audioBuffers.append(sampleBuffer)
                    duration += buffer.frameLength
                    time += Double(buffer.frameLength) / buffer.format.sampleRate
                }
            }

            // Wait until the delegate signals that the utterance finished.
            semaphore.wait()

            if Misc.obj.selectedPhotos.indices.contains(counterImage) {
                let image = Misc.obj.selectedPhotos[counterImage]
                //let imagemEscrita = imagem.addTexto(textos[quantTxt])
                let pixelBuffer = image.toCVPixelBuffer()
                videoBuffers.append(pixelBuffer!)
                lTime.append(time)

                // Advance counterImage, wrapping around the photo list.
                counterImage += 1

                if counterImage == Misc.obj.selectedPhotos.count {
                    counterImage = 0
                }
            }
        }

        let url = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("*****/output.mp4")
        try FileManager.default.createDirectory(at: url.deletingLastPathComponent(), withIntermediateDirectories: true)

        // Remove any leftover file from a previous run.
        if FileManager.default.fileExists(atPath: url.path) {
            try FileManager.default.removeItem(at: url)
        }

        let audioProvider = SampleProvider(buffers: audioBuffers)
        let videoProvider = SampleProvider(buffers: videoBuffers, lTime: lTime)

        let audioInput = createAudioInput(audioBuffers: audioBuffers)
        let videoInput = createVideoInput(videoBuffers: videoBuffers)
        let adaptor = createPixelBufferAdaptor(videoInput: videoInput)

        let assetWriter = try AVAssetWriter(outputURL: url, fileType: .mp4)
        assetWriter.add(videoInput)
        assetWriter.add(audioInput)
        assetWriter.startWriting()
        assetWriter.startSession(atSourceTime: .zero)

        let writerQueue = DispatchQueue(label: "Asset Writer Queue")

        // Feed video frames to the writer as it becomes ready.
        videoInput.requestMediaDataWhenReady(on: writerQueue) {
            if let buffer = videoProvider.getNextBuffer() {
                adaptor.append(buffer, withPresentationTime: videoProvider.getPresentationTime())
            } else {
                videoInput.markAsFinished()

                if audioProvider.isFinished() {
                    self.semaphore.signal()
                }
            }
        }

        // Feed audio buffers to the writer as it becomes ready.
        audioInput.requestMediaDataWhenReady(on: writerQueue) {
            if let buffer = audioProvider.getNextBuffer() {
                audioInput.append(buffer)
            } else {
                audioInput.markAsFinished()

                if audioProvider.isFinished() {
                    self.semaphore.signal()
                }
            }
        }

        // Block this background thread until the inputs signal completion.
        semaphore.wait()

        assetWriter.finishWriting {
            switch assetWriter.status {
            case .completed:
                msgRelatos("Completed.")
                UISaveVideoAtPathToSavedPhotosAlbum(url.path, nil, nil, nil)
            case .failed:
                if let error = assetWriter.error {
                    msgErro("Error: \(error.localizedDescription)")
                } else {
                    msgRelatos("Nothing recorded.")
                }
            default:
                msgRelatos("Unexpected writer status.")
            }
        }
    }
}

extension TesteFala: AVSpeechSynthesizerDelegate {
    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didFinish utterance: AVSpeechUtterance) {
        // Let nextText(_:) proceed once the utterance has been fully written.
        semaphore.signal()
    }
}
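For completeness, the tail end of nextText(_:) can be made fully asynchronous, with no semaphore, by bridging finishWriting into async/await and then using the async performChanges overload (iOS 15+). This is a sketch, not the poster's code, and it assumes the same assetWriter and url as above:

import AVFoundation
import Photos

func finishAndSave(_ assetWriter: AVAssetWriter, url: URL) async throws {
    // Bridge the callback-based finishWriting into async/await.
    await withCheckedContinuation { (continuation: CheckedContinuation<Void, Never>) in
        assetWriter.finishWriting {
            continuation.resume()
        }
    }

    guard assetWriter.status == .completed else {
        throw assetWriter.error ?? CocoaError(.fileWriteUnknown)
    }

    // Suspends the task instead of blocking a thread.
    try await PHPhotoLibrary.shared().performChanges {
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
    }
}

Because nothing here blocks, the UI stays responsive even for a 50-minute video, and the completion or error can be reported from wherever the surrounding Task awaits this function.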