/// Dispatches a captured sample buffer onto the movie-writing queue and routes it
/// to the video or audio writing path, lazily configuring the matching writer input
/// the first time a buffer of that kind arrives.
/// - Parameters:
///   - connection: The connection this buffer arrived on.
///   - video: The capture session's video connection, used to classify `connection`.
///   - audio: The capture session's audio connection, used to classify `connection`.
///   - buffer: The sample buffer to write.
///   - isFrontPosition: Unused here; kept for interface compatibility with callers.
public func writeData(connection: AVCaptureConnection, video: AVCaptureConnection, audio: AVCaptureConnection, buffer: CMSampleBuffer, isFrontPosition: Bool = false) {
    movieWritingQueue.async { [weak self] in
        guard let self = self else { return }
        self.numberOfThread += 1
        if connection == video {
            // First video buffer: configure the video writer input from its format description.
            if !self.readyToRecordVideo, let formatDescription = CMSampleBufferGetFormatDescription(buffer) {
                self.readyToRecordVideo = self.setupAssetWriterVideoInput(formatDescription) == nil
            }
            guard self.inputsReadyToRecord() else { return }
            self.writeSampleBuffer(sampleBuffer: buffer, mediaType: .video)
        } else if connection == audio {
            // First audio buffer: configure the audio writer input from its format description.
            if !self.readyToRecordAudio, let formatDescription = CMSampleBufferGetFormatDescription(buffer) {
                self.readyToRecordAudio = self.setupAssetWriterAudioInput(formatDescription) == nil
            }
            guard self.inputsReadyToRecord() else { return }
            self.writeSampleBuffer(sampleBuffer: buffer, mediaType: .audio)
        }
    }
}
/// Appends one sample buffer to the asset writer, compensating for the time gap
/// introduced by pause/resume (`isDiscont`) by shifting timestamps with `timeOffset`.
/// - Parameters:
///   - sampleBuffer: The buffer to append.
///   - mediaType: `.video` or `.audio`; any other type is ignored.
public func writeSampleBuffer(sampleBuffer: CMSampleBuffer, mediaType: AVMediaType) {
    // Lazily start the writer session at the first buffer's presentation time.
    if movieWriter?.status == .unknown {
        if let writer = movieWriter, writer.startWriting() {
            writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
        } else {
            logger.i(" --- \(String(describing: movieWriter?.error)) --- ")
        }
    }
    guard movieWriter?.status == .writing else { return }
    let isVideo = (mediaType == .video)
    let sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    // Drop buffers that arrive closer than 1.6 ms to the previous one of the same kind.
    if let lastOrigin = isVideo ? lastVideoTimeOrigin : lastAudioTimeOrigin,
       lastOrigin.isValid, lastOrigin.seconds != 0,
       sampleTime.seconds - lastOrigin.seconds < 0.0016 {
        print("drop")
        return
    }
    // Record this buffer's end time on the capture (un-adjusted) timeline,
    // used above for the minimum-spacing drop check.
    var originEnd = sampleTime
    let originDuration = CMSampleBufferGetDuration(sampleBuffer)
    if originDuration.value > 0 {
        originEnd = CMTimeAdd(originEnd, originDuration)
    }
    if isVideo {
        lastVideoTimeOrigin = originEnd
    } else {
        lastAudioTimeOrigin = originEnd
    }
    if isDiscont {
        isDiscont = false
        // First buffer after resuming: accumulate the paused interval into `timeOffset`.
        var pts = sampleTime
        let lastWritten = isVideo ? lastVideoTime : lastAudioTime
        if let lastWritten = lastWritten, lastWritten.isValid {
            if let offset = timeOffset, offset.isValid {
                pts = CMTimeSubtract(pts, offset)
            }
            let gap = CMTimeSubtract(pts, lastWritten)
            if timeOffset?.value == 0 {
                timeOffset = gap
            } else {
                timeOffset = CMTimeAdd(timeOffset ?? CMTime(), gap)
            }
        }
        lastVideoTime?.flags = .valid
        lastAudioTime?.flags = .valid
    }
    var copyBuffer = sampleBuffer
    if let offset = timeOffset, offset.isValid, offset.value > 0 {
        // FIX: actually use the retimed buffer. Previously the result of
        // `adjustTime` was discarded (the assignment was commented out),
        // so the pause offset was never applied to the written samples.
        if let adjusted = adjustTime(sample: copyBuffer, offset: offset) {
            copyBuffer = adjusted
        }
    }
    // Record the end time of what we actually write (adjusted timeline),
    // used to measure the next pause gap.
    var writtenEnd = CMSampleBufferGetPresentationTimeStamp(copyBuffer)
    let writtenDuration = CMSampleBufferGetDuration(copyBuffer)
    if writtenDuration.value > 0 {
        writtenEnd = CMTimeAdd(writtenEnd, writtenDuration)
    }
    if isVideo {
        lastVideoTime = writtenEnd
    } else {
        lastAudioTime = writtenEnd
    }
    if mediaType == .video {
        guard let videoInput = movieVideoInput, videoInput.isReadyForMoreMediaData else { return }
        if !videoInput.append(copyBuffer) {
            logger.i(" --- \(String(describing: movieWriter?.error)) --- ")
        }
    } else if mediaType == .audio {
        guard let audioInput = movieAudioInput, audioInput.isReadyForMoreMediaData else { return }
        if !audioInput.append(copyBuffer) {
            logger.i(" --- \(String(describing: movieWriter?.error)) --- ")
        }
    }
}
/// Returns a copy of `sample` with every timing entry shifted earlier by `offset`,
/// used to splice paused intervals out of the recording timeline.
/// - Parameters:
///   - sample: The buffer to retime; `nil` returns `nil`.
///   - offset: The accumulated pause duration to subtract; `nil` returns `nil`.
/// - Returns: The retimed buffer, or `nil` when there are no timing entries or the copy fails.
func adjustTime(sample: CMSampleBuffer?, offset: CMTime?) -> CMSampleBuffer? {
    guard let sample = sample, let offset = offset else {
        return nil
    }
    var count: CMItemCount = 0
    CMSampleBufferGetSampleTimingInfoArray(sample, entryCount: 0, arrayToFill: nil, entriesNeededOut: &count)
    guard count > 0 else { return nil }
    // FIX: `allocate(capacity:)` takes an ELEMENT count, not a byte count.
    // The old `MemoryLayout.size(ofValue: CMSampleTimingInfo.self) * count`
    // measured the size of a metatype value — a meaningless over-allocation.
    let timingInfo = UnsafeMutablePointer<CMSampleTimingInfo>.allocate(capacity: count)
    // FIX: deallocate on every exit path, not just the happy one.
    defer { timingInfo.deallocate() }
    CMSampleBufferGetSampleTimingInfoArray(sample, entryCount: count, arrayToFill: timingInfo, entriesNeededOut: &count)
    // FIX: `0...count` read and wrote one element past the end of the array
    // (and touched index 0 of an empty allocation when count == 0) — this
    // out-of-bounds access is the likely source of the reported crash.
    for index in 0..<count {
        // Buffers without a decode timestamp carry an invalid CMTime; skip those.
        if timingInfo[index].decodeTimeStamp.isValid {
            timingInfo[index].decodeTimeStamp = CMTimeSubtract(timingInfo[index].decodeTimeStamp, offset)
        }
        if timingInfo[index].presentationTimeStamp.isValid {
            timingInfo[index].presentationTimeStamp = CMTimeSubtract(timingInfo[index].presentationTimeStamp, offset)
        }
    }
    var retimed: CMSampleBuffer?
    // FIX: check the status instead of returning a possibly-nil buffer blindly.
    let status = CMSampleBufferCreateCopyWithNewTiming(allocator: nil, sampleBuffer: sample, sampleTimingEntryCount: count, sampleTimingArray: timingInfo, sampleBufferOut: &retimed)
    return status == noErr ? retimed : nil
}
// NOTE(author): I need to pause while recording a movie and then shift the sample
// buffers' timestamps. After adding the adjust function, a background thread crashed.
// I guessed the sample buffer was being released before I used it, so I tried copying
// it — but then all the buffers were dropped because the buffer pool ran out
// (out-of-buffers). In Objective-C I would have used CFRetain(), but I couldn't find
// an equivalent in Swift (ARC retains the buffer automatically while it is referenced),
// and because the work runs on a DispatchQueue I couldn't pin down where the crash was.