I’ve tried both AVCaptureVideoDataOutputSampleBufferDelegate (captureOutput) and AVCaptureDataOutputSynchronizerDelegate (dataOutputSynchronizer), but the number of depth frames and saved timestamps is significantly lower than the number of frames in the .mp4 file written by AVAssetWriter.
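
For context, my capture setup looks roughly like this (simplified and partly from memory, so treat the exact configuration calls as approximate; the property names match the delegate code below):

// (inside my camera/session manager class, which conforms to AVCaptureDataOutputSynchronizerDelegate)
private let session = AVCaptureSession()
private let videoDataOutput = AVCaptureVideoDataOutput()
private let depthDataOutput = AVCaptureDepthDataOutput()
private var outputSynchronizer: AVCaptureDataOutputSynchronizer?
private let dataOutputQueue = DispatchQueue(label: "dataOutputQueue")

func configureOutputs() {
    if session.canAddOutput(videoDataOutput) {
        session.addOutput(videoDataOutput)
    }
    if session.canAddOutput(depthDataOutput) {
        session.addOutput(depthDataOutput)
        // Depth frames that arrive late are discarded rather than queued
        depthDataOutput.alwaysDiscardsLateDepthData = true
    }
    // Deliver video and depth together through the synchronizer instead of separate delegates
    outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoDataOutput, depthDataOutput])
    outputSynchronizer?.setDelegate(self, queue: dataOutputQueue)
}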
In my code, I save:

- Timestamps for each frame to a metadata file (see the Timestamp sketch below)
- Depth frames to a binary file
- Video to an .mp4 file

If I record a 4-second video at 30fps, the .mp4 file correctly plays for 4 seconds, but the number of stored timestamps and depth frames is much lower—around 70 frames instead of the expected 120.
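
The Timestamp entries I store in the metadata file are just the CMTime components of each frame's presentation timestamp, roughly like this (simplified; the seconds helper is only for illustration):

import CoreMedia

struct Timestamp: Codable {
    let frame: Int              // index of the frame within this recording
    let value: CMTimeValue      // CMTime.value of the presentation timestamp
    let timescale: CMTimeScale  // CMTime.timescale of the presentation timestamp

    // The PTS in seconds, handy for comparing against the .mp4 duration
    var seconds: Double {
        Double(value) / Double(timescale)
    }
}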
Does anyone know why this mismatch happens?
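
Here is my dataOutputSynchronizer(_:didOutput:) implementation (the AVAssetWriter and its inputs are configured elsewhere):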
func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer,
                                didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
        
        // Read all outputs
        guard let syncedDepthData: AVCaptureSynchronizedDepthData =
                synchronizedDataCollection.synchronizedData(for: depthDataOutput) as? AVCaptureSynchronizedDepthData,
              let syncedVideoData: AVCaptureSynchronizedSampleBufferData =
                synchronizedDataCollection.synchronizedData(for: videoDataOutput) as? AVCaptureSynchronizedSampleBufferData else {
            // only work on synced pairs
            return
        }
        
        // Skip this frame entirely if either the depth data or the video buffer was dropped
        if syncedDepthData.depthDataWasDropped || syncedVideoData.sampleBufferWasDropped {
            return
        }
        
        let depthData = syncedDepthData.depthData
        let depthPixelBuffer = depthData.depthDataMap
        let sampleBuffer = syncedVideoData.sampleBuffer
        guard let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
              CMSampleBufferGetFormatDescription(sampleBuffer) != nil else {
            return
        }
        
        // Push the video frame to the live preview stream
        addToPreviewStream?(CIImage(cvPixelBuffer: videoPixelBuffer))
        
        if !canWrite() {
            return
        }
        
        // Extract the presentation timestamp (PTS) from the sample buffer
        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        
        // sessionAtSourceTime is the PTS of the first buffer written to the file
        if self.sessionAtSourceTime == nil {
            // Don't start writing until the buffer's PTS reaches recordFromTime
            // (the buffers lag slightly behind the requested start time)
            guard timestamp >= self.recordFromTime! else { return }
            self.sessionAtSourceTime = timestamp
            self.videoWriter!.startSession(atSourceTime: timestamp)
        }
        
        if self.videoWriterInput!.isReadyForMoreMediaData {
            self.videoWriterInput!.append(sampleBuffer)
            // Record the PTS for this frame; the frame index is just the running count
            self.videoTimestamps.append(
                Timestamp(
                    frame: videoTimestamps.count,
                    value: timestamp.value,
                    timescale: timestamp.timescale
                )
            )
            
            // Store the matching depth frame with the same timestamp
            depthCapture.addDepthData(pixelBuffer: depthPixelBuffer, timestamp: timestamp)
        }
    }
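
For reference, this is roughly how I compare the counts afterwards (not my exact code; countVideoFrames is just an illustrative helper whose result I compare against videoTimestamps.count and the number of depth frames in the binary file):

import AVFoundation

// Decode the written .mp4 and count its frames
func countVideoFrames(in url: URL) throws -> Int {
    let asset = AVURLAsset(url: url)
    guard let track = asset.tracks(withMediaType: .video).first else { return 0 }

    let reader = try AVAssetReader(asset: asset)
    let output = AVAssetReaderTrackOutput(track: track, outputSettings: nil)
    reader.add(output)
    reader.startReading()

    var count = 0
    while output.copyNextSampleBuffer() != nil {
        count += 1   // each sample buffer is one video frame
    }
    return count     // ~120 for a 4-second 30 fps recording, vs ~70 saved timestamps
}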