Unable to Render Filter in Live Camera Stream

Hi,

I would like to ask for some help with rendering a filter onto a live camera stream. I followed the documentation here https://developer.apple.com/documentation/avfoundation/cameras_and_media_capture/avcamfilter_applying_filters_to_a_capture_stream#see-also but I'm unable to make it work. Here is the code I'm having trouble with.


//
//  ViewController.swift
//  Fooful Camera App
//
//  Created by TokenNews on 14/08/2019.
//  Copyright © 2019 TokenNews. All rights reserved.
//

import UIKit
import AVFoundation
import CoreVideo
import Photos
import MobileCoreServices

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureDepthDataOutputDelegate, AVCaptureDataOutputSynchronizerDelegate {
    
    @IBOutlet weak var cameraView: PreviewMetalView!
    @IBOutlet weak private var resumeButton: UIButton!
    private var videoFilter: FilterRenderer?
    @IBOutlet weak var camButtonArea2: PreviewMetalView!
    private let filterRenderers: [FilterRenderer] = [RosyCIRenderer()]
    private var renderingEnabled = true
    private var depthVisualizationEnabled = false
    private let videoDataOutput = AVCaptureVideoDataOutput()
    private let photoOutput = AVCapturePhotoOutput()
    private let videoDepthMixer = VideoMixer()
    private let photoDepthMixer = VideoMixer()
    private var currentDepthPixelBuffer: CVPixelBuffer?
    private let videoDeviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera], mediaType: .video, position: .unspecified)
    private var videoInput: AVCaptureDeviceInput!
    private let session = AVCaptureSession()
    private let dataOutputQueue = DispatchQueue(label: "VideoDataQueue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
    private let depthDataOutput = AVCaptureDepthDataOutput()
    private var outputSynchronizer: AVCaptureDataOutputSynchronizer?
    private var isSessionRunning = false
    private let videoDepthConverter = DepthToGrayscaleConverter()
    private let photoDepthConverter = DepthToGrayscaleConverter()
    private let processingQueue = DispatchQueue(label: "photo processing queue", attributes: [], autoreleaseFrequency: .workItem)
    private let sessionQueue = DispatchQueue(label: "SessionQueue", attributes: [], autoreleaseFrequency: .workItem)
    @IBOutlet weak private var cameraUnavailableLabel: UILabel!
    
    private var photoFilter: FilterRenderer?
    
    private enum SessionSetupResult {
        case success
        case notAuthorized
        case configurationFailed
    }
    
    private var setupResult: SessionSetupResult = .success
    var captureSession = AVCaptureSession()
    
    var previewLayer:CALayer!
    
    var captureDevice:AVCaptureDevice!
    var takePhoto = false
    var frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
    var backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
    
    var btnFilter: Bool = true
    
    let myColor = UIColor(red: 46/255, green: 204/255, blue: 113/255, alpha: 1.0)
    
    override func viewDidLoad() {
        super.viewDidLoad()
        camButtonArea.isHidden = false
        scrollViewFilters.isHidden = true
        camButtonArea2.isHidden = true
        setNormalBtn.setTitleColor(myColor, for: UIControl.State.normal)
        setNormalBtn.setImage(UIImage(named:"icon_camera_normal_green.png"), for: .normal)
        
        self.videoFilter = self.filterRenderers[0]
        
        print("test \(self.filterRenderers[0].isPrepared)")
        prepareCamera()
        beginSession()
    }
    
    
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        
        
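        // Note: the AVCamFilter sample starts the session on its session queue
        // (after checking setupResult) rather than on the main thread.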
        self.session.startRunning()
        self.isSessionRunning = self.session.isRunning
        self.addObservers()
    }
    
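    // Configures the legacy `captureSession` used by the preview-layer path below.
    // This is separate from `session`, which feeds the Metal filter pipeline.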
    func prepareCamera() {
        
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
        
        if let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices.first {
            captureDevice = availableDevices
        }
    }
    
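    // Configures `session` following the AVCamFilter pipeline: a video data output
    // (delegate on dataOutputQueue), a photo output, and an optional depth data output.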
    func beginSession() {
        
        print("configureSession")
        if setupResult != .success {
            return
        }
        
        let defaultVideoDevice: AVCaptureDevice? = videoDeviceDiscoverySession.devices.first
        
        guard let videoDevice = defaultVideoDevice else {
            print("Could not find any video device")
            setupResult = .configurationFailed
            return
        }
        
        do {
            videoInput = try AVCaptureDeviceInput(device: videoDevice)
        } catch {
            print("Could not create video device input: \(error)")
            setupResult = .configurationFailed
            return
        }
        
        session.beginConfiguration()
        
        session.sessionPreset = AVCaptureSession.Preset.photo
        
        // Add a video input.
        guard session.canAddInput(videoInput) else {
            print("Could not add video device input to the session")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
        session.addInput(videoInput)
        
        // Add a video data output
        if session.canAddOutput(videoDataOutput) {
            session.addOutput(videoDataOutput)
            videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
            videoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
        } else {
            print("Could not add video data output to the session")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
        
        // Add photo output
        if session.canAddOutput(photoOutput) {
            session.addOutput(photoOutput)
            
            photoOutput.isHighResolutionCaptureEnabled = true
            
            if depthVisualizationEnabled {
                if photoOutput.isDepthDataDeliverySupported {
                    photoOutput.isDepthDataDeliveryEnabled = true
                } else {
                    depthVisualizationEnabled = false
                }
            }
            
        } else {
            print("Could not add photo output to the session")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
        
        // Add a depth data output
        if session.canAddOutput(depthDataOutput) {
            session.addOutput(depthDataOutput)
            depthDataOutput.setDelegate(self, callbackQueue: dataOutputQueue)
            depthDataOutput.isFilteringEnabled = false
            if let connection = depthDataOutput.connection(with: .depthData) {
                connection.isEnabled = depthVisualizationEnabled
            } else {
                print("No AVCaptureConnection")
            }
        } else {
            print("Could not add depth data output to the session")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
        
        if depthVisualizationEnabled {
            // Use an AVCaptureDataOutputSynchronizer to synchronize the video data and depth data outputs.
            // The first output in the dataOutputs array, in this case the AVCaptureVideoDataOutput, is the "master" output.
            outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoDataOutput, depthDataOutput])
            if let unwrappedOutputSynchronizer = outputSynchronizer {
                unwrappedOutputSynchronizer.setDelegate(self, queue: dataOutputQueue)
            }
        } else {
            outputSynchronizer = nil
        }
        
        capFrameRate(videoDevice: videoDevice)
        
        session.commitConfiguration()
    }
    
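    // AVCaptureDataOutputSynchronizerDelegate callback. This only fires when
    // `outputSynchronizer` is created, i.e. when depthVisualizationEnabled is true;
    // otherwise frames arrive through captureOutput(_:didOutput:from:) instead.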
    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
       
        if let syncedDepthData: AVCaptureSynchronizedDepthData = synchronizedDataCollection.synchronizedData(for: depthDataOutput) as? AVCaptureSynchronizedDepthData {
            if !syncedDepthData.depthDataWasDropped {
                let depthData = syncedDepthData.depthData
                processDepth(depthData: depthData)
            }
        }
        
        if let syncedVideoData: AVCaptureSynchronizedSampleBufferData = synchronizedDataCollection.synchronizedData(for: videoDataOutput) as? AVCaptureSynchronizedSampleBufferData {
            if !syncedVideoData.sampleBufferWasDropped {
                let videoSampleBuffer = syncedVideoData.sampleBuffer
                processVideo(sampleBuffer: videoSampleBuffer)
            }
        }
    }
    
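    // Converts the depth map to a grayscale pixel buffer and caches it for mixing
    // into the video stream in processVideo(sampleBuffer:).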
    func processDepth(depthData: AVDepthData) {
        print("processDepth")
        if !renderingEnabled {
            return
        }
        
        if !depthVisualizationEnabled {
            return
        }
        
        if !videoDepthConverter.isPrepared {
            var depthFormatDescription: CMFormatDescription?
            CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                                         imageBuffer: depthData.depthDataMap,
                                                         formatDescriptionOut: &depthFormatDescription)
            if let unwrappedDepthFormatDescription = depthFormatDescription {
                videoDepthConverter.prepare(with: unwrappedDepthFormatDescription, outputRetainedBufferCountHint: 2)
            }
        }
        
        guard let depthPixelBuffer = videoDepthConverter.render(pixelBuffer: depthData.depthDataMap) else {
            print("Unable to process depth")
            return
        }
        
        currentDepthPixelBuffer = depthPixelBuffer
    }
    @IBOutlet weak var takePhotoOutlet: UIButton!
    @IBAction func takePhoto(_ sender: Any) {
        takePhoto = true
    }
    
    @IBOutlet weak var takePhoto2: UIButton!
    @IBAction func takePhoto2(_ sender: Any) {
        takePhoto = true
        
    }
    
    @IBAction func rotateCamera(_ sender: Any) {
        
        guard let currentCameraInput: AVCaptureInput = captureSession.inputs.first else {
            return
        }
        
        if let input = currentCameraInput as? AVCaptureDeviceInput {
            if input.device.position == .back {
                switchToFront()
            } else if input.device.position == .front {
                switchToBack()
            }
        }
    }
    
    @IBOutlet weak var camButtonArea: UIView!


    @IBOutlet weak var scrollViewFilters: UIScrollView!
    
    
    @IBOutlet weak var btnRec: UIButton!
    
    @IBAction func btnRec(_ sender: Any) {
        
        camButtonArea.isHidden = true
        scrollViewFilters.isHidden = false
        
        btnFilter = !btnFilter

        btnRec.setTitleColor(myColor, for: UIControl.State.normal)
        btnRec.setImage(UIImage(named:"icon_camera_filter_green.png"), for: .normal)
        
        setNormalBtn.setTitleColor(UIColor.black, for: UIControl.State.normal)
        setNormalBtn.setImage(UIImage(named:"icon_camera_normal.png"), for: .normal)
        brightBtn.setTitleColor(UIColor.black, for: UIControl.State.normal)
        brightBtn.setImage(UIImage(named:"icon_camera_bright.png"), for: .normal)
        camButtonArea2.isHidden = false
    }
    
    
    @IBOutlet weak var setNormalBtn: UIButton!
    
    @IBAction func setNormalBtn(_ sender: Any) {
        camButtonArea.isHidden = false
        scrollViewFilters.isHidden = true
        
        setNormalBtn.setTitleColor(myColor, for: UIControl.State.normal)
        setNormalBtn.setImage(UIImage(named:"icon_camera_normal_green.png"), for: .normal)
        
        btnRec.setTitleColor(UIColor.black, for: UIControl.State.normal)
        btnRec.setImage(UIImage(named:"icon_camera_filter.png"), for: .normal)
        brightBtn.setTitleColor(UIColor.black, for: UIControl.State.normal)
        brightBtn.setImage(UIImage(named:"icon_camera_bright.png"), for: .normal)
        

        camButtonArea2.isHidden = true
    }
    
    @IBOutlet weak var brightBtn: UIButton!
    
    @IBAction func brightBtn(_ sender: Any) {
   
        camButtonArea.isHidden = true
        brightBtn.setTitleColor(myColor, for: UIControl.State.normal)
        brightBtn.setImage(UIImage(named:"icon_camera_bright_green.png"), for: .normal)
        
        setNormalBtn.setTitleColor(UIColor.black, for: UIControl.State.normal)
        setNormalBtn.setImage(UIImage(named:"icon_camera_normal.png"), for: .normal)
        btnRec.setTitleColor(UIColor.black, for: UIControl.State.normal)
        btnRec.setImage(UIImage(named:"icon_camera_filter.png"), for: .normal)
        

        camButtonArea2.isHidden = false
        
    }
    @IBAction func flash(_ sender: Any) {
        
        
    }
  
    
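    // Rebuilds the legacy `captureSession` and attaches an AVCaptureVideoPreviewLayer
    // on top of the PreviewMetalView. Frames from this session still reach
    // captureOutput(_:didOutput:from:), but the preview layer itself shows the
    // unfiltered feed.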
    func switchToFront() {
        
        if frontCamera?.isConnected == true {
            captureSession.stopRunning()
            guard let captureDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else { return }
            do {
                let input = try AVCaptureDeviceInput(device: captureDevice)
                
                captureSession = AVCaptureSession()
                captureSession.beginConfiguration()
                captureSession.addInput(input)
                
                let dataOutput = AVCaptureVideoDataOutput()
                dataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
                dataOutput.alwaysDiscardsLateVideoFrames = true
                if captureSession.canAddOutput(dataOutput) {
                    captureSession.addOutput(dataOutput)
                }
                dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "com.tokennews.captureQueue"))
                captureSession.commitConfiguration()
                
                let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                previewLayer.videoGravity = .resizeAspectFill
                previewLayer.frame = cameraView.layer.bounds
                self.previewLayer = previewLayer
                cameraView.layer.addSublayer(previewLayer)
                
                captureSession.startRunning()
            } catch {
                print(error)
            }
        }
        
    }
    
    func switchToBack(){

        if backCamera?.isConnected == true {
            captureSession.stopRunning()
            guard let captureDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else { return }
            do {
                let input = try AVCaptureDeviceInput(device: captureDevice)
                
                captureSession = AVCaptureSession()
                captureSession.beginConfiguration()
                captureSession.addInput(input)
                
                let dataOutput = AVCaptureVideoDataOutput()
                dataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
                dataOutput.alwaysDiscardsLateVideoFrames = true
                if captureSession.canAddOutput(dataOutput) {
                    captureSession.addOutput(dataOutput)
                }
                dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "com.tokennews.captureQueue"))
                captureSession.commitConfiguration()
                
                let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                previewLayer.videoGravity = .resizeAspectFill
                previewLayer.frame = cameraView.layer.bounds
                self.previewLayer = previewLayer
                cameraView.layer.addSublayer(previewLayer)
                
                captureSession.startRunning()
            } catch {
                print(error)
            }
        }
        
    }
    
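    // AVCaptureVideoDataOutputSampleBufferDelegate callback. Both `videoDataOutput`
    // (on `session`) and the ad-hoc data outputs created in switchToFront/switchToBack
    // deliver their sample buffers here.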
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        
        processVideo(sampleBuffer: sampleBuffer)
        
        if takePhoto {
            takePhoto = false
            
            DispatchQueue.main.async {
                // UIKit work (instantiating and presenting a view controller) must run on the main thread.
                let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoViewController
                self.present(photoVC, animated: true, completion: {
                    self.stopCaptureSession()
                })
            }
        }
    }
    
    
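    // Currently unused helper: renders a sample buffer to a UIImage through CIContext.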
    func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
        
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()
            
            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
            
            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
            }
        }
        
        return nil
    }
    
    func stopCaptureSession() {
        self.captureSession.stopRunning()
        
        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }
    
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

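    // Core of the filter pipeline: lazily prepares the renderer with the stream's
    // format description, pushes each pixel buffer through filter.render(pixelBuffer:),
    // optionally mixes in depth, and hands the result to the PreviewMetalView.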
    func processVideo(sampleBuffer: CMSampleBuffer) {
        
        if !renderingEnabled {
            return
        }
        
        guard let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
            let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else {
                return
        }
        
        var finalVideoPixelBuffer = videoPixelBuffer
        if let filter = videoFilter {
            if !filter.isPrepared {
                /*
                 outputRetainedBufferCountHint is the number of pixel buffers the renderer retains. This value informs the renderer
                 how to size its buffer pool and how many pixel buffers to preallocate. Allow 3 frames of latency to cover the dispatch_async call.
                 */
                filter.prepare(with: formatDescription, outputRetainedBufferCountHint: 3)
                print("filter is now \(filter.isPrepared)")
                
            }
            
            
            // Send the pixel buffer through the filter
            guard let filteredBuffer = filter.render(pixelBuffer: finalVideoPixelBuffer) else {
                print("Unable to filter video buffer")
                
                return
            }
            
            finalVideoPixelBuffer = filteredBuffer
        }
        
        if depthVisualizationEnabled {
            if !videoDepthMixer.isPrepared {
                videoDepthMixer.prepare(with: formatDescription, outputRetainedBufferCountHint: 3)
            }

            if let depthBuffer = currentDepthPixelBuffer {

                // Mix the video buffer with the last depth data received.
                guard let mixedBuffer = videoDepthMixer.mix(videoPixelBuffer: finalVideoPixelBuffer, depthPixelBuffer: depthBuffer) else {
                    print("Unable to combine video and depth")
                    return
                }

                finalVideoPixelBuffer = mixedBuffer
            }
        }
        
        cameraView.pixelBuffer = finalVideoPixelBuffer
    }
    
    
    // MARK: - Utilities
    private func capFrameRate(videoDevice: AVCaptureDevice) {
       
        print("capFrameRate")
        if self.photoOutput.isDepthDataDeliverySupported {
            // Cap the video framerate at the max depth framerate.
            if let frameDuration = videoDevice.activeDepthDataFormat?.videoSupportedFrameRateRanges.first?.minFrameDuration {
                do {
                    try videoDevice.lockForConfiguration()
                    videoDevice.activeVideoMinFrameDuration = frameDuration
                    videoDevice.unlockForConfiguration()
                } catch {
                    print("Could not lock device for configuration: \(error)")
                }
            }
        }
    }
    
    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .all
    }
    
    private var sessionRunningContext = 0
    
    private func addObservers() {
        print("addObservers")
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(didEnterBackground),
                                               name: UIApplication.didEnterBackgroundNotification,
                                               object: nil)
        
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(willEnterForeground),
                                               name: UIApplication.willEnterForegroundNotification,
                                               object: nil)
        
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(thermalStateChanged),
                                               name: ProcessInfo.thermalStateDidChangeNotification,
                                               object: nil)
        
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(sessionRuntimeError),
                                               name: NSNotification.Name.AVCaptureSessionRuntimeError,
                                               object: session)
        
        session.addObserver(self, forKeyPath: "running", options: NSKeyValueObservingOptions.new, context: &sessionRunningContext)
        
        // A session can run only when the app is full screen. It will be interrupted in a multi-app layout.
        // Add observers to handle these session interruptions and inform the user.
        // See AVCaptureSessionWasInterruptedNotification for other interruption reasons.
        
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(sessionWasInterrupted),
                                               name: NSNotification.Name.AVCaptureSessionWasInterrupted,
                                               object: session)
        
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(sessionInterruptionEnded),
                                               name: NSNotification.Name.AVCaptureSessionInterruptionEnded,
                                               object: session)
        
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(subjectAreaDidChange),
                                               name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange,
                                               object: videoInput.device)
    }
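    
    // The KVO registration above needs a matching observeValue override; without one,
    // NSObject's default implementation raises an exception when "running" changes.
    // A minimal sketch (the AVCamFilter sample also updates its UI here):
    override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
        if context != &sessionRunningContext {
            // Forward unrecognized observations to the superclass.
            super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
        }
    }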

    @objc
    func didEnterBackground(notification: NSNotification) {
        print("didEnterBackground")
        // Free up resources.
        dataOutputQueue.async {
            self.renderingEnabled = false
            if let videoFilter = self.videoFilter {
                videoFilter.reset()
            }
            self.videoDepthMixer.reset()
            self.currentDepthPixelBuffer = nil
            self.videoDepthConverter.reset()
            self.cameraView.pixelBuffer = nil
            self.cameraView.flushTextureCache()
        }
        processingQueue.async {
            if let photoFilter = self.photoFilter {
                photoFilter.reset()
            }
            self.photoDepthMixer.reset()
            self.photoDepthConverter.reset()
        }
    }
    
    @objc
    func willEnterForeground(notification: NSNotification) {
        print("willEnterForeground")
        dataOutputQueue.async {
            self.renderingEnabled = true
        }
    }
    
    // Use this opportunity to take corrective action to help cool the system down.
    @objc
    func thermalStateChanged(notification: NSNotification) {
        print("thermalStateChanged")
        if let processInfo = notification.object as? ProcessInfo {
            showThermalState(state: processInfo.thermalState)
        }
    }
    
    func showThermalState(state: ProcessInfo.ThermalState) {
        print("showThermalState")
        DispatchQueue.main.async {
            var thermalStateString = "UNKNOWN"
            if state == .nominal {
                thermalStateString = "NOMINAL"
            } else if state == .fair {
                thermalStateString = "FAIR"
            } else if state == .serious {
                thermalStateString = "SERIOUS"
            } else if state == .critical {
                thermalStateString = "CRITICAL"
            }
            
            let message = NSLocalizedString("Thermal state: \(thermalStateString)", comment: "Alert message when thermal state has changed")
            let actions = [
                UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
                              style: .cancel,
                              handler: nil)]
            
            self.alert(title: "AVCamFilter", message: message, actions: actions)
        }
    }
    
    func alert(title: String, message: String, actions: [UIAlertAction]) {
        
        print("alert")
        let alertController = UIAlertController(title: title,
                                                message: message,
                                                preferredStyle: .alert)
        
        actions.forEach {
            alertController.addAction($0)
        }
        
        self.present(alertController, animated: true, completion: nil)
    }
    
    private func focus(with focusMode: AVCaptureDevice.FocusMode, exposureMode: AVCaptureDevice.ExposureMode, at devicePoint: CGPoint, monitorSubjectAreaChange: Bool) {
        
        print("focus")
        
        sessionQueue.async {
            let videoDevice = self.videoInput.device
            
            do {
                try videoDevice.lockForConfiguration()
                if videoDevice.isFocusPointOfInterestSupported && videoDevice.isFocusModeSupported(focusMode) {
                    videoDevice.focusPointOfInterest = devicePoint
                    videoDevice.focusMode = focusMode
                }
                
                if videoDevice.isExposurePointOfInterestSupported && videoDevice.isExposureModeSupported(exposureMode) {
                    videoDevice.exposurePointOfInterest = devicePoint
                    videoDevice.exposureMode = exposureMode
                }
                
                videoDevice.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
                videoDevice.unlockForConfiguration()
            } catch {
                print("Could not lock device for configuration: \(error)")
            }
        }
    }
    @objc
    func sessionRuntimeError(notification: NSNotification) {
        print("sessionRuntimeError")
        guard let errorValue = notification.userInfo?[AVCaptureSessionErrorKey] as? NSError else {
            return
        }
        
        let error = AVError(_nsError: errorValue)
        print("Capture session runtime error: \(error)")
        
        /*
         Automatically try to restart the session running if media services were
         reset and the last start running succeeded. Otherwise, enable the user
         to try to resume the session running.
         */
        if error.code == .mediaServicesWereReset {
            sessionQueue.async {
                if self.isSessionRunning {
                    self.session.startRunning()
                    self.isSessionRunning = self.session.isRunning
                } else {
                    DispatchQueue.main.async {
//                        self.resumeButton.isHidden = false
                    }
                }
            }
        } else {
//            resumeButton.isHidden = false
        }
    }
    
    @objc
    func sessionWasInterrupted(notification: NSNotification) {
        print("sessionWasInterrupted")
        // In iOS 9 and later, the userInfo dictionary contains information on why the session was interrupted.
        if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?,
            let reasonIntegerValue = userInfoValue.integerValue,
            let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) {
            print("Capture session was interrupted with reason \(reason)")
            
            if reason == .videoDeviceInUseByAnotherClient {
                // Simply fade-in a button to enable the user to try to resume the session running.
                resumeButton.isHidden = false
                resumeButton.alpha = 0.0
                UIView.animate(withDuration: 0.25) {
                    self.resumeButton.alpha = 1.0
                }
            } else if reason == .videoDeviceNotAvailableWithMultipleForegroundApps {
                // Simply fade-in a label to inform the user that the camera is unavailable.
                cameraUnavailableLabel.isHidden = false
                cameraUnavailableLabel.alpha = 0.0
                UIView.animate(withDuration: 0.25) {
                    self.cameraUnavailableLabel.alpha = 1.0
                }
            }
        }
    }
    
    @objc
    func sessionInterruptionEnded(notification: NSNotification) {
        print("sessionInterruptionEnded")
        if !resumeButton.isHidden {
            UIView.animate(withDuration: 0.25,
                           animations: {
                            self.resumeButton.alpha = 0
            }, completion: { _ in
                self.resumeButton.isHidden = true
            }
            )
        }
        if !cameraUnavailableLabel.isHidden {
            UIView.animate(withDuration: 0.25,
                           animations: {
                            self.cameraUnavailableLabel.alpha = 0
            }, completion: { _ in
                self.cameraUnavailableLabel.isHidden = true
            }
            )
        }
    }
    
    @objc
    func subjectAreaDidChange(notification: NSNotification) {
        print("subjectAreaDidChange")
        let devicePoint = CGPoint(x: 0.5, y: 0.5)
        focus(with: .continuousAutoFocus, exposureMode: .continuousAutoExposure, at: devicePoint, monitorSubjectAreaChange: false)
    }
}


I hope someone can help me with this. Thank you.
