//
//  CreateViewController.swift
//  ModelInput
//
//  Created by ๅ•ไป•ๆˆ on 2020/7/20.
//

import UIKit
import SnapKit
import AVFoundation
import CoreMotion

/// Notifies the presenting controller that capture has finished and this screen should be dismissed.
/// Constrained to AnyObject so the delegate can be held weakly and avoid a retain cycle.
protocol PresentDelegate: AnyObject {
    func close()
}
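
// MARK: - Usage sketch (illustrative, not part of the original file)
//
// A presenting controller would conform to PresentDelegate, hand over the
// target file name, and present this screen. The file name shown here is a
// hypothetical example; the real naming scheme lives in the caller.
//
//     final class SomePresenter: UIViewController, PresentDelegate {
//         func startCapture() {
//             let create = CreateViewController()
//             create.delegate = self
//             create.updateFileName("capture.bin")   // hypothetical name
//             present(create, animated: true)
//         }
//
//         func close() {
//             dismiss(animated: true)
//         }
//     }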

final class CreateViewController: UIViewController, AVCaptureDataOutputSynchronizerDelegate {

    private let captureSession = AVCaptureSession()
    private let receiveVideoQueue = DispatchQueue(label: "Receive.video")
    private let cameraView = UIView()
    weak var delegate: PresentDelegate?
    private var currentFile: FileUtil?
    private var fileDic: [String: Any] = [:]
    private var filename = ""
    private var frameV: UInt64 = 0
    private let output = AVCaptureVideoDataOutput()
    private let depthOutput = AVCaptureDepthDataOutput()
    private var supportsDepth = false
    // The synchronizer must be kept alive for the session's lifetime. The
    // original stored it in a local variable inside setup(), so it was
    // deallocated immediately and the delegate callback never fired.
    private var outputSynchronizer: AVCaptureDataOutputSynchronizer?

    override func viewDidLoad() {
        super.viewDidLoad()

        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                // The completion handler may arrive on an arbitrary queue.
                DispatchQueue.main.async {
                    if granted {
                        self.setup()
                    } else {
                        self.showCameraDeniedAlert()
                    }
                }
            }
        case .restricted, .denied:
            showCameraDeniedAlert()
        default:
            setup()
        }

        self.view.addSubview(cameraView)
        cameraView.snp.makeConstraints { make in
            make.top.bottom.left.right.equalToSuperview()
        }

        let btn = UIButton(type: .custom)
        btn.setImage(UIImage(named: "record"), for: .normal)
        btn.addTarget(self, action: #selector(onBtnClick(_:)), for: .touchUpInside)
        self.view.addSubview(btn)
        btn.snp.makeConstraints { make in
            make.width.height.equalTo(60)
            make.centerX.equalTo(self.view)
            make.bottom.equalTo(self.view.safeAreaLayoutGuide.snp.bottom).offset(-30)
        }
    }

    // The original built this alert in three separate branches but never called
    // present(_:animated:), so it was never shown on screen.
    private func showCameraDeniedAlert() {
        let alert = UIAlertController(
            title: "Wrong".location(),
            message: "You have forbidden our APP to access the camera, please go to Settings to enable it".location(),
            preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK".location(), style: .cancel, handler: nil))
        present(alert, animated: true, completion: nil)
    }

    @objc func onBtnClick(_ sender: UIButton) {
        if CoreMotitonMenager.shard.isStart {
            // Stop: flush motion recording, write metadata, close the file, dismiss.
            CoreMotitonMenager.shard.stopRecord()
            CoreMotitonMenager.shard.stop()
            fileDic["frames"] = frameV
            if fileDic.keys.count > 5 {
                currentFile?.setupDevice(fileDic)
            }
            currentFile?.close()
            delegate?.close()
        } else {
            // Start: open the output file (if a name was provided) and begin recording.
            if !filename.isEmpty {
                let doc = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
                let fileurl = doc.appendingPathComponent("Model").appendingPathComponent(filename)
                do {
                    currentFile = try FileUtil(fileurl)
                } catch {
                    delegate?.close()
                }
            }
            CoreMotitonMenager.shard.startRecord()
            sender.setImage(UIImage(named: "stop"), for: .normal)
        }
    }

    func setup() {
        // guard AVCaptureMultiCamSession.isMultiCamSupported else {
        //     print("MultiCam not supported on this device")
        //     return
        // }
        fileDic["device"] = UIDevice.current.model
        // The original called commitConfiguration() without a matching
        // beginConfiguration(), which raises an exception at runtime.
        captureSession.beginConfiguration()

        // Prefer the triple camera, then the dual camera, then the telephoto
        // camera. The original repeated this block three times verbatim; it is
        // folded into one loop here with identical behaviour.
        let candidates: [(AVCaptureDevice.DeviceType, String)] = [
            (.builtInTripleCamera, "three"),
            (.builtInDualCamera, "two"),
            (.builtInTelephotoCamera, "one")
        ]
        for (deviceType, label) in candidates {
            guard let videoDevice = AVCaptureDevice.default(deviceType, for: .video, position: .back) else {
                continue
            }
            do {
                // Pick the widest depth format that delivers Float16 data.
                let depthFormats = videoDevice.activeFormat.supportedDepthDataFormats
                let filtered = depthFormats.filter {
                    CMFormatDescriptionGetMediaSubType($0.formatDescription) == kCVPixelFormatType_DepthFloat16
                }
                let selectedFormat = filtered.max { first, second in
                    CMVideoFormatDescriptionGetDimensions(first.formatDescription).width <
                        CMVideoFormatDescriptionGetDimensions(second.formatDescription).width
                }
                if let f = selectedFormat {
                    try videoDevice.lockForConfiguration()
                    videoDevice.activeDepthDataFormat = f
                    videoDevice.unlockForConfiguration()
                }
                let videoInput = try AVCaptureDeviceInput(device: videoDevice)
                if captureSession.canAddInput(videoInput) {
                    captureSession.addInput(videoInput)
                    fileDic["camera"] = label
                }
            } catch {
                let alert = UIAlertController(title: "Error".location(), message: error.localizedDescription, preferredStyle: .alert)
                alert.addAction(UIAlertAction(title: "OK".location(), style: .default, handler: nil))
                self.present(alert, animated: true, completion: nil)
                return
            }
            break
        }

        // Record at the highest preset the session supports.
        if captureSession.canSetSessionPreset(.hd4K3840x2160) {
            captureSession.sessionPreset = .hd4K3840x2160
            fileDic["width"] = 3840
            fileDic["height"] = 2160
        } else if captureSession.canSetSessionPreset(.hd1920x1080) {
            captureSession.sessionPreset = .hd1920x1080
            fileDic["width"] = 1920
            fileDic["height"] = 1080
        }

        DispatchQueue.main.async {
            let previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
            previewLayer.videoGravity = .resizeAspectFill
            previewLayer.frame = self.view.bounds
            self.cameraView.layer.addSublayer(previewLayer)
        }

        if captureSession.canAddOutput(output) {
            captureSession.addOutput(output)
            output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        }

        var outputs: [AVCaptureOutput] = [output]
        if captureSession.canAddOutput(depthOutput) {
            captureSession.addOutput(depthOutput)
            depthOutput.isFilteringEnabled = false
            if let connection = depthOutput.connection(with: .depthData) {
                connection.isEnabled = true
                supportsDepth = true
                outputs.append(depthOutput)
            } else {
                print("No AVCaptureConnection")
            }
        }

        // Deliver video and depth as matched pairs; the synchronizer is
        // retained in a property (see the note on the declaration above).
        let synchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: outputs)
        synchronizer.setDelegate(self, queue: receiveVideoQueue)
        outputSynchronizer = synchronizer

        captureSession.commitConfiguration()
        captureSession.startRunning()
        fileDic["motion"] = "phone"
    }
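
    // A minimal sketch, not called by the original code: reading one depth
    // sample out of the Float16 map that the synchronizer delivers below.
    // Assumes a kCVPixelFormatType_DepthFloat16 buffer and pixel-space
    // coordinates; Float16 requires Swift 5.3+ on a 64-bit device.
    private func depthValue(atX x: Int, y: Int, in map: CVPixelBuffer) -> Float {
        CVPixelBufferLockBaseAddress(map, .readOnly)
        defer { CVPixelBufferUnlockBaseAddress(map, .readOnly) }
        guard let base = CVPixelBufferGetBaseAddress(map) else { return 0 }
        let rowBytes = CVPixelBufferGetBytesPerRow(map)
        // Each Float16 sample is 2 bytes; step y rows down, then x samples across.
        let sample = base.advanced(by: y * rowBytes + x * MemoryLayout<Float16>.size)
            .assumingMemoryBound(to: Float16.self).pointee
        return Float(sample)
    }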

    func updateFileName(_ name: String) {
        filename = name
    }

    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer,
                                didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
        guard CoreMotitonMenager.shard.isStart else { return }

        if supportsDepth {
            guard let syncedDepthData = synchronizedDataCollection.synchronizedData(for: depthOutput) as? AVCaptureSynchronizedDepthData,
                  let syncedVideoData = synchronizedDataCollection.synchronizedData(for: output) as? AVCaptureSynchronizedSampleBufferData else {
                return
            }
            frameV += 1
            if syncedDepthData.depthDataWasDropped || syncedVideoData.sampleBufferWasDropped {
                return
            }
            // The depth map is unpacked here but never written out; only the
            // colour frame and motion data are persisted.
            let depthData = syncedDepthData.depthData
            _ = depthData.depthDataMap
            let sampleBuffer = syncedVideoData.sampleBuffer
            let data = CoreMotitonMenager.shard.updateFrame(sampleBuffer: sampleBuffer)
            currentFile?.updateFrame(same: sampleBuffer, data: data)
        } else {
            guard let syncedVideoData = synchronizedDataCollection.synchronizedData(for: output) as? AVCaptureSynchronizedSampleBufferData else {
                return
            }
            frameV += 1
            if syncedVideoData.sampleBufferWasDropped {
                return
            }
            let sampleBuffer = syncedVideoData.sampleBuffer
            let data = CoreMotitonMenager.shard.updateFrame(sampleBuffer: sampleBuffer)
            currentFile?.updateFrame(same: sampleBuffer, data: data)
        }
    }
}
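
// A minimal sketch, assuming the depth map should eventually be persisted
// alongside the colour frames: AVDepthData can re-encode its map, so the
// Float16 buffer captured above can be normalised to Float32 before its
// values are read. Illustrative only; not part of the original file.
extension CreateViewController {
    func float32DepthMap(from depthData: AVDepthData) -> CVPixelBuffer {
        // converting(toDepthDataType:) returns a copy in the requested format.
        let converted = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
        return converted.depthDataMap
    }
}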