import AVFoundation
import UIKit

final class CameraView: UIView {

    // Delivers raw frames to the sample-buffer delegate on videoDataOutputQueue.
    // Note: connection(with:) returns nil until an output has been added to a
    // session, so the original connection tweak here was a no-op and was dropped.
    private lazy var videoDataOutput: AVCaptureVideoDataOutput = {
        let v = AVCaptureVideoDataOutput()
        v.alwaysDiscardsLateVideoFrames = true
        v.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
        return v
    }()

    private lazy var session: AVCaptureSession = {
        let s = AVCaptureSession()
        s.sessionPreset = .hd1280x720 // alternatives: .cif352x288, .vga640x480
        s.automaticallyConfiguresApplicationAudioSession = false
        return s
    }()

    private lazy var previewLayer: AVCaptureVideoPreviewLayer = {
        let l = AVCaptureVideoPreviewLayer(session: session)
        l.videoGravity = .resizeAspect
        return l
    }()

    private let videoDataOutputQueue = DispatchQueue(
        label: "\(Bundle.main.bundleIdentifier ?? "").videoDataOutputQueue")

    private let captureDevice: AVCaptureDevice? = AVCaptureDevice.default(
        .builtInWideAngleCamera, for: .video, position: .front)
    private let audioCaptureDevice: AVCaptureDevice? = AVCaptureDevice.default(for: .audio)

    let movieOutput = AVCaptureMovieFileOutput()

    override init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        commonInit()
    }

    private func commonInit() {
        contentMode = .scaleAspectFit
        beginSession()
    }

    private func beginSession() {
        guard let captureDevice = captureDevice,
              let audioCaptureDevice = audioCaptureDevice else { return }
        do {
            let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
            if session.canAddInput(deviceInput) {
                session.addInput(deviceInput)
            }

            let audioDeviceInput = try AVCaptureDeviceInput(device: audioCaptureDevice)

            // The shared audio session only needs local scope; the stored
            // implicitly-unwrapped property was unnecessary.
            let audioSession = AVAudioSession.sharedInstance()
            do {
                if #available(iOS 11.0, *) {
                    try audioSession.setCategory(.playAndRecord,
                                                 mode: .spokenAudio,
                                                 policy: .default,
                                                 options: [.duckOthers])
                } else {
                    try audioSession.setCategory(.playAndRecord, mode: .spokenAudio)
                }
                try audioSession.setActive(true)
            } catch {
                print("Audio setup error", #file, #function, #line, error.localizedDescription)
            }

            if session.canAddInput(audioDeviceInput) {
                session.addInput(audioDeviceInput)
            }
            if session.canAddOutput(videoDataOutput) {
                session.addOutput(videoDataOutput)
            }
            // Caution: on iOS, AVCaptureMovieFileOutput and AVCaptureVideoDataOutput
            // generally cannot deliver simultaneously in one session; once the movie
            // output is recording, the sample-buffer delegate may stop receiving
            // frames. The addOutput call is now guarded like the others.
            if session.canAddOutput(movieOutput) {
                session.addOutput(movieOutput)
            }

            layer.masksToBounds = true
            layer.addSublayer(previewLayer)
            previewLayer.frame = bounds

            // startRunning() blocks; Apple recommends calling it off the main thread.
            session.startRunning()
            startRecording()
        } catch {
            debugPrint("\(type(of: self)): \(#function) line: \(#line). \(error.localizedDescription)")
        }
    }

    func startRecording() {
        // FileSys is a helper class (not shown) that resolves the current working directory.
        let exportURL = FileSys.getCurrentWorkingDir().appendingPathComponent("Feed.mov")

        // connection(with:) replaces the original manual loop over connections and
        // input ports; it returns the movie output's video connection directly.
        if let videoConnection = movieOutput.connection(with: .video) {
            // isVideoMirrored may only be set while automatic adjustment is off.
            videoConnection.automaticallyAdjustsVideoMirroring = false
            if videoConnection.isVideoMirroringSupported {
                videoConnection.isVideoMirrored = true
            }
            videoConnection.videoOrientation = .portrait
        }
        movieOutput.startRecording(to: exportURL, recordingDelegate: self)
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        previewLayer.frame = bounds
    }
}

extension CameraView: AVCaptureFileOutputRecordingDelegate {
    func fileOutput(_ output: AVCaptureFileOutput,
                    didFinishRecordingTo outputFileURL: URL,
                    from connections: [AVCaptureConnection],
                    error: Error?) {
        print("FILE OUTPUT:", #function, error.debugDescription)
        print(outputFileURL.path)
    }
}

// The sample-buffer delegate is registered on videoDataOutput, but no callback is
// implemented yet; the sketch below shows the method frames would arrive through.
extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate {}