import ARKit
import SceneKit
import UIKit
import AVFoundation
class ViewController: UIViewController, ARSessionDelegate {

    // MARK: - Outlets

    @IBOutlet var sceneView: ARSCNView!
    @IBOutlet weak var tabBar: UITabBar!

    // MARK: - Properties

    /// Maps each tracked face anchor to the controller that owns its virtual content.
    var faceAnchorsAndContentControllers: [ARFaceAnchor: VirtualContentController] = [:]

    /// The content type currently selected in the tab bar. Changing it replaces the
    /// virtual content attached to every face already being tracked.
    var selectedVirtualContent: VirtualContentType! {
        didSet {
            guard oldValue != nil, oldValue != selectedVirtualContent
                else { return }

            // Remove existing content when switching types.
            for contentController in faceAnchorsAndContentControllers.values {
                contentController.contentNode?.removeFromParentNode()
            }

            // If there are anchors already (switching content), create new controllers and
            // generate updated content. Otherwise, the content controller will place it in
            // `renderer(_:didAdd:for:)`.
            for anchor in faceAnchorsAndContentControllers.keys {
                let contentController = selectedVirtualContent.makeController()
                if let node = sceneView.node(for: anchor),
                   let contentNode = contentController.renderer(sceneView, nodeFor: anchor) {
                    node.addChildNode(contentNode)
                    faceAnchorsAndContentControllers[anchor] = contentController
                } else {
                    // Fix: the old controller's node was just removed above; without this,
                    // a stale entry lingered in the dictionary when no scene node exists.
                    faceAnchorsAndContentControllers[anchor] = nil
                }
            }
        }
    }

    // MARK: - AVCaptureSession Properties

    var captureSession: AVCaptureSession?
    var photoOutput: AVCapturePhotoOutput?
    var captureDevice: AVCaptureDevice?

    /// One-shot handler the photo-capture delegate invokes with the captured image.
    var captureCompletion: ((UIImage?, Error?) -> Void)?

    /// Dedicated serial queue so starting/stopping the capture session never blocks
    /// the main thread (`AVCaptureSession.startRunning()` is a blocking call).
    let captureSessionQueue = DispatchQueue(label: "com.yourapp.captureSessionQueue")

    /// Spinner shown while captured images are being written to the photo library.
    var activityIndicator: UIActivityIndicatorView!

    // MARK: - ARFrame Storage

    /// The most recent ARFrame's camera image, kept so it can be saved alongside
    /// the high-resolution AVCapture photo.
    var capturedARFrameImage: UIImage?

    /// Shared Core Image context for pixel-buffer conversion.
    /// Fix: creating a new CIContext per frame is expensive; reuse one instance.
    private let ciContext = CIContext()

    // MARK: - View Controller Life Cycle

    override func viewDidLoad() {
        super.viewDidLoad()

        sceneView.delegate = self
        sceneView.session.delegate = self
        sceneView.automaticallyUpdatesLighting = true

        // Set the initial face content from the first tab item.
        // NOTE(review): `tabBar.delegate` is never assigned here — presumably wired
        // in the storyboard; verify, or `tabBar(_:didSelect:)` will never fire.
        tabBar.selectedItem = tabBar.items!.first!
        selectedVirtualContent = VirtualContentType(rawValue: tabBar.selectedItem!.tag)

        // Initialize and configure AVCaptureSession.
        setupCaptureSession()

        // Initialize Activity Indicator.
        setupActivityIndicator()

        // Add Capture Button.
        setupCaptureButton()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // Run (or re-run) the AR face-tracking session.
        resetTracking()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Release the camera while off screen.
        sceneView.session.pause()
    }

    // MARK: - Setup AVCaptureSession

    /// Builds the photo-capture pipeline (front camera -> photo output).
    /// Logs and bails out early on any configuration failure.
    func setupCaptureSession() {
        let session = AVCaptureSession()
        session.sessionPreset = .photo
        captureSession = session

        // Select the front camera (the same camera ARKit face tracking uses).
        guard let device = AVCaptureDevice.default(.builtInWideAngleCamera,
                                                   for: .video,
                                                   position: .front) else {
            print("Front camera is not available.")
            return
        }
        captureDevice = device

        do {
            let input = try AVCaptureDeviceInput(device: device)
            guard session.canAddInput(input) else {
                print("Unable to add input to capture session.")
                return
            }
            session.addInput(input)
        } catch {
            print("Error configuring capture device input: \(error)")
            return
        }

        // Configure photo output.
        let output = AVCapturePhotoOutput()
        guard session.canAddOutput(output) else {
            // Fix: don't retain an output that couldn't be attached to the session.
            print("Unable to add photo output to capture session.")
            return
        }
        session.addOutput(output)
        photoOutput = output

        // Deprecated in iOS 16 (superseded by maxPhotoDimensions), but kept because
        // it still enables high-resolution stills on earlier systems.
        output.isHighResolutionCaptureEnabled = true
    }

    // MARK: - Setup Activity Indicator

    /// Creates and installs the activity indicator, centered in the view.
    func setupActivityIndicator() {
        if #available(iOS 13.0, *) {
            activityIndicator = UIActivityIndicatorView(style: .large)
        } else {
            // Fallback on earlier versions.
            activityIndicator = UIActivityIndicatorView(style: .gray)
        }
        // NOTE(review): centering once here won't track rotation/layout changes —
        // consider constraints or recentering in viewDidLayoutSubviews.
        activityIndicator.center = view.center
        activityIndicator.hidesWhenStopped = true
        view.addSubview(activityIndicator)
    }

    // MARK: - ARSessionDelegate

    /// Surfaces AR session failures to the user with a restart option.
    func session(_ session: ARSession, didFailWithError error: Error) {
        guard error is ARError else { return }
        let errorWithInfo = error as NSError
        let messages = [
            errorWithInfo.localizedDescription,
            errorWithInfo.localizedFailureReason,
            errorWithInfo.localizedRecoverySuggestion
        ]
        let errorMessage = messages.compactMap({ $0 }).joined(separator: "\n")
        DispatchQueue.main.async {
            self.displayErrorMessage(title: "The AR session failed.", message: errorMessage)
        }
    }

    /// Starts a fresh face-tracking session and clears all per-anchor state.
    /// - Tag: ARFaceTrackingSetup
    func resetTracking() {
        guard ARFaceTrackingConfiguration.isSupported else { return }
        let configuration = ARFaceTrackingConfiguration()
        if #available(iOS 13.0, *) {
            configuration.maximumNumberOfTrackedFaces = 1 // Limit to 1 for performance
        }
        configuration.isLightEstimationEnabled = true
        sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
        faceAnchorsAndContentControllers.removeAll()
    }

    // MARK: - Error Handling

    /// Presents an alert describing the error, with a "Restart Session" action.
    func displayErrorMessage(title: String, message: String) {
        let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
        let restartAction = UIAlertAction(title: "Restart Session", style: .default) { _ in
            alertController.dismiss(animated: true, completion: nil)
            self.resetTracking()
        }
        alertController.addAction(restartAction)
        present(alertController, animated: true, completion: nil)
    }

    // Auto-hide the home indicator to maximize immersion in AR experiences.
    override var prefersHomeIndicatorAutoHidden: Bool {
        return true
    }

    // Hide the status bar to maximize immersion in AR experiences.
    override var prefersStatusBarHidden: Bool {
        return true
    }

    // MARK: - Setup Capture Button

    /// Creates the "Capture" button and pins it bottom-center with Auto Layout.
    func setupCaptureButton() {
        let captureButton = UIButton(type: .system)
        captureButton.setTitle("Capture", for: .normal)
        captureButton.backgroundColor = UIColor.systemBlue.withAlphaComponent(0.7)
        captureButton.setTitleColor(.white, for: .normal)
        captureButton.layer.cornerRadius = 10
        captureButton.translatesAutoresizingMaskIntoConstraints = false

        captureButton.addTarget(self, action: #selector(captureHiResFrame(_:)), for: .touchUpInside)
        self.view.addSubview(captureButton)

        NSLayoutConstraint.activate([
            captureButton.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor, constant: -20),
            captureButton.centerXAnchor.constraint(equalTo: view.centerXAnchor),
            captureButton.widthAnchor.constraint(equalToConstant: 100),
            captureButton.heightAnchor.constraint(equalToConstant: 50)
        ])
    }

    // MARK: - Capture High-Resolution Frame with Session Switching

    /// Pauses ARKit, hands the front camera to AVFoundation for one high-resolution
    /// still, saves both that photo and the last ARFrame image, then resumes ARKit.
    @objc func captureHiResFrame(_ sender: UIButton) {
        // Ensure iOS 17+ is available and the device supports face tracking.
        guard #available(iOS 17.0, *), ARFaceTrackingConfiguration.isSupported else {
            print("High-resolution capture requires iOS 17+ and a supported device.")
            return
        }
        // Ensure ARSession is running and has at least one frame.
        guard let currentFrame = sceneView.session.currentFrame else {
            print("ARSession is not running or no frame is available.")
            return
        }

        // Keep the ARFrame's camera image so it can be saved alongside the photo.
        capturedARFrameImage = convertPixelBufferToUIImage(pixelBuffer: currentFrame.capturedImage)

        // Record the start time for the timing log below.
        let startTime = Date()

        // Fix: pause synchronously — button actions arrive on the main thread, so this
        // guarantees ARKit has released the camera BEFORE startRunning() executes on
        // the capture queue. The previous async dispatch gave no such ordering.
        sceneView.session.pause()

        // Start the AVCaptureSession off the main thread.
        captureSessionQueue.async { [weak self] in
            guard let self = self else { return }

            // startRunning() blocks until the session is live; the poll below is a
            // defensive fallback in case startup is delayed or fails silently.
            self.captureSession?.startRunning()
            let maxWaitTime: TimeInterval = 2.0     // Maximum wait time in seconds
            let pollingInterval: TimeInterval = 0.05 // Polling interval in seconds
            var waitedTime: TimeInterval = 0.0
            while !(self.captureSession?.isRunning ?? false) && waitedTime < maxWaitTime {
                Thread.sleep(forTimeInterval: pollingInterval)
                waitedTime += pollingInterval
            }

            guard self.captureSession?.isRunning == true else {
                print("Capture session failed to start within \(maxWaitTime) seconds.")
                DispatchQueue.main.async { [weak self] in
                    // Fix: the AR session was paused above and the old code never
                    // resumed it on this failure path, leaving a frozen view.
                    self?.resetTracking()
                    self?.displayErrorMessage(title: "Capture Failed", message: "Unable to start the camera for capturing the image. Please try again.")
                }
                return
            }

            // Configure photo settings. (Deprecated in iOS 16; kept to match the
            // isHighResolutionCaptureEnabled configuration in setupCaptureSession.)
            let photoSettings = AVCapturePhotoSettings()
            photoSettings.isHighResolutionPhotoEnabled = true

            // Set up the one-shot capture completion handler.
            self.captureCompletion = { [weak self] image, error in
                guard let self = self else { return }

                // Hand the camera back to ARKit.
                self.captureSession?.stopRunning()

                DispatchQueue.main.async {
                    // Resume the ARSession with its previous configuration.
                    if let configuration = self.sceneView.session.configuration {
                        self.sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
                    }

                    // Log the end-to-end capture time.
                    let timeInterval = Date().timeIntervalSince(startTime)
                    print("Time taken to capture image: \(timeInterval) seconds")

                    if let error = error {
                        print("Error capturing image: \(error)")
                        return
                    }
                    guard let image = image else {
                        print("Failed to capture AVCapture image.")
                        return
                    }

                    // Already on the main queue here — the extra nested
                    // DispatchQueue.main.async in the old code was redundant.
                    self.activityIndicator.startAnimating()

                    // Save the image to the photo library.
                    UIImageWriteToSavedPhotosAlbum(image, self, #selector(self.image(_:didFinishSavingWithError:contextInfo:)), nil)
                    print("Successfully captured and saved AVCapture image with size: \(image.size.width)x\(image.size.height)")

                    // Optionally, save the ARFrame image as well.
                    if let arImage = self.capturedARFrameImage {
                        UIImageWriteToSavedPhotosAlbum(arImage, self, #selector(self.image(_:didFinishSavingWithError:contextInfo:)), nil)
                        print("Successfully saved ARSession frame image with size: \(arImage.size.width)x\(arImage.size.height)")
                        // Fix: release the stored frame once handed off for saving.
                        self.capturedARFrameImage = nil
                    }
                }
            }

            // Capture the photo; the delegate fires captureCompletion.
            self.photoOutput?.capturePhoto(with: photoSettings, delegate: self)
        }
    }

    // MARK: - Convert CVPixelBuffer to UIImage

    /// Renders a camera pixel buffer into a UIImage via the shared CIContext.
    /// Returns nil if Core Image cannot create a CGImage from the buffer.
    func convertPixelBufferToUIImage(pixelBuffer: CVPixelBuffer) -> UIImage? {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        // Fix: reuse the cached ciContext instead of allocating one per call.
        guard let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent) else {
            return nil
        }
        // NOTE(review): no orientation is applied; ARKit camera buffers are
        // landscape-oriented, so the saved image may appear rotated — confirm.
        return UIImage(cgImage: cgImage)
    }

    // MARK: - Handle Image Save Completion

    /// UIImageWriteToSavedPhotosAlbum callback; fires once per image saved.
    @objc func image(_ image: UIImage, didFinishSavingWithError error: Error?, contextInfo: UnsafeRawPointer) {
        if let error = error {
            print("Error saving image: \(error.localizedDescription)")
        } else {
            print("Image saved to photo library successfully.")
        }

        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            // Stop the spinner regardless of outcome.
            self.activityIndicator.stopAnimating()

            // Fix: two images are saved per capture, so this callback fires twice;
            // presenting while an alert is already up would be dropped by UIKit.
            guard self.presentedViewController == nil else { return }

            let alert: UIAlertController
            if let error = error {
                alert = UIAlertController(title: "Save Error", message: error.localizedDescription, preferredStyle: .alert)
            } else {
                alert = UIAlertController(title: "Saved!", message: "Your images have been saved to your photo library.", preferredStyle: .alert)
            }
            alert.addAction(UIAlertAction(title: "OK", style: .default))
            self.present(alert, animated: true)
        }
    }
}
// MARK: - UITabBarDelegate
extension ViewController: UITabBarDelegate {
    /// Switches the displayed virtual content to match the tapped tab item.
    /// The item's `tag` must correspond to a `VirtualContentType` raw value.
    func tabBar(_ tabBar: UITabBar, didSelect item: UITabBarItem) {
        guard let newContent = VirtualContentType(rawValue: item.tag) else {
            fatalError("unexpected virtual content tag")
        }
        selectedVirtualContent = newContent
    }
}
// MARK: - ARSCNViewDelegate
extension ViewController: ARSCNViewDelegate {

    /// Attaches virtual content when a face anchor is first added to the scene.
    /// Content *switches* are handled by `selectedVirtualContent.didSet` instead.
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        guard let faceAnchor = anchor as? ARFaceAnchor else { return }
        DispatchQueue.main.async {
            // Fix: verify the node is empty and the selection is non-nil BEFORE
            // building a controller — the old code constructed one unconditionally
            // and force-unwrapped the implicitly unwrapped `selectedVirtualContent`,
            // which can be nil during this async hop.
            guard node.childNodes.isEmpty,
                  let contentType = self.selectedVirtualContent else { return }
            let contentController = contentType.makeController()
            if let contentNode = contentController.renderer(renderer, nodeFor: faceAnchor) {
                node.addChildNode(contentNode)
                self.faceAnchorsAndContentControllers[faceAnchor] = contentController
            }
        }
    }

    /// Forwards per-frame face geometry updates to the anchor's content controller.
    /// - Tag: ARFaceGeometryUpdate
    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        guard let faceAnchor = anchor as? ARFaceAnchor,
              let contentController = faceAnchorsAndContentControllers[faceAnchor],
              let contentNode = contentController.contentNode else {
            return
        }
        contentController.renderer(renderer, didUpdate: contentNode, for: anchor)
    }

    /// Releases the controller for a face anchor that is no longer tracked.
    func renderer(_ renderer: SCNSceneRenderer, didRemove node: SCNNode, for anchor: ARAnchor) {
        guard let faceAnchor = anchor as? ARFaceAnchor else { return }
        faceAnchorsAndContentControllers[faceAnchor] = nil
    }
}
// MARK: - AVCapturePhotoCaptureDelegate
extension ViewController: AVCapturePhotoCaptureDelegate {

    /// Converts the finished photo to a UIImage and hands it to `captureCompletion`.
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        // Fix: take and clear the one-shot completion before invoking it, so the
        // closure (and everything it captured) is released afterwards and a stale
        // handler can never fire again on a later capture.
        let completion = captureCompletion
        captureCompletion = nil

        if let error = error {
            completion?(nil, error)
            return
        }
        guard let photoData = photo.fileDataRepresentation(),
              let image = UIImage(data: photoData) else {
            completion?(nil, NSError(domain: "PhotoCapture", code: -1, userInfo: [NSLocalizedDescriptionKey: "Failed to convert photo data to UIImage."]))
            return
        }
        completion?(image, nil)
    }
}