Receiving main camera stream

Hello,

I recently got the entitlement for the Enterprise API this week. Although I added the license and the entitlement to the project, I couldn't get any frames from the cameraFrameUpdates. Here are the logs of the authorization and the cameraFrameUpdates:

[cameraAccess: allowed]
CameraFrameUpdates(stream: Swift.AsyncStream<ARKit.CameraFrame>(context: Swift.AsyncStream<ARKit.CameraFrame>._Context))

Could anyone point out what I'm doing wrong in the process?

Answered by Vision Pro Engineer in 795216022

Here's example code that displays the main camera in a window in an immersive space. Note: ARKit is only available in an immersive space. If the example doesn't work please ensure your entitlements are configured correctly.

import SwiftUI
import ARKit

@main
struct CameraApp: App {

    @Environment(\.openImmersiveSpace) private var openImmersiveSpace
    @Environment(\.openWindow) private var openWindow

    var body: some Scene {
        // Launcher window with a single button that enters the immersive space.
        WindowGroup {
            Button("Open immersive space") {
                Task { await openImmersiveSpace(id: "ImmersiveSpace") }
            }
        }

        // Window that hosts the live main-camera feed.
        WindowGroup(id: "MainCamera") {
            MainCameraView()
        }

        // ARKit data providers only deliver data inside an immersive space;
        // entering it opens the camera window.
        ImmersiveSpace(id: "ImmersiveSpace") {
            VStack {
                // Put UI here to open the window. Using a task to open it for brevity.
            }
            .task { openWindow(id: "MainCamera") }
        }
    }
}

/// Displays the left main-camera stream from a `CameraFrameProvider`.
/// Must be shown while an immersive space is open, or no frames arrive.
struct MainCameraView: View {
    @State private var arkitSession = ARKitSession()
    @State private var pixelBuffer: CVPixelBuffer?

    // Reused for every frame; creating a CIContext per image is expensive.
    private let ciContext = CIContext(options: nil)

    /// Placeholder shown until the first camera frame arrives.
    private var emptyImage: Image {
        Image(systemName: "camera")
    }

    /// Converts the latest pixel buffer to a SwiftUI `Image`,
    /// falling back to the placeholder when conversion is not possible.
    private var image: Image {
        guard let pixelBuffer = pixelBuffer else {
            return emptyImage
        }

        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        guard let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent) else {
            return emptyImage
        }
        return Image(uiImage: UIImage(cgImage: cgImage))
    }

    var body: some View {
        VStack {
            image
                .resizable()
                .scaledToFit()
        }
        .task {
            guard CameraFrameProvider.isSupported else {
                print("CameraFrameProvider not supported.")
                return
            }

            let formats = CameraVideoFormat.supportedVideoFormats(for: .main, cameraPositions: [CameraFrameProvider.CameraPosition.left])
            // Guard against an empty format list instead of crashing on formats[0].
            guard let format = formats.first else {
                print("No supported video formats for the left main camera.")
                return
            }

            let cameraFrameProvider = CameraFrameProvider()

            do {
                try await arkitSession.run([cameraFrameProvider])
            } catch {
                // Bug fix: the original bound `sessionError` without using it and
                // then fell through, requesting frame updates even after the
                // session failed to run. Report the error and stop instead.
                if let sessionError = error as? ARKitSession.Error {
                    print("ARKitSession.run() failed: \(sessionError)")
                } else {
                    preconditionFailure("ARKitSession.run() returned a non-session error: \(error)")
                }
                return
            }

            guard let cameraFrameUpdates = cameraFrameProvider.cameraFrameUpdates(for: format) else {
                preconditionFailure("Failed to get an async sequence for the chosen format.")
            }
            // Consume frames for as long as the view's task is alive.
            for await cameraFrame in cameraFrameUpdates {
                guard let leftSample = cameraFrame.sample(for: .left) else {
                    print("CameraFrameProviderSample - Nil camera frame left sample")
                    continue
                }

                self.pixelBuffer = leftSample.pixelBuffer
            }
        }
    }
}

Here's example code that displays the main camera in a window in an immersive space. Note: ARKit is only available in an immersive space. If the example doesn't work please ensure your entitlements are configured correctly.

import SwiftUI
import ARKit

@main
struct CameraApp: App {

    @Environment(\.openImmersiveSpace) private var openImmersiveSpace
    @Environment(\.openWindow) private var openWindow

    var body: some Scene {
        // Launcher window with a single button that enters the immersive space.
        WindowGroup {
            Button("Open immersive space") {
                Task { await openImmersiveSpace(id: "ImmersiveSpace") }
            }
        }

        // Window that hosts the live main-camera feed.
        WindowGroup(id: "MainCamera") {
            MainCameraView()
        }

        // ARKit data providers only deliver data inside an immersive space;
        // entering it opens the camera window.
        ImmersiveSpace(id: "ImmersiveSpace") {
            VStack {
                // Put UI here to open the window. Using a task to open it for brevity.
            }
            .task { openWindow(id: "MainCamera") }
        }
    }
}

/// Displays the left main-camera stream from a `CameraFrameProvider`.
/// Must be shown while an immersive space is open, or no frames arrive.
struct MainCameraView: View {
    @State private var arkitSession = ARKitSession()
    @State private var pixelBuffer: CVPixelBuffer?

    // Reused for every frame; creating a CIContext per image is expensive.
    private let ciContext = CIContext(options: nil)

    /// Placeholder shown until the first camera frame arrives.
    private var emptyImage: Image {
        Image(systemName: "camera")
    }

    /// Converts the latest pixel buffer to a SwiftUI `Image`,
    /// falling back to the placeholder when conversion is not possible.
    private var image: Image {
        guard let pixelBuffer = pixelBuffer else {
            return emptyImage
        }

        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        guard let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent) else {
            return emptyImage
        }
        return Image(uiImage: UIImage(cgImage: cgImage))
    }

    var body: some View {
        VStack {
            image
                .resizable()
                .scaledToFit()
        }
        .task {
            guard CameraFrameProvider.isSupported else {
                print("CameraFrameProvider not supported.")
                return
            }

            let formats = CameraVideoFormat.supportedVideoFormats(for: .main, cameraPositions: [CameraFrameProvider.CameraPosition.left])
            // Guard against an empty format list instead of crashing on formats[0].
            guard let format = formats.first else {
                print("No supported video formats for the left main camera.")
                return
            }

            let cameraFrameProvider = CameraFrameProvider()

            do {
                try await arkitSession.run([cameraFrameProvider])
            } catch {
                // Bug fix: the original bound `sessionError` without using it and
                // then fell through, requesting frame updates even after the
                // session failed to run. Report the error and stop instead.
                if let sessionError = error as? ARKitSession.Error {
                    print("ARKitSession.run() failed: \(sessionError)")
                } else {
                    preconditionFailure("ARKitSession.run() returned a non-session error: \(error)")
                }
                return
            }

            guard let cameraFrameUpdates = cameraFrameProvider.cameraFrameUpdates(for: format) else {
                preconditionFailure("Failed to get an async sequence for the chosen format.")
            }
            // Consume frames for as long as the view's task is alive.
            for await cameraFrame in cameraFrameUpdates {
                guard let leftSample = cameraFrame.sample(for: .left) else {
                    print("CameraFrameProviderSample - Nil camera frame left sample")
                    continue
                }

                self.pixelBuffer = leftSample.pixelBuffer
            }
        }
    }
}
Receiving main camera stream
 
 
Q