I referred to the Enhanced Sensor Access code from WWDC24 to display the main camera of Vision Pro in the application interface, but it is not displaying

this is my code: import Foundation import ARKit import SwiftUI

class CameraViewModel: ObservableObject { private var arKitSession = ARKitSession() @Published var capturedImage: UIImage? private var pixelBuffer: CVPixelBuffer? private var cameraAccessAuthorizationStatus = ARKitSession.AuthorizationStatus.notDetermined

func startSession() {
    guard CameraFrameProvider.isSupported else {
        print("Device does not support main camera")
        return
    }

    Task {
        await requestCameraAccess()
        guard cameraAccessAuthorizationStatus == .allowed else {
            print("User did not authorize camera access")
            return
        }

        let formats = CameraVideoFormat.supportedVideoFormats(for: .main, cameraPositions: [.left])
        let cameraFrameProvider = CameraFrameProvider()

        print("Requesting camera authorization...")
        let authorizationResult = await arKitSession.requestAuthorization(for: [.cameraAccess])
        cameraAccessAuthorizationStatus = authorizationResult[.cameraAccess] ?? .notDetermined
        guard cameraAccessAuthorizationStatus == .allowed else {
            print("Camera data access authorization failed")
            return
        }

        print("Camera authorization successful, starting ARKit session...")
        
        do {
            try await arKitSession.run([cameraFrameProvider])
            print("ARKit session is running")

            guard let cameraFrameUpdates = cameraFrameProvider.cameraFrameUpdates(for: formats[0]) else {
                print("Unable to get camera frame updates")
                return
            }

            print("Successfully got camera frame updates")
            for await cameraFrame in cameraFrameUpdates {
                guard let mainCameraSample = cameraFrame.sample(for: .left) else {
                    print("Unable to get main camera sample")
                    continue
                }
                print("Successfully got main camera sample")
                self.pixelBuffer = mainCameraSample.pixelBuffer
            }

            DispatchQueue.main.async {
                self.capturedImage = self.convertToUIImage(pixelBuffer: self.pixelBuffer)
                if self.capturedImage != nil {
                    print("Successfully captured and converted image")
                } else {
                    print("Image conversion failed")
                }
            }
        } catch {
            print("ARKit session failed to run: \(error)")
        }
    }
}

private func requestCameraAccess() async {
    let authorizationResult = await arKitSession.requestAuthorization(for: [.cameraAccess])
    cameraAccessAuthorizationStatus = authorizationResult[.cameraAccess] ?? .notDetermined
    if cameraAccessAuthorizationStatus == .allowed {
        print("User granted camera access")
    } else {
        print("User denied camera access")
    }
}

private func convertToUIImage(pixelBuffer: CVPixelBuffer?) -> UIImage? {
    guard let pixelBuffer = pixelBuffer else {
        print("Pixel buffer is nil")
        return nil
    }
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    let context = CIContext()
    if let cgImage = context.createCGImage(ciImage, from: ciImage.extent) {
        return UIImage(cgImage: cgImage)
    }
    print("Unable to create CGImage")
    return nil
}

}

this my log: User granted camera access Requesting camera authorization... Camera authorization successful, starting ARKit session... ARKit session is running Successfully got camera frame updates void * _Nullable NSMapGet(NSMapTable * _Nonnull, const void * _Nullable): map table argument is NULL

I referred to the Enhanced Sensor Access code from WWDC24 to display the main camera of Vision Pro in the application interface, but it is not displaying
 
 
Q