When I turn on ARView.ARSession.ARConfiguration.providesAudioData = true and add a ModelEntity to the ARView whose material is a VideoMaterial(avPlayer: player), backed by a video that contains audio, the video does not play properly. Why?

import SwiftUI
import RealityKit
import ARKit
import AVFoundation

struct ContentView: View {
    var body: some View {
        ARViewContainer().edgesIgnoringSafeArea(.all)
    }
}

struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {

        let arView = ARView(frame: .zero)
        arView.session.delegate = context.coordinator
        let worldConfig = ARWorldTrackingConfiguration()
        worldConfig.planeDetection = .horizontal
        // worldConfig.providesAudioData = true    // uncommenting this line -----> error below
        arView.session.run(worldConfig)

        addTestEntity(arView: arView)

        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {}

    func makeCoordinator() -> Coordinator {
        Coordinator()
    }

    class Coordinator: NSObject, ARSessionDelegate, ARSessionObserver {

        func session(_ session: ARSession, didOutputAudioSampleBuffer audioSampleBuffer: CMSampleBuffer) {

        }
    }
}

func addTestEntity(arView: ARView) {

    let mesh = MeshResource.generatePlane(width: 0.5, depth: 0.35)
    guard let url = Bundle.main.url(forResource: "videoplayback", withExtension: "mp4") else { return }
    let player = AVPlayer(url: url)
    let videoMaterial = VideoMaterial(avPlayer: player)
    let model = ModelEntity(mesh: mesh, materials: [videoMaterial])
    model.transform.translation.y = 0.05
    let anchor = AnchorEntity(.plane(.horizontal, classification: .any, minimumBounds: SIMD2<Float>(0.2, 0.2)))
    anchor.children.append(model)
    player.play()
    arView.scene.anchors.append(anchor)
}

Error:

failed to update STS state: Error Domain=com.apple.STS-N Code=1396929899 "Error: failed to signal change" UserInfo={NSLocalizedDescription=Error: failed to signal change}
failed to update STS state: Error Domain=com.apple.STS-N Code=1396929899 "Error: failed to signal change" UserInfo={NSLocalizedDescription=Error: failed to signal change}
......
ARSession <0x125d88040>: did fail with error: Error Domain=com.apple.arkit.error Code=102 "Required sensor failed." UserInfo={NSLocalizedFailureReason=A sensor failed to deliver the required input., NSUnderlyingError=0x302922dc0 {Error Domain=AVFoundationErrorDomain Code=-11819 "Cannot Complete Action" UserInfo={NSLocalizedDescription=Cannot Complete Action, NSLocalizedRecoverySuggestion=Try again later.}}, NSLocalizedRecoverySuggestion=Make sure that the application has the required privacy settings., NSLocalizedDescription=Required sensor failed.}

iOS 17.5.1, Xcode 15.4

Currently, your code starts video playback in addTestEntity(arView:), which is called immediately after running the ARSession. Since session.run(_:) is an asynchronous operation, configuring the session may not have completed at that point.

I would recommend calling player.play() only after a plane has been detected and the object is anchored in the scene. That way, playback starts when the user actually sees the object appear. To do so, you can use the session(_ session: ARSession, didAdd anchors: [ARAnchor]) delegate callback, as sketched below.
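For example, a minimal sketch of that approach could look like the following (the player property on the Coordinator is an assumption added for illustration; in your code you would hand the Coordinator the AVPlayer created in addTestEntity, or look the entity up in the scene):

import ARKit
import AVFoundation

class Coordinator: NSObject, ARSessionDelegate {

    // Illustrative assumption: the Coordinator is given a reference to the
    // AVPlayer that backs the VideoMaterial.
    var player: AVPlayer?

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        // Start playback only once a plane anchor has been detected,
        // i.e. when the anchored video entity can actually become visible.
        guard anchors.contains(where: { $0 is ARPlaneAnchor }) else { return }
        player?.play()
    }

    func session(_ session: ARSession, didOutputAudioSampleBuffer audioSampleBuffer: CMSampleBuffer) {
        // Microphone sample buffers arrive here when providesAudioData is enabled.
    }
}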

I tried again and it still doesn't work:

import SwiftUI
import RealityKit
import ARKit
import AVFoundation

struct ContentView: View {
    var body: some View {
        ARViewContainer().edgesIgnoringSafeArea(.all)
    }
}

struct ARViewContainer: UIViewRepresentable {

    let arView = ARView(frame: .zero)

    func makeUIView(context: Context) -> ARView {
        arView.session.delegate = context.coordinator
        let worldConfig = ARWorldTrackingConfiguration()
        worldConfig.planeDetection = .horizontal
        worldConfig.providesAudioData = true
        arView.session.run(worldConfig)
        arView.tapGestureRecognizer()
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {}

    func makeCoordinator() -> Coordinator {
        Coordinator()
    }

    class Coordinator: NSObject, ARSessionDelegate, ARSessionObserver {

        func session(_ session: ARSession, didOutputAudioSampleBuffer audioSampleBuffer: CMSampleBuffer) {

        }

        func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {

        }
    }
}

func addTestEntity(arView: ARView) {

    let mesh = MeshResource.generatePlane(width: 0.1, height: 0.13)
    guard let url = Bundle.main.url(forResource: "videoplayback", withExtension: "mp4") else { return }
    let player = AVPlayer(url: url)
    let videoMaterial = VideoMaterial(avPlayer: player)
    let model = ModelEntity(mesh: mesh, materials: [videoMaterial])
    model.name = "NewModel"
    model.transform.translation.y = 0.05
    let anchor = AnchorEntity(.plane(.horizontal, classification: .any, minimumBounds: SIMD2<Float>(0.2, 0.2)))
    anchor.children.append(model)
    arView.scene.anchors.append(anchor)
}

extension ARView {

    func tapGestureRecognizer() {
        let tap = UITapGestureRecognizer(target: self, action: #selector(handleTap(_:)))
        tap.numberOfTapsRequired = 1
        self.addGestureRecognizer(tap)
    }

    @objc func handleTap(_ gesture: UITapGestureRecognizer) {
        addTestEntity(arView: self)
        guard let entity = scene.findEntity(named: "NewModel") else {
            return
        }
        if let modelEntity = entity as? ModelEntity {
            modelEntity.model?.materials.forEach { value in
                if let videoMaterial = value as? VideoMaterial {
                    videoMaterial.avPlayer?.play()
                    print("video___play")
                }
            }
        }
    }
}

So the question remains: with ARView.ARSession.ARConfiguration.providesAudioData = true and a ModelEntity whose material is a VideoMaterial(avPlayer: player) backed by a video that contains audio, why does the video not play properly?