I’m facing a problem trying to achieve spatial audio effects in my iOS 18 app. I have tried several approaches to get good 3D audio, but the effect either never felt convincing or didn’t work at all.
What troubles me most is that my AirPods don’t recognize my app as one that plays spatial audio: the audio settings show "Spatial Audio Not Playing". So I guess my app isn’t tapping into the spatial audio pipeline at all.
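For reference, here is how I check whether the current output route even reports spatial audio, plus the one session-level hint I found in the docs (a minimal sketch; setSupportsMultichannelContent(_:) and isSpatialAudioEnabled are iOS 15+ AVAudioSession APIs, and I’m not sure they are enough on their own):

import AVFoundation

func checkSpatialAudioRoute() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playback, mode: .default)
        // Tell the system this app can provide multichannel/spatial content.
        try session.setSupportsMultichannelContent(true)
        try session.setActive(true)
    } catch {
        print("Session setup failed: \(error)")
    }
    // Ask each output in the current route whether spatial audio is enabled for it.
    for output in session.currentRoute.outputs {
        print("\(output.portName): spatial audio enabled = \(output.isSpatialAudioEnabled)")
    }
}

My understanding is that setSupportsMultichannelContent(true) only signals capability; it doesn’t by itself make playback spatial.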
- First approach uses AVAudioEnvironmentNode with AVAudioEngine. Changing the position of the player node, as well as the listener’s, doesn’t seem to change anything about how the audio plays.
Here's (simplified) how I initialize AVAudioEngine:
import Foundation
import AVFoundation
class AudioManager: ObservableObject {
// important class variables
    var audioEngine: AVAudioEngine!
    var environmentNode: AVAudioEnvironmentNode!
    var playerNode: AVAudioPlayerNode!
    var audioFile: AVAudioFile?
...
    // Sound setup
    func setupAudio() {
        do {
            let session = AVAudioSession.sharedInstance()
            try session.setCategory(.playback, mode: .default, options: [])
            try session.setActive(true)
        } catch {
            print("Failed to configure AVAudioSession: \(error.localizedDescription)")
        }
        audioEngine = AVAudioEngine()
        environmentNode = AVAudioEnvironmentNode()
        playerNode = AVAudioPlayerNode()
        audioEngine.attach(environmentNode)
        audioEngine.attach(playerNode)
        audioEngine.connect(playerNode, to: environmentNode, format: nil)
        audioEngine.connect(environmentNode, to: audioEngine.mainMixerNode, format: nil)
        environmentNode.listenerPosition = AVAudio3DPoint(x: 0, y: 0, z: 0)
        environmentNode.listenerAngularOrientation = AVAudio3DAngularOrientation(yaw: 0, pitch: 0, roll: 0)
        environmentNode.distanceAttenuationParameters.referenceDistance = 1.0
        environmentNode.distanceAttenuationParameters.maximumDistance = 100.0
        environmentNode.distanceAttenuationParameters.rolloffFactor = 2.0
        // example.mp3 is mono sound
        guard let audioURL = Bundle.main.url(forResource: "example", withExtension: "mp3") else {
            print("Audio file not found")
            return
        }
        do {
            audioFile = try AVAudioFile(forReading: audioURL)
        } catch {
            print("Failed to load audio file: \(error)")
        }
   }
...
    //Playing sound
    func playSpatialAudio(pan: Float) {
        guard let audioFile = audioFile else { return }
        // Negative pan places the source to the left, positive to the right
        playerNode.position = AVAudio3DPoint(x: pan, y: 0, z: 0)
        playerNode.scheduleFile(audioFile, at: nil, completionHandler: nil)
        do {
            try audioEngine.start()
            playerNode.play()
        } catch {
            print("Failed to start audio engine: \(error)")
        }
    }
...
}
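From the AVAudioEnvironmentNode docs, my understanding is that the node only spatializes mono inputs, and connecting with format: nil uses the file's own format, so anything stereo would bypass 3D rendering entirely. This is the connection I think I should be making instead (a sketch based on my reading of the docs; the mono format and the .HRTFHQ rendering algorithm are my assumptions, not a confirmed fix for the Control Center indicator):

// Would replace the two connect() calls in setupAudio() above
func connectForSpatialization(file: AVAudioFile) {
    // AVAudioEnvironmentNode applies 3D spatialization only to mono inputs,
    // so force an explicit one-channel connection format.
    let monoFormat = AVAudioFormat(
        standardFormatWithSampleRate: file.processingFormat.sampleRate,
        channels: 1)
    audioEngine.connect(playerNode, to: environmentNode, format: monoFormat)
    audioEngine.connect(environmentNode, to: audioEngine.mainMixerNode, format: nil)
    // Request head-related transfer function rendering for the source
    // (AVAudioPlayerNode exposes this via the AVAudio3DMixing protocol).
    playerNode.renderingAlgorithm = .HRTFHQ
}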
- Second, more complex approach using PHASE did better. I made a sample app that lets the user move the audio source in 3D space. I added reverb, plus sliders that change the source position up to 10 meters in each direction from the listener, but the audio only really changes left to right (the x-axis); see the position-update sketch after the code. Again, I suspect the trouble is the app not being recognized as spatial.
import AVFoundation
import ModelIO
import PHASE
import simd

// Crucial class variables:
class PHASEAudioController: ObservableObject {
    private var soundSourcePosition: simd_float4x4 = matrix_identity_float4x4
    private var audioAsset: PHASESoundAsset!
    private let phaseEngine: PHASEEngine
    private let params = PHASEMixerParameters()
    private var soundSource: PHASESource
    private var phaseListener: PHASEListener!
    private var soundEventAsset: PHASESoundEventNodeAsset?
    // Declared with the other class variables (elided above); shown here so the code compiles
    private var soundPipeline: PHASESpatialMixerDefinition!
    private let rolloffFactor: Double = 1.0
    // Initialization of PHASE
    init() {
        do {
            let session = AVAudioSession.sharedInstance()
            try session.setCategory(.playback, mode: .default, options: [])
            try session.setActive(true)
        } catch {
            print("Failed to configure AVAudioSession: \(error.localizedDescription)")
        }
        // Init PHASE Engine
        phaseEngine = PHASEEngine(updateMode: .automatic)
        phaseEngine.defaultReverbPreset = .mediumHall
        phaseEngine.outputSpatializationMode = .automatic // nothing helps
        // Set listener position to (0,0,0) in World space
        let origin: simd_float4x4 = matrix_identity_float4x4
        phaseListener = PHASEListener(engine: phaseEngine)
        phaseListener.transform = origin
        phaseListener.automaticHeadTrackingFlags = .orientation
        try! self.phaseEngine.rootObject.addChild(self.phaseListener)
        do {
            try self.phaseEngine.start()
        } catch {
            print("Could not start PHASE engine")
        }
 
        audioAsset = loadAudioAsset()
        // Create sound Source
        // Sphere
        // Offset the source 3 m along +z from the listener
        // (translate(z:) was a custom simd_float4x4 extension; set the column directly instead)
        soundSourcePosition.columns.3.z = 3.0
        let sphere = MDLMesh.newEllipsoid(withRadii: vector_float3(0.1,0.1,0.1), radialSegments: 14, verticalSegments: 14, geometryType: MDLGeometryType.triangles, inwardNormals: false, hemisphere: false, allocator: nil)
        let shape = PHASEShape(engine: phaseEngine, mesh: sphere)
        soundSource = PHASESource(engine: phaseEngine, shapes: [shape])
        soundSource.transform = soundSourcePosition
        print(soundSourcePosition)
        do {
            try phaseEngine.rootObject.addChild(soundSource)
        }
        catch {
            print ("Failed to add a child object to the scene.")
        }
        // The spatial mixer definition (its creation was elided above); distance
        // attenuation is attached to it below
        let spatialPipeline = PHASESpatialPipeline(flags: [.directPathTransmission, .lateReverb])!
        soundPipeline = PHASESpatialMixerDefinition(spatialPipeline: spatialPipeline)
        let simpleModel = PHASEGeometricSpreadingDistanceModelParameters()
        simpleModel.rolloffFactor = rolloffFactor
        soundPipeline.distanceModelParameters = simpleModel
        
        let samplerNode = PHASESamplerNodeDefinition(
            soundAssetIdentifier: audioAsset.identifier,
            mixerDefinition: soundPipeline,
            identifier: audioAsset.identifier + "_SamplerNode")
        samplerNode.playbackMode = .looping
        do {
            soundEventAsset = try phaseEngine.assetRegistry.registerSoundEventAsset(
                rootNode: samplerNode,
                identifier: audioAsset.identifier + "_SoundEventAsset")
        } catch {
            print("Failed to register a sound event asset.")
            soundEventAsset = nil
        }
}
    // Playing sound
    func playSound() {
        // Fire new sound event with currently set properties
        guard let soundEventAsset else { return }
        
        params.addSpatialMixerParameters(
            identifier: soundPipeline.identifier,
            source: soundSource,
            listener: phaseListener)
        let soundEvent = try! PHASESoundEvent(engine: phaseEngine,
                                              assetIdentifier: soundEventAsset.identifier,
                                              mixerParameters: params)
        soundEvent.start(completion: nil)
    }
...
}
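For completeness, this is roughly how the sliders drive the source position (a sketch; updateSourcePosition is just my helper name, called from the SwiftUI slider bindings):

    // Offsets are in meters, relative to the listener at the origin
    func updateSourcePosition(x: Float, y: Float, z: Float) {
        var transform = matrix_identity_float4x4
        transform.columns.3 = simd_float4(x, y, z, 1.0)
        soundSourcePosition = transform
        // With updateMode: .automatic, PHASE picks up the new transform on its next tick
        soundSource.transform = soundSourcePosition
    }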
It may also be worth mentioning that I only have a personal (free) team account, in case that limits any capabilities or entitlements relevant here.