If I place the .usdz file in the project directory alongside other .swift files, ModelEntity loads it perfectly. However, if I try to load the same file from Reality Composer Pro under RealityKitContent.rkassets, I get the error: resourceNotFound("heart").
Could someone help me with this? Thank you so much.
Code:
//
// TestttttttApp.swift
// Testtttttt
//
// Created by Zhendong Chen on 2/17/25.
//
import SwiftUI

@main
struct TestttttttApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
        .windowStyle(.volumetric)
    }
}
//
// ContentView.swift
// Testtttttt
//
// Created by Zhendong Chen on 2/17/25.
//
import SwiftUI
import RealityKit
import RealityKitContent

struct ContentView: View {
    @State private var enlarge = false

    var body: some View {
        RealityView { content in
            do {
                // MARK: Works
                let scene = try await ModelEntity(named: "heart")
                content.add(scene)

                // MARK: Doesn't work
                // let scene = try await ModelEntity(named: "heart", in: realityKitContentBundle)
                // content.add(scene)
            } catch {
                print(error)
            }
        }
    }
}

#Preview(windowStyle: .volumetric) {
    ContentView()
}
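For completeness, here is a minimal, self-contained variant I plan to test, assuming heart.usdz really has been added inside RealityKitContent.rkassets in Reality Composer Pro (not only to the Xcode project navigator) and the package has been rebuilt. HeartFromBundleView is just a name made up for this sketch; the only difference from the code above is loading a plain Entity with Entity(named:in:) instead of ModelEntity(named:in:).

import SwiftUI
import RealityKit
import RealityKitContent

struct HeartFromBundleView: View {
    var body: some View {
        RealityView { content in
            do {
                // Entity(named:in:) resolves the asset by its file name
                // (without the .usdz extension) inside the given bundle.
                let heart = try await Entity(named: "heart", in: realityKitContentBundle)
                content.add(heart)
            } catch {
                // Still prints resourceNotFound if the bundle cannot resolve "heart".
                print("Failed to load heart from RealityKitContent:", error)
            }
        }
    }
}

If this still throws resourceNotFound, the next thing I will check is whether the file actually appears inside the .rkassets folder when the Reality Composer Pro package is opened, rather than only sitting next to the Swift sources.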
I am currently developing an app for visionOS and have encountered an issue involving a component and system that moves an entity up and down within a specific Y-axis range. The system works as expected until I introduce sound playback using AVAudioPlayer.
Whenever I use AVAudioPlayer to play sound, the entity exhibits unexpected behaviors, such as freezing or becoming unresponsive. The freezing of the entity's movement is particularly noticeable when playing the audio for the first time. After that, it becomes less noticeable, but you can still feel it, especially when the audio is played in quick succession.
Also, the issue is more noticeable on a real device than in the simulator.
//
// IssueApp.swift
// Issue
//
// Created by Zhendong Chen on 2/1/25.
//
import SwiftUI

@main
struct IssueApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
        .windowStyle(.volumetric)
    }
}
//
// ContentView.swift
// Issue
//
// Created by Zhendong Chen on 2/1/25.
//
import SwiftUI
import RealityKit
import RealityKitContent

struct ContentView: View {
    @State var enlarge = false

    var body: some View {
        RealityView { content, attachments in
            // Add the initial RealityKit content
            if let scene = try? await Entity(named: "Scene", in: realityKitContentBundle) {
                if let sphere = scene.findEntity(named: "Sphere") {
                    sphere.components.set(UpAndDownComponent(speed: 0.03, minY: -0.05, maxY: 0.05))
                }
                if let button = attachments.entity(for: "Button") {
                    button.position.y -= 0.3
                    scene.addChild(button)
                }
                content.add(scene)
            }
        } attachments: {
            Attachment(id: "Button") {
                VStack {
                    Button {
                        SoundManager.instance.playSound(filePath: "apple_en")
                    } label: {
                        Text("Play audio")
                    }
                    .animation(.none, value: 0)
                    .fontWeight(.semibold)
                }
                .padding()
                .glassBackgroundEffect()
            }
        }
        .onAppear {
            UpAndDownSystem.registerSystem()
        }
    }
}
//
// SoundManager.swift
// LinguaBubble
//
// Created by Zhendong Chen on 1/14/25.
//
import Foundation
import AVFoundation

class SoundManager {
    static let instance = SoundManager()
    private var audioPlayer: AVAudioPlayer?

    func playSound(filePath: String) {
        guard let url = Bundle.main.url(forResource: filePath, withExtension: ".mp3") else { return }
        do {
            audioPlayer = try AVAudioPlayer(contentsOf: url)
            audioPlayer?.play()
        } catch let error {
            print("Error playing sound. \(error.localizedDescription)")
        }
    }
}
//
// UpAndDownComponent+System.swift
// Issue
//
// Created by Zhendong Chen on 2/1/25.
//
import RealityKit

struct UpAndDownComponent: Component {
    var speed: Float
    var axis: SIMD3<Float>
    var minY: Float
    var maxY: Float
    var direction: Float = 1.0 // 1 for up, -1 for down
    var initialY: Float?

    init(speed: Float = 1.0, axis: SIMD3<Float> = [0, 1, 0], minY: Float = 0.0, maxY: Float = 1.0) {
        self.speed = speed
        self.axis = axis
        self.minY = minY
        self.maxY = maxY
    }
}

struct UpAndDownSystem: System {
    static let query = EntityQuery(where: .has(UpAndDownComponent.self))

    init(scene: RealityKit.Scene) {}

    func update(context: SceneUpdateContext) {
        let deltaTime = Float(context.deltaTime) // Time between frames
        for entity in context.entities(matching: Self.query, updatingSystemWhen: .rendering) {
            guard var component: UpAndDownComponent = entity.components[UpAndDownComponent.self] else { continue }

            // Ensure we have the initial Y value set
            if component.initialY == nil {
                component.initialY = entity.transform.translation.y
            }

            // Calculate the current position
            let currentY = entity.transform.translation.y

            // Move the entity up or down
            let newY = currentY + (component.speed * component.direction * deltaTime)

            // If the entity moves out of the allowed range, reverse the direction
            if newY >= component.initialY! + component.maxY {
                component.direction = -1.0 // Move down
            } else if newY <= component.initialY! + component.minY {
                component.direction = 1.0 // Move up
            }

            // Apply the new position
            entity.transform.translation = SIMD3<Float>(entity.transform.translation.x, newY, entity.transform.translation.z)

            // Update the component with the new direction
            entity.components[UpAndDownComponent.self] = component
        }
    }
}
Could someone help me with this?
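One mitigation I am experimenting with, assuming the hitch comes from constructing and priming the AVAudioPlayer on the main thread at tap time: build the player once and call prepareToPlay() up front, so the button tap only calls play(). PreloadedSoundManager is a hypothetical variant of the SoundManager above, not the class my app currently uses.

import Foundation
import AVFoundation

final class PreloadedSoundManager {
    static let instance = PreloadedSoundManager()
    private var players: [String: AVAudioPlayer] = [:]

    /// Call this ahead of time (for example in onAppear) so decoding and
    /// buffer allocation do not happen on the frame where play() runs.
    func preload(filePath: String) {
        guard players[filePath] == nil,
              let url = Bundle.main.url(forResource: filePath, withExtension: "mp3") else { return }
        do {
            let player = try AVAudioPlayer(contentsOf: url)
            _ = player.prepareToPlay() // prime buffers before the first play
            players[filePath] = player
        } catch {
            print("Error preloading sound: \(error.localizedDescription)")
        }
    }

    func playSound(filePath: String) {
        players[filePath]?.play()
    }
}

With this, ContentView's onAppear could call PreloadedSoundManager.instance.preload(filePath: "apple_en") so the first tap no longer pays the setup cost. Separately, even with preloading, a long frame makes context.deltaTime larger, so UpAndDownSystem moves the sphere by one big step, which can read as a freeze-then-jump. A small hypothetical helper that the system could apply after computing newY would keep a hitch from pushing the entity past its range:

// Clamp the next Y to the allowed band around the starting height.
func clampedY(_ newY: Float, initialY: Float, minY: Float, maxY: Float) -> Float {
    min(max(newY, initialY + minY), initialY + maxY)
}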
I am trying to apply an ImpulseAction to an entity, but every time entity.playAnimation(impulseAnimation) is executed, the log says Cannot find a BindPoint for any bind path: "". I can't figure out what is wrong. Could someone please help me with this?
import SwiftUI
import RealityKit
import RealityKitContent

struct ImmersiveView: View {
    var body: some View {
        RealityView { content in
            // Add the initial RealityKit content
            if let immersiveContentEntity = try? await Entity(named: "Immersive", in: realityKitContentBundle),
               let sphere = immersiveContentEntity.findEntity(named: "Sphere") {
                sphere.components.set(CollisionComponent(shapes: [ShapeResource.generateSphere(radius: 0.1)]))
                sphere.components.set(PhysicsBodyComponent(shapes: [ShapeResource.generateSphere(radius: 0.1)], mass: 1000))
                sphere.components[PhysicsBodyComponent.self]?.isAffectedByGravity = false
                sphere.position = [0, 1, -1]
                content.add(immersiveContentEntity)

                // Create an action to apply an impulse, forcing the object to move upwards.
                let impulseAction = ImpulseAction(linearImpulse: [0, 1, 0])

                // Create a small positive duration value.
                let duration: TimeInterval = 1 / 30.0

                // Create an animation for the action, which will start playing
                // after five seconds.
                do {
                    let impulseAnimation = try AnimationResource
                        .makeActionAnimation(for: impulseAction,
                                             duration: duration,
                                             delay: 5.0)
                    // Play the sequence animation that will play the actions.
                    sphere.playAnimation(impulseAnimation)
                } catch {
                    print("Error: \(error)")
                }
            }
        }
    }
}
All the logs:
Could not locate file 'default-binaryarchive.metallib' in bundle.
Error creating the CFMessagePort needed to communicate with PPT.
AddInstanceForFactory: No factory registered for id <CFUUID 0x6000029a5b80> F8BB1C28-BAE8-11D6-9C31-00039315CD46
cannot add handler to 0 from 1 - dropping
nw_socket_copy_info [C1:2] getsockopt TCP_INFO failed [102: Operation not supported on socket]
nw_socket_copy_info getsockopt TCP_INFO failed [102: Operation not supported on socket]
Registering library (/Library/Developer/CoreSimulator/Volumes/xrOS_22N840/Library/Developer/CoreSimulator/Profiles/Runtimes/xrOS 2.2.simruntime/Contents/Resources/RuntimeRoot/System/Library/PrivateFrameworks/CoreRE.framework/default.metallib) that already exists in shader manager. Library will be overwritten.
cannot add handler to 0 from 1 - dropping
Cannot find a BindPoint for any bind path: "", ""
Sync object without snapshot while removing view (id: 2816861686082450363, type: 6373420419761316588[SelectableSceneContentIdentifierComponent]).
But I think only Cannot find a BindPoint for any bind path: "", "" is relevant.
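In case the action route keeps logging that warning, a fallback I am considering is to skip ImpulseAction and give the dynamic body its kick by setting a velocity through PhysicsMotionComponent. An impulse J on a body of mass m changes velocity by Δv = J / m, so the [0, 1, 0] impulse on the 1000 kg sphere corresponds to a 0.001 m/s upward velocity. ImpulseFallbackView is a hypothetical name; this is a sketch of the idea, not what I currently ship.

import SwiftUI
import RealityKit
import RealityKitContent

struct ImpulseFallbackView: View {
    var body: some View {
        RealityView { content in
            guard let immersiveContentEntity = try? await Entity(named: "Immersive", in: realityKitContentBundle),
                  let sphere = immersiveContentEntity.findEntity(named: "Sphere") else { return }

            sphere.components.set(CollisionComponent(shapes: [ShapeResource.generateSphere(radius: 0.1)]))
            sphere.components.set(PhysicsBodyComponent(shapes: [ShapeResource.generateSphere(radius: 0.1)], mass: 1000))
            sphere.components[PhysicsBodyComponent.self]?.isAffectedByGravity = false
            sphere.position = [0, 1, -1]
            content.add(immersiveContentEntity)

            // A 1 N·s impulse on a 1000 kg body is Δv = J / m = 0.001 m/s,
            // applied here after the same five-second delay as the action.
            Task {
                try? await Task.sleep(for: .seconds(5))
                sphere.components.set(PhysicsMotionComponent(linearVelocity: [0, 0.001, 0]))
            }
        }
    }
}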
I am using the Xcode visionOS debugging tool to visualize the bounds of all the containers, and it shows that my entity is inside the volume. So why does it get clipped? Is there something wrong with the debugger, or am I missing something?
import SwiftUI

@main
struct RealityViewAttachmentApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
        .windowStyle(.volumetric)
        .defaultSize(Size3D(width: 1, height: 1, depth: 1), in: .meters)
    }
}
import SwiftUI
import RealityKit
import RealityKitContent

struct ContentView: View {
    var body: some View {
        RealityView { content, attachments in
            if let earth = try? await Entity(named: "Scene", in: realityKitContentBundle) {
                content.add(earth)
                if let earthAttachment = attachments.entity(for: "earth_label") {
                    earthAttachment.position = [0, -0.15, 0]
                    earth.addChild(earthAttachment)
                }
                if let textAttachment = attachments.entity(for: "text_label") {
                    textAttachment.position = [-0.5, 0, 0]
                    earth.addChild(textAttachment)
                }
            }
        } attachments: {
            Attachment(id: "earth_label") {
                Text("Earth")
            }
            Attachment(id: "text_label") {
                VStack {
                    Text("This is just an example")
                        .font(.title)
                        .padding(.bottom, 20)
                    Text("This is just some random content")
                        .font(.caption)
                }
                .frame(minWidth: 100, maxWidth: 300, minHeight: 100, maxHeight: 300)
                .glassBackgroundEffect()
            }
        }
    }
}
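One thing I want to rule out is that the text attachment placed at x = -0.5 simply extends past the 1 m volume once its own width is added, since volumetric windows clip content at their bounds. Below is a sketch of how I plan to check this, assuming the GeometryReader3D plus content.convert pattern from Apple's volumetric sample code; MeasuredContentView and the 0.2 m inset are made up for this example.

import SwiftUI
import RealityKit
import RealityKitContent

struct MeasuredContentView: View {
    var body: some View {
        GeometryReader3D { geometry in
            RealityView { content, attachments in
                // Convert the volume's local SwiftUI frame into RealityKit
                // scene coordinates (meters, origin at the volume's center).
                let bounds = content.convert(geometry.frame(in: .local),
                                             from: .local,
                                             to: content)
                if let earth = try? await Entity(named: "Scene", in: realityKitContentBundle) {
                    content.add(earth)
                    if let textAttachment = attachments.entity(for: "text_label") {
                        // Keep the label inset from the -X face so none of its
                        // width crosses the clipping boundary.
                        textAttachment.position = [bounds.min.x + 0.2, 0, 0]
                        earth.addChild(textAttachment)
                    }
                }
            } attachments: {
                Attachment(id: "text_label") {
                    Text("This is just an example")
                        .glassBackgroundEffect()
                }
            }
        }
    }
}

If the measured bounds confirm that the label crosses the -X face, the clipping would be expected behavior rather than a debugger problem.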
My app has a window and a volume. I am trying to display the volume on the right side of the window. I know .defaultWindowPlacement can achieve that, but I want more control over the exact position of the volume relative to the window. I need the volume to move as I move the window, so that it always stays in the same position relative to the window. I think I need a way to track the positions of both the window and the volume. If this can be achieved without an immersive space, that would be great. If not, how do I do it in an immersive space?
Current code:
import SwiftUI

@main
struct tiktokForSpacialModelingApp: App {
    @State private var appModel: AppModel = AppModel()

    var body: some Scene {
        WindowGroup(id: appModel.launchWindowID) {
            LaunchWindow()
                .environment(appModel)
        }
        .windowResizability(.contentSize)

        WindowGroup(id: appModel.mainViewWindowID) {
            MainView()
                .frame(minWidth: 500, maxWidth: 600, minHeight: 1200, maxHeight: 1440)
                .environment(appModel)
        }
        .windowResizability(.contentSize)

        WindowGroup(id: appModel.postVolumeID) {
            let initialSize = Size3D(width: 900, height: 500, depth: 900)
            PostVolume()
                .frame(minWidth: initialSize.width, maxWidth: initialSize.width * 4, minHeight: initialSize.height, maxHeight: initialSize.height * 4)
                .frame(minDepth: initialSize.depth, maxDepth: initialSize.depth * 4)
        }
        .windowStyle(.volumetric)
        .windowResizability(.contentSize)
        .defaultWindowPlacement { content, context in
            // Get WindowProxy from context based on id
            if let mainViewWindow = context.windows.first(where: { $0.id == appModel.mainViewWindowID }) {
                return WindowPlacement(.trailing(mainViewWindow))
            } else {
                return WindowPlacement()
            }
        }

        ImmersiveSpace(id: appModel.immersiveSpaceID) {
            ImmersiveView()
                .onAppear {
                    appModel.immersiveSpaceState = .open
                }
                .onDisappear {
                    appModel.immersiveSpaceState = .closed
                }
        }
        .immersionStyle(selection: .constant(.progressive), in: .progressive)
    }
}
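For the immersive-space route, the pattern I am considering is to render both pieces of UI as entities in a single RealityView inside the ImmersiveSpace, and parent the "volume" content to the "window" content with a fixed offset, so it follows automatically whenever the parent moves. This is only a sketch: FollowingContentView and the entity layout are made up for the example, and moving the parent would still need its own gesture handling (for example a DragGesture targeted at the root entity).

import SwiftUI
import RealityKit

struct FollowingContentView: View {
    var body: some View {
        RealityView { content, attachments in
            // The root entity stands in for the "window"; everything that must
            // stay in a fixed relative position is a child of it.
            let root = Entity()
            root.position = [0, 1.2, -1.5] // roughly eye height, 1.5 m ahead
            content.add(root)

            if let panel = attachments.entity(for: "mainPanel") {
                root.addChild(panel)
            }

            // The 3D "volume" content is a sibling child, offset to the right,
            // so moving root moves both and their relative layout never changes.
            let postContent = ModelEntity(mesh: .generateBox(size: 0.3),
                                          materials: [SimpleMaterial()])
            postContent.position = [0.6, 0, 0]
            root.addChild(postContent)
        } attachments: {
            Attachment(id: "mainPanel") {
                // Stand-in for the app's MainView content.
                Text("Main window content")
                    .padding(80)
                    .glassBackgroundEffect()
            }
        }
    }
}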
On TikTok on Vision Pro, the home page has different minimum and maximum window heights and widths compared to the search page.
I am able to set a different minimum window size for each tab view, but the maximum size doesn't seem to take effect.
Code:
// WindowSizeModel.swift
import Foundation
import SwiftUI

enum TabType {
    case home
    case search
    case profile
}

@Observable
class WindowSizeModel {
    var minWidth: CGFloat = 400
    var maxWidth: CGFloat = 500
    var minHeight: CGFloat = 400
    var maxHeight: CGFloat = 500

    func setWindowSize(for tab: TabType) {
        switch tab {
        case .home:
            configureWindowSize(minWidth: 400, maxWidth: 500, minHeight: 400, maxHeight: 500)
        case .search:
            configureWindowSize(minWidth: 300, maxWidth: 800, minHeight: 300, maxHeight: 800)
        case .profile:
            configureWindowSize(minWidth: 800, maxWidth: 1000, minHeight: 800, maxHeight: 1000)
        }
    }

    private func configureWindowSize(minWidth: CGFloat, maxWidth: CGFloat, minHeight: CGFloat, maxHeight: CGFloat) {
        self.minWidth = minWidth
        self.maxWidth = maxWidth
        self.minHeight = minHeight
        self.maxHeight = maxHeight
    }
}
// tiktokForSpacialModelingApp.swift
import SwiftUI

@main
struct tiktokForSpacialModelingApp: App {
    @State private var windowSizeModel: WindowSizeModel = WindowSizeModel()

    var body: some Scene {
        WindowGroup {
            MainView()
                .frame(
                    minWidth: windowSizeModel.minWidth, maxWidth: windowSizeModel.maxWidth,
                    minHeight: windowSizeModel.minHeight, maxHeight: windowSizeModel.maxHeight)
                .environment(windowSizeModel)
        }
        .windowResizability(.contentSize)
    }
}
// MainView.swift
import SwiftUI
import RealityKit

struct MainView: View {
    @State private var selectedTab: TabType = TabType.home
    @Environment(WindowSizeModel.self) var windowSizeModel

    var body: some View {
        @Bindable var windowSizeModel = windowSizeModel
        TabView(selection: $selectedTab) {
            Tab("Home", systemImage: "play.house", value: TabType.home) {
                HomeView()
            }
            Tab("Search", systemImage: "magnifyingglass", value: TabType.search) {
                SearchView()
            }
            Tab("Profile", systemImage: "person.crop.circle", value: TabType.profile) {
                ProfileView()
            }
        }
        .onAppear {
            windowSizeModel.setWindowSize(for: TabType.home)
        }
        .onChange(of: selectedTab) { oldTab, newTab in
            if oldTab == newTab {
                return
            } else if newTab == TabType.home {
                windowSizeModel.setWindowSize(for: TabType.home)
            } else if newTab == TabType.search {
                windowSizeModel.setWindowSize(for: TabType.search)
            } else if newTab == TabType.profile {
                windowSizeModel.setWindowSize(for: TabType.profile)
            }
        }
    }
}
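One workaround I am evaluating, in case changing the frame's maximum at runtime genuinely cannot shrink an already-open window: give each size class its own WindowGroup with fixed frame limits and switch windows with openWindow/dismissWindow when the tab changes. This is only a sketch; PerTabWindowApp, the scene IDs, and the placeholder views are names made up for the example.

import SwiftUI

@main
struct PerTabWindowApp: App {
    var body: some Scene {
        WindowGroup(id: "home") {
            HomeWindowView()
                .frame(minWidth: 400, maxWidth: 500, minHeight: 400, maxHeight: 500)
        }
        .windowResizability(.contentSize)

        WindowGroup(id: "search") {
            SearchWindowView()
                .frame(minWidth: 300, maxWidth: 800, minHeight: 300, maxHeight: 800)
        }
        .windowResizability(.contentSize)
    }
}

// Switching tabs becomes switching windows: open the target scene, then
// dismiss the current one, so each appears with its own resize limits.
struct HomeWindowView: View {
    @Environment(\.openWindow) private var openWindow
    @Environment(\.dismissWindow) private var dismissWindow

    var body: some View {
        Button("Go to Search") {
            openWindow(id: "search")
            dismissWindow(id: "home")
        }
    }
}

struct SearchWindowView: View {
    var body: some View {
        Text("Search")
    }
}

The trade-off is that the tab bar is replaced by window-to-window navigation, which changes the feel of the app.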