The project is building without any errors or warnings and when I run the app I'm able to see the camera view. But when I click on the start scanning button the camera view freezes and nothing happens. It is supposed to start plotting red points in the environment when I tap the start scanning button and when I tap the stop scanning button it should direct me to another screen to display the point cloud itself. The following are the different files in my project. Please let me know what I'm doing incorrectly.
ContentView.swift
import AVFoundation
import ARKit
import RealityKit
import SceneKit
import SwiftUI
import Foundation
struct ContentView: View {
@StateObject var scanner = ScannerViewController()
var body: some View {
NavigationView {
VStack {
ARViewContainer().edgesIgnoringSafeArea(.all)
HStack {
Spacer()
Button(action: {
scanner.startScanning()
}, label: {
Text("Start Scanning")
})
.padding()
Spacer()
NavigationLink(destination: PointCloudView(pointCloud: scanner.pointCloud, pointSize: 5.0)) {
Text("Stop Scanning")
}
.padding()
Spacer()
}
}
.navigationBarTitle(Text("Scan an Object"))
}
}
struct ARViewContainer: UIViewRepresentable {
func makeUIView(context: Context) -> ARView {
let arView = ARView(frame: .zero)
return arView
}
func updateUIView(_ uiView: ARView, context: Context) {
}
}
}
PointCloudView.swift
import AVFoundation
import ARKit
import RealityKit
import SceneKit
import SwiftUI
import Foundation
struct PointCloudView: View {
var pointCloud: [SIMD3<Float>]
var pointSize: CGFloat
var body: some View {
ZStack {
ForEach(pointCloud, id: \.self) { point in
Sphere()
.frame(width: pointSize, height: pointSize, alignment: .center)
.position(CGPoint(x: CGFloat(point.x), y: CGFloat(point.y)))
.foregroundColor(.red)
}
}
}
}
struct Sphere: Shape {
func path(in rect: CGRect) -> Path {
let center = CGPoint(x: rect.width / 2, y: rect.height / 2)
let radius = min(rect.width, rect.height) / 2
let path = Path { p in
p.addArc(center: center, radius: radius, startAngle: .zero, endAngle: .degrees(360), clockwise: true)
}
return path
}
}
ScannerViewController.swift
import AVFoundation
import ARKit
import RealityKit
import SceneKit
import SwiftUI
import Combine
import Foundation
class ScannerViewController: NSObject, ObservableObject {
private var cancellables = Set<AnyCancellable>()
@Published var isScanning = false
@Published var pointCloud: [SIMD3<Float>] = []
private let arSession = ARSession()
private var arConfiguration: ARConfiguration {
let configuration = ARWorldTrackingConfiguration()
configuration.planeDetection = .horizontal
configuration.environmentTexturing = .automatic
return configuration
}
func startScanning() {
print("started scanning")
isScanning = true
arSession.run(arConfiguration)
}
func stopScanning() {
print("stopped scanning")
isScanning = false
arSession.pause()
}
func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
print("continusoyl called")
guard isScanning else { return }
// Get the current ARFrame
guard let currentFrame = self.arSession.currentFrame else { return }
// Generate the point cloud data from the ARFrame
guard let pointCloud = currentFrame.rawFeaturePoints?.points else { return }
// Convert the point cloud data to a format that can be displayed in the PointCloudView
let convertedPointCloud = pointCloud.map { point in
SIMD3<Float>(point.x, point.y, point.z)
}
// Pass the converted point cloud data to the PointCloudView
DispatchQueue.main.async {
self.pointCloud = convertedPointCloud
}
}
}
I've tried using both SceneKit and Metal to render the point cloud and referred to apple's documentation: https://developer.apple.com/documentation/arkit/environmental_analysis/displaying_a_point_cloud_using_scene_depth
I've also tried building a storyboard project but storyboard and SceneKit have been creating a lot of errors. Using metal and swiftui builds without errors at the very least but still doesn't function.