Search code examples
swift · arkit · raycasting

Show a mask on an object that is between the camera and a wall using RealityKit


I made a video for generating a floor plan, in which I need to capture the wall and floor together at a certain position. If the user is too near to the wall, or if any object comes between the camera and the wall/floor, I need to show a "Too Close" mask on that object, as shown in this video.

I tried to use a raycast in the session(_ session: ARSession, didUpdate frame: ARFrame) method, but I am very new to AR and don't know which method I need to use.

/// Fires once per frame. Raycasts from the centre of the view toward the
/// nearest estimated plane and prints a "Too Close" warning when the camera
/// is within 0.5 m of the surface the ray hits.
/// - Parameters:
///   - session: The ARSession delivering the update.
///   - frame: The current ARFrame; its camera transform gives the device position.
func session(_ session: ARSession, didUpdate frame: ARFrame) {

    // Build a raycast query from the screen centre against any estimated plane.
    guard let query = self.arView?.makeRaycastQuery(from: self.arView?.center ?? CGPoint.zero,
                                              allowing: .estimatedPlane,
                                              alignment: .any)
    else { return }

    // Take the nearest hit; bail out if the ray hit nothing.
    guard let raycastResult = self.arView?.session.raycast(query).first
    else { return }

    // World-space position of the surface the ray hit (NOTE: this is the
    // hit point on the wall/floor, not the camera position — the original
    // variable name `currentPositionOfCamera` was misleading).
    let hitPosition = raycastResult.worldTransform.getPosition()
    if hitPosition != .zero {
        let distanceFromCamera = frame.camera.transform.getPosition().distanceFrom(position: hitPosition)
        print("Distance from raycast:", distanceFromCamera)
        // BUG FIX: the original compared an undefined `distance` symbol;
        // compare the value actually computed above.
        if distanceFromCamera < 0.5 {
             print("Too Close")
        }
    }

}

Solution

  • I am just learning ARKit and RealityKit as well, but wouldn't your code be:

    // Camera position in world space (RealityKit convenience property).
    let currentPositionOfCamera = self.arView.cameraTransform.translation

    if currentPositionOfCamera != .zero {

        // `distance` is defined in simd as the distance between 2 points.
        let distanceFromCamera = distance(raycastResult.worldTransform.position, currentPositionOfCamera)
        print("Distance from raycast:", distanceFromCamera)
        if distanceFromCamera < 0.5 {
            print("Too Close")

            // Unit vector from the camera toward the raycast hit point.
            let rayDirection = normalize(raycastResult.worldTransform.position - currentPositionOfCamera)
            // Pull the text back toward the camera from the plane.
            // BUG FIX: the original referenced an undefined `result`;
            // the raycast result variable is `raycastResult`.
            let textPositionInWorldCoordinates = raycastResult.worldTransform.position - (rayDirection * 0.1)

            // BUG FIX: the original called `self.model(for: classification)`
            // with an undefined `classification`; use the `tooCloseModel()`
            // helper defined below instead.
            let textEntity = self.tooCloseModel()
            // Scale the text with distance so it appears a consistent size.
            // BUG FIX: the original used an undefined `raycastDistance`.
            textEntity.scale = .one * distanceFromCamera

            // Orient the text to face the camera, then move it to the
            // computed position in front of the plane.
            var textPositionWithCameraOrientation = self.arView.cameraTransform
            textPositionWithCameraOrientation.translation = textPositionInWorldCoordinates
            // BUG FIX: build the anchor locally so we add a non-optional
            // value; the original called `addChild` on the optional
            // `self.textAnchor` without unwrapping it.
            let anchor = AnchorEntity(world: textPositionWithCameraOrientation.matrix)
            anchor.addChild(textEntity)
            // self.textAnchor is defined somewhere in the class as an optional.
            self.textAnchor = anchor
            self.arView.scene.addAnchor(anchor)
        } else {
            guard let textAnchor = self.textAnchor else { return }
            // BUG FIX: anchors are removed from the scene, not via an
            // undefined `self.removeAnchor`; also clear the stored
            // reference so we don't remove it twice.
            self.arView.scene.removeAnchor(textAnchor)
            self.textAnchor = nil
        }
    }
    
    /// Creates a "Too Close" extruded-text ModelEntity, centred horizontally
    /// on its local origin so it can be placed directly at an anchor.
    /// - Returns: A ModelEntity ready to be added as a child of an anchor.
    func tooCloseModel() -> ModelEntity {
            let lineHeight: CGFloat = 0.05
            let font = MeshResource.Font.systemFont(ofSize: lineHeight)
            // Extrusion depth is a fraction of the line height so the text
            // stays thin at any scale.
            let textMesh = MeshResource.generateText("Too Close", extrusionDepth: Float(lineHeight * 0.1), font: font)
            // BUG FIX: `classification` does not exist in this function (it
            // was copied from Apple's scene-semantics sample); use a fixed
            // warning colour instead.
            let textMaterial = SimpleMaterial(color: .red, isMetallic: true)
            let model = ModelEntity(mesh: textMesh, materials: [textMaterial])
            // Center the text horizontally around the entity's origin.
            model.position.x -= model.visualBounds(relativeTo: nil).extents.x / 2
            return model
    }
    

    This code is adapted from Apple's Visualizing Scene Semantics.