iOS – Export and draw ARSCNFaceGeometry mesh from saved .obj file into UIImage

I want to save only the face mesh image from the ARSCNView for every frame. Below is the code I am using.

override func viewDidAppear(_ animated: Bool) {

    let configuration = ARFaceTrackingConfiguration()
    sceneView.session.run(configuration)

}

func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {

    // Build the face geometry on the view's Metal device and draw it as a wireframe.
    let faceMesh = ARSCNFaceGeometry(device: sceneView.device!)
    let node = SCNNode(geometry: faceMesh)

    node.name = "Test Node"
    node.geometry?.name = "Test geometry"
    node.geometry?.materials.first?.name = "Test material"
    node.geometry?.firstMaterial?.fillMode = .lines

    return node
}

func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {

    if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {

        // Keep the SceneKit face geometry in sync with the tracked face.
        faceGeometry.update(from: faceAnchor.geometry)

        // let vertices = faceAnchor.geometry.vertices
        let textureCoordinates = faceAnchor.geometry.textureCoordinates

        // Export the current mesh and move on to the next frame index.
        exportMesh(node: node)
        self.index = self.index + 1

    }
}
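The transforms needed to reproduce the live-view pose later are only available inside this callback, so here is a rough sketch of how they could be kept per exported frame. savedPoses and savePose(for:faceAnchor:in:) are hypothetical names, not code I already have; they feed into the last sketch at the end of the question.

import ARKit

// Sketch (assumption): remember the face-anchor and camera transforms for each
// exported frame so the saved mesh can later be rendered from the same viewpoint.
var savedPoses: [Int: (face: simd_float4x4, camera: simd_float4x4)] = [:]

func savePose(for index: Int, faceAnchor: ARFaceAnchor, in sceneView: ARSCNView) {
    guard let frame = sceneView.session.currentFrame else { return }
    savedPoses[index] = (face: faceAnchor.transform, camera: frame.camera.transform)
}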

func exportMesh(_ node: SCNNode, to url: URL, completion: (() -> ())?) {
    DispatchQueue.global().async {
        // Wrap the SceneKit geometry in a Model I/O mesh and export it as an asset.
        let mesh = MDLMesh(scnGeometry: node.geometry!)
        let asset = MDLAsset()

        asset.add(mesh)
        do {
            try asset.export(to: url)
        } catch {
            print("Can't write mesh to url")
        }
        DispatchQueue.main.async {
            completion?()
        }
    }
}

The above code exports the node geometry so it shows up in the Files app.
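A per-frame export URL in the Documents directory could be built along these lines; meshURL(for:) is a hypothetical helper, not part of the code above.

import Foundation

// Hypothetical helper: builds a per-frame .obj URL in the app's Documents directory.
func meshURL(for index: Int) -> URL {
    let documents = FileManager.default.urls(for: .documentDirectory,
                                             in: .userDomainMask)[0]
    return documents.appendingPathComponent("faceMesh_\(index).obj")
}

// Example call matching exportMesh(_:to:completion:) above:
// exportMesh(node, to: meshURL(for: index)) { print("Exported frame \(index)") }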
Next, I fetch the same file back from the documents directory.

func loadMeshes() {
    // Load the exported mesh back from disk and wrap it in a SceneKit scene.
    let asset = MDLAsset(url: path)
    let scene = SCNScene(mdlAsset: asset)

    // Offscreen renderer used to draw the scene into an image.
    let renderer = SCNRenderer(device: MTLCreateSystemDefaultDevice(), options: nil)
    renderer.scene = scene

    scene.rootNode.childNodes.first?.geometry?.firstMaterial?.fillMode = .lines
    scene.rootNode.childNodes.first?.geometry = geometry

    let renderTime = TimeInterval(0)

    // Output size
    let size = CGSize(width: 1080, height: 1920)

    // Render the image
    let image = renderer.snapshot(atTime: renderTime, with: size,
                                  antialiasingMode: SCNAntialiasingMode.multisampling4X)

}

The above code retrieves the mesh object, converts it to a scene, updates the scene geometry to render as lines, and then takes a snapshot of the scene.

This snapshot is just the geometry of the face. I would like the snapshot, or image, of the face mesh to respect the original position and angle at which it appeared in the live view.

If I look to the side or up and down, then the mesh image generated from the scene should also reflect that.

Mesh image drawn with snapshot
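The general idea I am after, as a rough sketch rather than working code: snapshotMesh and its saved-transform parameters are hypothetical, and assume the pose was stored at export time as sketched earlier.

import SceneKit
import UIKit
import Metal

// Sketch (assumption, not the original code): re-apply the saved live-view pose
// before taking the offscreen snapshot so the rendered mesh matches the original
// head position and angle.
func snapshotMesh(scene: SCNScene,
                  faceTransform: simd_float4x4,   // saved ARFaceAnchor.transform
                  cameraTransform: simd_float4x4, // saved ARCamera.transform
                  size: CGSize) -> UIImage? {
    guard let device = MTLCreateSystemDefaultDevice() else { return nil }
    let renderer = SCNRenderer(device: device, options: nil)
    renderer.scene = scene

    // Place the mesh node exactly where the face was in world space.
    scene.rootNode.childNodes.first?.simdTransform = faceTransform

    // Recreate the live-view camera from the saved camera transform and render from it.
    let cameraNode = SCNNode()
    cameraNode.camera = SCNCamera()
    cameraNode.simdTransform = cameraTransform
    scene.rootNode.addChildNode(cameraNode)
    renderer.pointOfView = cameraNode

    return renderer.snapshot(atTime: 0, with: size,
                             antialiasingMode: .multisampling4X)
}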