I am trying to build an ArFragment that adds some ViewRenderables at positions supplied by an external system. I have run a few tests with an ARCore SceneView, but I have doubts about using that solution with external smart glasses (Epson Moverio BT-40).
My doubts are the following:
Is it possible to create an ARCore session without a camera feed? SceneView seems to satisfy this constraint, but a custom ViewRenderable added to the scene sits at a fixed position that moves with the device, whereas I need it placed at a fixed position in the real world. The problem is that without the camera, ARCore cannot start scene tracking, so in the end I cannot add anchors to the scene.
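For reference, the anchor flow I believe is normally required looks roughly like this (untested sketch; it assumes an ArSceneView backed by a tracking ARCore session, which is exactly what I don't have without the camera):

// Untested sketch of the standard ARCore/Sceneform anchor flow. It assumes an
// ArSceneView ("arSceneView") whose session is already tracking, plus the
// matching Session instance ("session"); both names are placeholders here.
val frame = arSceneView.arFrame
if (frame != null && frame.camera.trackingState == TrackingState.TRACKING) {
    // Create an anchor one metre in front of the current camera pose.
    val anchor = session.createAnchor(
        frame.camera.pose.compose(Pose.makeTranslation(0f, 0f, -1f))
    )
    // Anything attached under this node stays fixed in the real world.
    val anchorNode = AnchorNode(anchor).apply { setParent(arSceneView.scene) }
}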
Is it possible to change which IMU sensor is used when anchors are added to the scene? As far as I understand, motion detection is relative to the Android smartphone, while I should instead be using the IMU integrated in the headset.
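Something like the following is what I had in mind (untested sketch; it assumes the BT-40 exposes its head-tracking IMU through the standard Android rotation-vector sensor, and that the SceneView content can actually be re-oriented from it; android.hardware imports omitted):

// Sketch: read the rotation-vector sensor and convert it to a Sceneform
// Quaternion, ignoring the axis-convention mismatch between the sensor frame
// and Sceneform's coordinate frame.
val sensorManager = requireContext().getSystemService(Context.SENSOR_SERVICE) as SensorManager
val rotationSensor = sensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR)
val listener = object : SensorEventListener {
    override fun onSensorChanged(event: SensorEvent) {
        val q = FloatArray(4)                          // filled as [w, x, y, z]
        SensorManager.getQuaternionFromVector(q, event.values)
        val deviceRotation = Quaternion(q[1], q[2], q[3], q[0])
        // e.g. contentNode.worldRotation = deviceRotation.inverted()
    }
    override fun onAccuracyChanged(sensor: Sensor, accuracy: Int) = Unit
}
sensorManager.registerListener(listener, rotationSensor, SensorManager.SENSOR_DELAY_GAME)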
Below is the code I use to add the model to the SceneView:
import android.app.Application
import android.net.Uri
import android.os.Bundle
import android.util.Log
import android.view.View
import android.widget.Toast
import androidx.fragment.app.Fragment
import androidx.lifecycle.lifecycleScope
import com.android.example.sensorstest.R
import com.google.ar.core.*
import com.google.ar.sceneform.*
import com.google.ar.sceneform.math.Quaternion
import com.google.ar.sceneform.math.Vector3
import com.google.ar.sceneform.rendering.ModelRenderable
import com.google.ar.sceneform.ux.FootprintSelectionVisualizer
import com.google.ar.sceneform.ux.TransformableNode
import com.google.ar.sceneform.ux.TransformationSystem
import kotlinx.android.synthetic.main.ar_fragment_no_camera_feed.*
import kotlinx.coroutines.future.await
import kotlinx.coroutines.launch

class ArEasterEggNoCamFragment : Fragment(R.layout.ar_fragment_no_camera_feed) {

    private var yodaModel: ModelRenderable? = null
    lateinit var arScene: SceneView
    //lateinit var session: Session
    private var yodaNode: TransformableNode? = null

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        arScene = ar_no_camera_feed
        // Change which sensors to listen to here
        loadModel(requireNotNull(this.activity).application)
        /*session = Session(requireNotNull(this.activity).application)
        val config = Config(session)
        // Set the instant placement mode
        config.instantPlacementMode = Config.InstantPlacementMode.LOCAL_Y_UP
        session.configure(config)*/
    }

    private fun loadModel(application: Application) {
        lifecycleScope.launch {
            yodaModel = ModelRenderable
                .builder()
                .setSource(
                    application,
                    Uri.parse("models/scene.sfb")
                )
                .build()
                .await()
            Toast.makeText(
                application,
                "Model available",
                Toast.LENGTH_SHORT
            ).show()
            addNode()
        }
    }

    private fun addNode() {
        val ts = TransformationSystem(resources.displayMetrics, FootprintSelectionVisualizer())
        // Add a motion-event listener to zoom in/out or rotate the rendered model
        arScene.scene.addOnPeekTouchListener { hitTestResult, motionEvent ->
            Log.i("ArNoCamFragment", motionEvent.toString())
            ts.onTouch(hitTestResult, motionEvent)
        }
        yodaModel?.let {
            yodaNode = TransformableNode(ts).apply {
                setParent(arScene.scene)
                worldPosition = Vector3(0f, -2f, -7f)
                worldScale = Vector3(10f, 10f, 10f)
                localRotation = Quaternion.eulerAngles(Vector3(30f, 0f, 0f))
                rotationController.isEnabled = true
                scaleController.isEnabled = true
                scaleController.maxScale = 10f
                scaleController.minScale = 0.1f
                // Must stay false: enabling translation crashes the app, since
                // dragging relies on ARCore hit testing, which is not available
                // without a camera feed.
                translationController.isEnabled = false
                renderable = it
            }
            Log.i("ArFragmentNoCam", "Adding yoda model!")
            arScene.scene.addChild(yodaNode)
        }
    }

    override fun onResume() {
        super.onResume()
        arScene.resume()
    }

    override fun onPause() {
        super.onPause()
        arScene.pause()
    }

    override fun onDestroy() {
        super.onDestroy()
        arScene.destroy()
    }
}
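Finally, this is roughly how I intended to consume the positions coming from the external system, as a helper added to the fragment above (placeFromExternal is a hypothetical name; I am also assuming the external coordinates are already metres in the scene's coordinate frame):

// Hypothetical helper: move the rendered model to a position reported by the
// external system. Without ARCore tracking this is only a position relative
// to the SceneView camera, not a point fixed in the real world.
private fun placeFromExternal(x: Float, y: Float, z: Float) {
    yodaNode?.worldPosition = Vector3(x, y, z)
}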