I want to place an image on a wall with ARCore, converting the image from a regular ImageView. I am only detecting vertical surfaces, but when I place the image it ends up placed like this:

I can't figure out how to hang the image on the wall the way it would hang in real life.
AR fragment
fragment = (ArFragment) getSupportFragmentManager().findFragmentById(R.id.sceneform_fragment);

CompletableFuture<ViewRenderable> imgView = ViewRenderable.builder()
        .setView(this, R.layout.textview)
        .build();

CompletableFuture.allOf(imgView).handle((notUsed, throwable) -> {
    // When you build a Renderable, Sceneform loads its resources in the background while
    // returning a CompletableFuture. Call handle(), thenAccept(), or check isDone()
    // before calling get().
    if (throwable != null) {
        Log.d("MainActivity", "onCreate: Unable to load renderable");
        return null;
    }
    try {
        imageViewRenderable = imgView.get();
    } catch (InterruptedException | ExecutionException ex) {
        Log.d("MainActivity", "onCreate: Unable to load renderable");
    }
    return null;
});

try {
    mArSession = new Session(this);
} catch (UnavailableArcoreNotInstalledException e) {
    e.printStackTrace();
} catch (UnavailableApkTooOldException e) {
    e.printStackTrace();
} catch (UnavailableSdkTooOldException e) {
    e.printStackTrace();
} catch (UnavailableDeviceNotCompatibleException e) {
    e.printStackTrace();
}

mArConfig = new Config(mArSession);
mArConfig.setUpdateMode(Config.UpdateMode.LATEST_CAMERA_IMAGE);
mArConfig.setPlaneFindingMode(Config.PlaneFindingMode.VERTICAL);
mArSession.configure(mArConfig);
fragment.getArSceneView().setupSession(mArSession);

fragment.setOnTapArPlaneListener((HitResult hitresult, Plane plane, MotionEvent motionevent) -> {
    Anchor anchor = hitresult.createAnchor();
    AnchorNode anchorNode = new AnchorNode(anchor);
    anchorNode.setParent(fragment.getArSceneView().getScene());

    TransformableNode art = new TransformableNode(fragment.getTransformationSystem());
    art.setParent(anchorNode);
    art.setRenderable(imageViewRenderable);
    art.select();

    ImageView imageView = (ImageView) imageViewRenderable.getView();
    Picasso.get().load("http://www.cbk-enschede.nl/plaatjes/ens/jpg/GSA023.jpg").into(imageView);
});
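For reference, a commonly suggested Sceneform fix for this symptom (this sketch is not from the original post) is to re-orient the placed node: on a vertical plane the anchor's +Y axis points out of the wall, so pointing the node's look direction at world-up while using that axis as the node's up vector makes the flat view hang against the wall. A minimal sketch of the tap listener with that extra step, reusing the fragment and imageViewRenderable fields from above and Sceneform's Node, Vector3 and Node.setLookDirection:

fragment.setOnTapArPlaneListener((HitResult hitResult, Plane plane, MotionEvent motionEvent) -> {
    // Plane finding is already restricted to VERTICAL above; this check just guards against other planes.
    if (plane.getType() != Plane.Type.VERTICAL) {
        return;
    }

    AnchorNode anchorNode = new AnchorNode(hitResult.createAnchor());
    anchorNode.setParent(fragment.getArSceneView().getScene());

    // A plain Node keeps the sketch minimal; a TransformableNode can be used as in the code above.
    Node node = new Node();
    node.setParent(anchorNode);
    node.setRenderable(imageViewRenderable);

    // On a vertical plane the anchor's up vector (+Y) is the wall normal. Looking at world-up
    // while treating that normal as "up" rotates the card so it faces out of the wall.
    Vector3 wallNormal = anchorNode.getUp();
    node.setLookDirection(Vector3.up(), wallNormal);
});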
Answer 0 (score: 0)
I think you need Google's ARCore Augmented Images approach.
Here is working Kotlin code for Augmented Images:
class MyArFragment : ArFragment() {

    private val trackableMap = mutableMapOf<String, AugmentedImageAnchorNode>()
    var setOnStarted: (() -> Unit)? = null

    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View? {
        val view = super.onCreateView(inflater, container, savedInstanceState)
        view!!.visibility = View.GONE
        planeDiscoveryController.hide()
        planeDiscoveryController.setInstructionView(null)
        arSceneView.planeRenderer.isEnabled = false
        arSceneView.scene.setOnTouchListener { _, motionEvent ->
            swipeAnGestureDetector.onTouchEvent(motionEvent)
        }
        arSceneView.scene.addOnUpdateListener(::onUpdateFrame)
        ArResources.init(this.context!!).handle { _, _ ->
            setOnStarted?.invoke()
            view.visibility = View.VISIBLE
        }
        return view
    }

    override fun onPause() {
        super.onPause()
        trackableMap.forEach {
            arSceneView.scene.removeChild(it.value)
        }
        trackableMap.clear()
    }

    override fun getSessionConfiguration(session: Session): Config {
        val config = super.getSessionConfiguration(session)
        config.focusMode = Config.FocusMode.AUTO
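        // "yourAR.imgdb" is an augmented-image database that has to be pre-built (for example
        // with ARCore's arcoreimg tool) and shipped in the app's assets. As an alternative
        // (an assumption, not part of the original answer), the database could also be built
        // at runtime from a Bitmap, e.g.:
        //   config.augmentedImageDatabase = AugmentedImageDatabase(session).apply { addImage("object", bitmap) }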
        config.augmentedImageDatabase = AugmentedImageDatabase.deserialize(session, context!!.resources.assets.open("yourAR.imgdb"))
        return config
    }

    private fun createArNode(image: AugmentedImage) {
        Logger.d("create : ${image.name}(${image.index}), pose: ${image.centerPose}, ex: ${image.extentX}, ez: ${image.extentZ}")
        when (image.name) {
            "object" -> {
                val node = MyAnchorNode().init(image)
                trackableMap[image.name] = node
                arSceneView.scene.addChild(node)
                Toast.makeText(context, "add object", Toast.LENGTH_LONG).show()
            }
        }
    }

    private fun onUpdateFrame(@Suppress("UNUSED_PARAMETER") frameTime: FrameTime?) {
        val frame = arSceneView.arFrame
        if (frame == null || frame.camera.trackingState != TrackingState.TRACKING) {
            return
        }
        frame.getUpdatedTrackables(AugmentedImage::class.java).forEach { image ->
            when (image.trackingState) {
                TrackingState.TRACKING -> if (trackableMap.contains(image.name)) {
                    if (trackableMap[image.name]?.update(image) == true) {
                        Logger.d("update node: ${image.name}(${image.index}), pose: ${image.centerPose}, ex: ${image.extentX}, ez: ${image.extentZ}")
                    }
                } else {
                    createArNode(image)
                }
                TrackingState.STOPPED -> {
                    Logger.d("remove node: ${image.name}(${image.index})")
                    trackableMap.remove(image.name)
                }
                else -> {
                }
            }
        }
    }

    private val swipeAnGestureDetector = GestureDetector(null, object : GestureDetector.SimpleOnGestureListener() {

        private val SWIPE_DISTANCE_THRESHOLD = 150
        private val SWIPE_VELOCITY_THRESHOLD = 150

        override fun onDown(e: MotionEvent?): Boolean {
            return true
        }

        override fun onFling(e1: MotionEvent, e2: MotionEvent, velocityX: Float, velocityY: Float): Boolean {
            val objectAN = trackableMap["object"] as? MyAnchorNode
            if (objectAN != null && objectAN.isActive) {
                val distanceX = e2.x - e1.x
                val distanceY = e2.y - e1.y
                if (Math.abs(distanceX) > Math.abs(distanceY) && Math.abs(distanceX) > SWIPE_DISTANCE_THRESHOLD && Math.abs(velocityX) > SWIPE_VELOCITY_THRESHOLD) {
                    if (distanceX > 0) {
                        objectAN.forwardScene()
                    } else {
                        objectAN.backwardScene()
                    }
                    return true
                }
            }
            return false
        }
    })
}