ARCore Android SDK v1.6.0
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000..89b083a
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,35 @@
+<!-- Use this issue tracker to file bugs and feature requests
+related to the ARCore SDK for Android (Java / C).
+
+For advice and general questions, please use the `ARCore` tag on
+Stack Overflow: stackoverflow.com/questions/tagged/ARCore
+
+- For Sceneform issues, use https://github.com/google-ar/sceneform-android-sdk
+- If using Unity, use https://github.com/google-ar/arcore-unity-sdk
+- If using Unreal, use https://github.com/google-ar/arcore-unreal-sdk
+-->
+
+
+### SPECIFIC ISSUE ENCOUNTERED
+
+
+### VERSIONS USED
+- Android Studio:
+- ARCore SDK for Android:
+- Device manufacturer, model, and O/S:
+- ARCore:
+ On Windows, use: `adb shell pm dump com.google.ar.core | findstr /i "packages: versionName"`
+ On macOS, use: `adb shell pm dump com.google.ar.core | egrep -i versionName\|packages:`
+- Output of `adb shell getprop ro.build.fingerprint`:
+
+
+### STEPS TO REPRODUCE THE ISSUE
+ 1.
+ 1.
+ 1.
+
+
+### WORKAROUNDS (IF ANY)
+
+
+### ADDITIONAL COMMENTS
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..f3501ce
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,30 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to the source code included
+in this project. There are just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+Although we do not use the normal GitHub pull request process to incorporate
+contributions, it's okay to send us pull requests. Once we have received your
+CLA we will review and consider your contribution for inclusion in a future
+release of the SDK.
+
+## Community Guidelines
+
+This project follows
+[Google's Open Source Community
+Guidelines](https://opensource.google.com/conduct/).
+
diff --git a/LICENSE b/LICENSE
index 6448356..f731190 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,3 +1,6 @@
+Except as indicated at the end of this LICENSE file,
+files in this SDK are licensed as follows:
+
Copyright (c) 2017, Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -156,3 +159,21 @@
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
+
+===============================================================================
+
+The following files:
+tools/arcoreimg/linux/arcoreimg
+tools/arcoreimg/macos/arcoreimg
+tools/arcoreimg/windows/arcoreimg.exe
+
+are licensed as follows:
+Covered by the **Google APIs Terms of Service** at
+[https://developers.google.com/terms/](https://developers.google.com/terms/)
+
+===============================================================================
+
+The *.apk files attached to [GitHub releases](https://github.com/google-ar/arcore-android-sdk/releases)
+are licensed as follows:
+Covered by the **Google APIs Terms of Service** at
+[https://developers.google.com/terms/](https://developers.google.com/terms/)
diff --git a/libraries/include/arcore_c_api.h b/libraries/include/arcore_c_api.h
index 281dbb8..a393e60 100644
--- a/libraries/include/arcore_c_api.h
+++ b/libraries/include/arcore_c_api.h
@@ -119,7 +119,7 @@
/// @defgroup hit HitResult
/// Defines an intersection between a ray and estimated real-world geometry.
-/// @defgroup image ImageMetadata
+/// @defgroup image Image
/// Provides access to metadata from the camera image capture result.
/// @defgroup intrinsics Intrinsics
@@ -385,9 +385,13 @@
/// A position in space attached to a trackable
/// (@ref ownership "reference type, long-lived").
///
-/// Create with ArSession_acquireNewAnchor() or
-/// ArHitResult_acquireNewAnchor()<br>
-/// Release with ArAnchor_release()
+/// To create a new anchor call ArSession_acquireNewAnchor() or
+/// ArHitResult_acquireNewAnchor().<br>
+/// To have ARCore stop tracking the anchor, call ArAnchor_detach().<br>
+/// To release the memory associated with this anchor reference, call
+/// ArAnchor_release(). Note that this will not cause ARCore to stop tracking
+/// the anchor. Other references to the same anchor acquired through
+/// ArAnchorList_acquireItem() are unaffected.
typedef struct ArAnchor_ ArAnchor;
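
Illustrative sketch (not part of the header): a minimal C example of the anchor lifecycle described above, assuming a valid ArSession and an ArPose obtained elsewhere in the app; error handling is reduced to a single status check.

    #include <stddef.h>
    #include "arcore_c_api.h"

    // Create an anchor at a given pose, later stop tracking it, then release
    // this reference. `session` and `pose` are assumed to be valid.
    void anchor_lifecycle_sketch(ArSession *session, const ArPose *pose) {
      ArAnchor *anchor = NULL;
      if (ArSession_acquireNewAnchor(session, pose, &anchor) != AR_SUCCESS) {
        return;  // e.g. not tracking yet, or resources exhausted.
      }

      // ... use the anchor across frames ...

      ArAnchor_detach(session, anchor);  // Ask ARCore to stop tracking it.
      ArAnchor_release(anchor);          // Release this reference only.
    }
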
/// A list of anchors (@ref ownership "value type").
@@ -977,10 +981,17 @@
/// @addtogroup session
/// @{
-/// Attempts to create a new ARCore session.
+/// Creates a new ARCore session. Prior to calling this function, your app must
+/// check that ARCore is installed by verifying that either:
///
-/// This is the entry point of ARCore. This function MUST be the first ARCore
-/// call made by an application.
+/// - ArCoreApk_requestInstall() or ArCoreApk_requestInstallCustom() returns
+/// #AR_INSTALL_STATUS_INSTALLED, or
+/// - ArCoreApk_checkAvailability() returns
+/// #AR_AVAILABILITY_SUPPORTED_INSTALLED.
+///
+/// This check must be performed prior to creating an ArSession, otherwise
+/// ArSession creation will fail, and subsequent installation or upgrade of
+/// ARCore will require an app restart and might cause Android to kill your app.
///
/// @param[in] env The application's @c JNIEnv object
/// @param[in] application_context A @c jobject referencing the application's
@@ -988,11 +999,22 @@
/// @param[out] out_session_pointer A pointer to an @c ArSession* to receive
/// the address of the newly allocated session.
/// @return #AR_SUCCESS or any of:
-/// - #AR_UNAVAILABLE_ARCORE_NOT_INSTALLED
-/// - #AR_UNAVAILABLE_DEVICE_NOT_COMPATIBLE
-/// - #AR_UNAVAILABLE_APK_TOO_OLD
-/// - #AR_UNAVAILABLE_SDK_TOO_OLD
-/// - #AR_ERROR_CAMERA_PERMISSION_NOT_GRANTED
+/// - #AR_ERROR_FATAL if an internal error occurred while creating the session.
+/// `adb logcat` may contain useful information.
+/// - #AR_ERROR_CAMERA_PERMISSION_NOT_GRANTED if your app does not have the
+/// [CAMERA](https://developer.android.com/reference/android/Manifest.permission.html#CAMERA)
+/// permission.
+/// - #AR_UNAVAILABLE_ARCORE_NOT_INSTALLED if the ARCore APK is not present.
+/// This can be prevented by the installation check described above.
+/// - #AR_UNAVAILABLE_DEVICE_NOT_COMPATIBLE if the device is not compatible with
+/// ARCore. If encountered after completing the installation check, this
+/// usually indicates a user has side-loaded ARCore onto an incompatible
+/// device.
+/// - #AR_UNAVAILABLE_APK_TOO_OLD if the installed ARCore APK is too old for the
+/// ARCore SDK with which this application was built. This can be prevented by
+/// the installation check described above.
+/// - #AR_UNAVAILABLE_SDK_TOO_OLD if the ARCore SDK that this app was built with
+/// is too old and no longer supported by the installed ARCore APK.
ArStatus ArSession_create(void *env,
void *application_context,
ArSession **out_session_pointer);
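
Illustrative sketch (not part of the header): one way to wire the install check described above into session creation, assuming `env` and `activity` are the JNIEnv pointer and activity jobject passed down from Java; a production app would also surface the returned ArStatus to the user.

    #include <stdbool.h>
    #include <stddef.h>
    #include "arcore_c_api.h"

    // Returns a new session, or NULL if installation was just requested or
    // session creation failed. Call again from the next onResume().
    ArSession *create_session_if_ready(void *env, void *activity,
                                       bool user_requested_install) {
      ArInstallStatus install_status;
      if (ArCoreApk_requestInstall(env, activity, user_requested_install,
                                   &install_status) != AR_SUCCESS ||
          install_status != AR_INSTALL_STATUS_INSTALLED) {
        return NULL;  // Install requested or not possible; retry later.
      }

      ArSession *session = NULL;
      if (ArSession_create(env, activity, &session) != AR_SUCCESS) {
        return NULL;  // See the ArStatus values above; check adb logcat.
      }
      return session;
    }
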
@@ -1384,10 +1406,13 @@
///
/// The list will always return 3 camera configs. The GPU texture resolutions
/// are the same in all three configs. Currently, most devices provide GPU
-/// texture resolution of 1920 x 1080, but devices might provide higher or lower
-/// resolution textures, depending on device capabilities. The CPU image
-/// resolutions returned are VGA, 720p, and a resolution matching the GPU
-/// texture.
+/// texture resolution of 1920 x 1080, but this may vary with device
+/// capabilities. The CPU image resolutions returned are VGA, a middle
+/// resolution, and a large resolution matching the GPU texture. The middle
+/// resolution will often be 1280 x 720, but may vary with device capabilities.
+///
+/// Note: Prior to ARCore 1.6 the middle CPU image resolution was guaranteed to
+/// be 1280 x 720 on all devices.
///
/// @param[in] session The ARCore session
/// @param[inout] list The list to fill. This list must have already
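
Illustrative sketch (not part of the header): enumerating the supported camera configs described in this hunk and printing their CPU image sizes, assuming a valid ArSession; selecting one with ArSession_setCameraConfig() is omitted.

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include "arcore_c_api.h"

    // List the supported camera configs and print their CPU image dimensions.
    void list_camera_configs(const ArSession *session) {
      ArCameraConfigList *list = NULL;
      ArCameraConfigList_create(session, &list);
      ArSession_getSupportedCameraConfigs(session, list);

      int32_t size = 0;
      ArCameraConfigList_getSize(session, list, &size);  // Currently always 3.

      ArCameraConfig *config = NULL;
      ArCameraConfig_create(session, &config);
      for (int32_t i = 0; i < size; ++i) {
        ArCameraConfigList_getItem(session, list, i, config);
        int32_t width = 0, height = 0;
        ArCameraConfig_getImageDimensions(session, config, &width, &height);
        printf("CPU image config %d: %d x %d\n", i, width, height);
      }
      ArCameraConfig_destroy(config);
      ArCameraConfigList_destroy(list);
    }
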
@@ -1479,17 +1504,28 @@
/// @addtogroup camera
/// @{
-/// Sets @c out_pose to the pose of the user's device in the world coordinate
-/// space at the time of capture of the current camera texture. The position and
-/// orientation of the pose follow the device's physical camera (they are not
-/// affected by display orientation), <b>but are rotated around the Z axis by a
-/// multiple of 90° to (approximately) align the axes with those of the <a
-/// href="https://developer.android.com/guide/topics/sensors/sensors_overview.html#sensors-coords"
-/// >Android Sensor Coordinate System</a></b>.
+/// Sets @c out_pose to the pose of the physical camera in world space for the
+/// latest frame. This is an OpenGL camera pose with +X pointing right, +Y
+/// pointing up, -Z pointing in the direction the camera is looking, with
+/// "right" and "up" being relative to the image readout in the usual
+/// left-to-right top-to-bottom order. Specifically, this is the camera pose at
+/// the center of exposure of the center row of the image.
///
-/// This function will be deprecated in a future version of ARCore and replaced
-/// with one that returns the camera's actual physical pose without the 90°
-/// rotation.
+/// <b>For applications using the SDK for ARCore 1.5 and earlier</b>, the
+/// returned pose is rotated around the Z axis by a multiple of 90 degrees so
+/// that the axes correspond approximately to those of the <a
+/// href="https://developer.android.com/guide/topics/sensors/sensors_overview#sensors-coords">Android
+/// Sensor Coordinate System</a>.
+///
+/// See Also:
+///
+/// * ArCamera_getDisplayOrientedPose() for the pose of the virtual camera. It
+/// will differ by a local rotation about the Z axis by a multiple of 90
+/// degrees.
+/// * ArFrame_getAndroidSensorPose() for the pose of the Android sensor frame.
+/// It will differ in both orientation and location.
+/// * ArFrame_transformDisplayUvCoords() to convert viewport coordinates to
+/// texture coordinates.
///
/// Note: This pose is only useful when ArCamera_getTrackingState() returns
/// #AR_TRACKING_STATE_TRACKING and otherwise should not be used.
@@ -1502,18 +1538,24 @@
const ArCamera *camera,
ArPose *out_pose);
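
Illustrative sketch (not part of the header): reading the physical camera pose for the latest frame as the raw 7-float (qx, qy, qz, qw, tx, ty, tz) representation, assuming a valid session and an up-to-date frame.

    #include <stddef.h>
    #include "arcore_c_api.h"

    // Write the physical camera pose into out_raw_pose when tracking.
    void read_physical_camera_pose(const ArSession *session,
                                   const ArFrame *frame,
                                   float out_raw_pose[7]) {
      ArCamera *camera = NULL;
      ArFrame_acquireCamera(session, frame, &camera);

      ArTrackingState state;
      ArCamera_getTrackingState(session, camera, &state);
      if (state == AR_TRACKING_STATE_TRACKING) {
        ArPose *pose = NULL;
        ArPose_create(session, NULL, &pose);  // NULL raw pose -> identity.
        ArCamera_getPose(session, camera, pose);
        ArPose_getPoseRaw(session, pose, out_raw_pose);
        ArPose_destroy(pose);
      }
      ArCamera_release(camera);
    }
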
-/// Sets @c out_pose to the pose of the user's device in the world coordinate
-/// space at the time of capture of the current camera texture. The position of
-/// the pose is located at the device's camera, while the orientation
-/// approximately matches the orientation of the display (considering display
-/// rotation), using OpenGL camera conventions (+X right, +Y up, -Z in the
-/// direction the camera is looking).
+/// Sets @c out_pose to the virtual camera pose in world space for rendering AR
+/// content onto the latest frame. This is an OpenGL camera pose with +X
+/// pointing right, +Y pointing up, and -Z pointing in the direction the camera
+/// is looking, with "right" and "up" being relative to the current logical
+/// display orientation.
+///
+/// See Also:
+///
+/// * ArCamera_getViewMatrix() to conveniently compute the OpenGL View Matrix.
+/// * ArCamera_getPose() for the physical pose of the camera. It will differ by
+/// a local rotation about the Z axis by a multiple of 90 degrees.
+/// * ArFrame_getAndroidSensorPose() for the pose of the Android sensor frame.
+/// It will differ in both orientation and location.
+/// * ArSession_setDisplayGeometry() to update the display rotation.
///
/// Note: This pose is only useful when ArCamera_getTrackingState() returns
/// #AR_TRACKING_STATE_TRACKING and otherwise should not be used.
///
-/// See also: ArCamera_getViewMatrix()
-///
/// @param[in] session The ARCore session
/// @param[in] camera The session's camera (retrieved from any frame).
/// @param[inout] out_pose An already-allocated ArPose object into which the
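
Illustrative sketch (not part of the header): producing the view and projection matrices used to render AR content with the display-oriented camera described above; the near/far values (0.1 m and 100 m) are arbitrary placeholders chosen for this example.

    #include <stddef.h>
    #include "arcore_c_api.h"

    // Fill column-major 4x4 view and projection matrices for rendering.
    void get_view_projection(const ArSession *session, const ArFrame *frame,
                             float view_4x4[16], float proj_4x4[16]) {
      ArCamera *camera = NULL;
      ArFrame_acquireCamera(session, frame, &camera);

      // The view matrix is the inverse of the display-oriented camera pose.
      ArCamera_getViewMatrix(session, camera, view_4x4);
      ArCamera_getProjectionMatrix(session, camera, /*near=*/0.1f,
                                   /*far=*/100.0f, proj_4x4);
      ArCamera_release(camera);
    }
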
@@ -1560,16 +1602,23 @@
float *dest_col_major_4x4);
/// Retrieves the unrotated and uncropped intrinsics for the image (CPU) stream.
-/// @param camera The intrinsics may change per frame, so this should be called
+/// The intrinsics may change per frame, so this should be called
/// on each frame to get the intrinsics for the current frame.
+///
+/// @param[in] session The ARCore session
+/// @param[in] camera The session's camera.
+/// @param[inout] out_camera_intrinsics The camera_intrinsics data.
void ArCamera_getImageIntrinsics(const ArSession *session,
const ArCamera *camera,
ArCameraIntrinsics *out_camera_intrinsics);
/// Retrieves the unrotated and uncropped intrinsics for the texture (GPU)
-/// stream.
-/// @param camera The intrinsics may change per frame, so this should be called
+/// stream. The intrinsics may change per frame, so this should be called
/// on each frame to get the intrinsics for the current frame.
+///
+/// @param[in] session The ARCore session
+/// @param[in] camera The session's camera.
+/// @param[inout] out_camera_intrinsics The camera_intrinsics data.
void ArCamera_getTextureIntrinsics(const ArSession *session,
const ArCamera *camera,
ArCameraIntrinsics *out_camera_intrinsics);
@@ -1587,6 +1636,9 @@
/// @{
/// Allocates a camera intrinsics object.
+///
+/// @param[in] session The ARCore session
+/// @param[inout] out_camera_intrinsics The camera_intrinsics data.
void ArCameraIntrinsics_create(const ArSession *session,
ArCameraIntrinsics **out_camera_intrinsics);
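
Illustrative sketch (not part of the header): allocating an ArCameraIntrinsics object and reading the CPU-image intrinsics for the current frame, assuming a valid session and a camera acquired from the latest frame.

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include "arcore_c_api.h"

    // Print focal length, principal point, and dimensions of the CPU image.
    void print_image_intrinsics(const ArSession *session,
                                const ArCamera *camera) {
      ArCameraIntrinsics *intrinsics = NULL;
      ArCameraIntrinsics_create(session, &intrinsics);
      ArCamera_getImageIntrinsics(session, camera, intrinsics);

      float fx, fy, cx, cy;
      int32_t width, height;
      ArCameraIntrinsics_getFocalLength(session, intrinsics, &fx, &fy);
      ArCameraIntrinsics_getPrincipalPoint(session, intrinsics, &cx, &cy);
      ArCameraIntrinsics_getImageDimensions(session, intrinsics, &width,
                                            &height);
      printf("fx=%.1f fy=%.1f cx=%.1f cy=%.1f (%d x %d)\n", fx, fy, cx, cy,
             width, height);

      ArCameraIntrinsics_destroy(intrinsics);
    }
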
@@ -1647,10 +1699,18 @@
int64_t *out_timestamp_ns);
/// Sets @c out_pose to the pose of the <a
-/// href="https://developer.android.com/guide/topics/sensors/sensors_overview.html#sensors-coords"
-/// >Android Sensor Coordinate System</a> in the world coordinate space at the
-/// time of capture of the current camera texture. The orientation follows the
-/// device's "native" orientation (it is not affected by display orientation).
+/// href="https://developer.android.com/guide/topics/sensors/sensors_overview#sensors-coords">Android
+/// Sensor Coordinate System</a> in the world coordinate space for this frame.
+/// The orientation follows the device's "native" orientation (it is not
+/// affected by display rotation) with all axes corresponding to those of the
+/// Android sensor coordinates.
+///
+/// See Also:
+///
+/// * ArCamera_getDisplayOrientedPose() for the pose of the virtual camera.
+/// * ArCamera_getPose() for the pose of the physical camera.
+/// * ArFrame_getTimestamp() for the system time that this pose was estimated
+/// for.
///
/// Note: This pose is only useful when ArCamera_getTrackingState() returns
/// #AR_TRACKING_STATE_TRACKING and otherwise should not be used.
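
Illustrative sketch (not part of the header): reading the Android sensor frame pose described above together with the frame timestamp it was estimated for, assuming a valid session and frame.

    #include <stddef.h>
    #include <stdint.h>
    #include "arcore_c_api.h"

    // Write the Android sensor pose as 7 raw floats; also read the timestamp.
    void read_sensor_pose(const ArSession *session, const ArFrame *frame,
                          float sensor_raw[7], int64_t *timestamp_ns) {
      ArFrame_getTimestamp(session, frame, timestamp_ns);

      ArPose *pose = NULL;
      ArPose_create(session, NULL, &pose);
      ArFrame_getAndroidSensorPose(session, frame, pose);
      ArPose_getPoseRaw(session, pose, sensor_raw);
      ArPose_destroy(pose);
    }
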
@@ -2036,13 +2096,12 @@
ArTrackingState *out_tracking_state);
/// Tells ARCore to stop tracking and forget this anchor. This call does not
-/// release the reference to the anchor - that must be done separately using
+/// release any references to the anchor - that must be done separately using
/// ArAnchor_release().
void ArAnchor_detach(ArSession *session, ArAnchor *anchor);
-/// Releases a reference to an anchor. This does not mean that the anchor will
-/// stop tracking, as it will be obtainable from e.g. ArSession_getAllAnchors()
-/// if any other references exist.
+/// Releases a reference to an anchor. To stop tracking for this anchor, call
+/// ArAnchor_detach() first.
///
/// This method may safely be called with @c nullptr - it will do nothing.
void ArAnchor_release(ArAnchor *anchor);
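
Illustrative sketch (not part of the header): the detach-then-release pattern applied to every anchor in the session, assuming a valid session; references acquired from the list are always released, while only anchors that have stopped tracking are detached.

    #include <stddef.h>
    #include <stdint.h>
    #include "arcore_c_api.h"

    // Detach anchors that stopped tracking; release every list reference.
    void prune_stopped_anchors(ArSession *session) {
      ArAnchorList *anchors = NULL;
      ArAnchorList_create(session, &anchors);
      ArSession_getAllAnchors(session, anchors);

      int32_t count = 0;
      ArAnchorList_getSize(session, anchors, &count);
      for (int32_t i = 0; i < count; ++i) {
        ArAnchor *anchor = NULL;
        ArAnchorList_acquireItem(session, anchors, i, &anchor);

        ArTrackingState state;
        ArAnchor_getTrackingState(session, anchor, &state);
        if (state == AR_TRACKING_STATE_STOPPED) {
          ArAnchor_detach(session, anchor);  // Forget the anchor entirely.
        }
        ArAnchor_release(anchor);  // Always release this list reference.
      }
      ArAnchorList_destroy(anchors);
    }
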
diff --git a/samples/README.md b/samples/README.md
deleted file mode 100644
index 39c440d..0000000
--- a/samples/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-Google AR SDK
-=====================
-Copyright (c) 2017 Google Inc. All rights reserved.
-
-[https://developers.google.com/ar/develop/java/getting-started](https://developers.google.com/ar/develop/java/getting-started)
-
-Please note, we do not accept pull requests.
diff --git a/samples/augmented_image_c/app/build.gradle b/samples/augmented_image_c/app/build.gradle
index b103c96..270e216 100644
--- a/samples/augmented_image_c/app/build.gradle
+++ b/samples/augmented_image_c/app/build.gradle
@@ -29,7 +29,7 @@
}
}
ndk {
- abiFilters "arm64-v8a", "x86"
+ abiFilters "arm64-v8a", "armeabi-v7a", "x86"
}
}
compileOptions {
@@ -51,8 +51,8 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:1.5.0'
- natives 'com.google.ar:core:1.5.0'
+ implementation 'com.google.ar:core:1.6.0'
+ natives 'com.google.ar:core:1.6.0'
implementation 'com.android.support:appcompat-v7:27.1.1'
implementation 'com.android.support:design:27.1.1'
diff --git a/samples/augmented_image_c/app/src/main/java/com/google/ar/core/examples/c/augmentedimage/AugmentedImageActivity.java b/samples/augmented_image_c/app/src/main/java/com/google/ar/core/examples/c/augmentedimage/AugmentedImageActivity.java
index b37296b..349474f 100644
--- a/samples/augmented_image_c/app/src/main/java/com/google/ar/core/examples/c/augmentedimage/AugmentedImageActivity.java
+++ b/samples/augmented_image_c/app/src/main/java/com/google/ar/core/examples/c/augmentedimage/AugmentedImageActivity.java
@@ -59,6 +59,7 @@
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ surfaceView.setWillNotDraw(false);
JniInterface.assetManager = getAssets();
nativeApplication = JniInterface.createNativeApplication(getAssets());
diff --git a/samples/augmented_image_c/gradle/wrapper/gradle-wrapper.jar b/samples/augmented_image_c/gradle/wrapper/gradle-wrapper.jar
index 758de96..29953ea 100644
--- a/samples/augmented_image_c/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/augmented_image_c/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/samples/augmented_image_java/app/build.gradle b/samples/augmented_image_java/app/build.gradle
index d2cfa68..cd08905 100644
--- a/samples/augmented_image_java/app/build.gradle
+++ b/samples/augmented_image_java/app/build.gradle
@@ -25,7 +25,7 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:1.5.0'
+ implementation 'com.google.ar:core:1.6.0'
// Obj - a simple Wavefront OBJ file loader
// https://github.com/javagl/Obj
diff --git a/samples/augmented_image_java/app/src/main/java/com/google/ar/core/examples/java/augmentedimage/AugmentedImageActivity.java b/samples/augmented_image_java/app/src/main/java/com/google/ar/core/examples/java/augmentedimage/AugmentedImageActivity.java
index 3682fe5..3554139 100644
--- a/samples/augmented_image_java/app/src/main/java/com/google/ar/core/examples/java/augmentedimage/AugmentedImageActivity.java
+++ b/samples/augmented_image_java/app/src/main/java/com/google/ar/core/examples/java/augmentedimage/AugmentedImageActivity.java
@@ -99,6 +99,7 @@
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ surfaceView.setWillNotDraw(false);
fitToScanView = findViewById(R.id.image_view_fit_to_scan);
glideRequestManager = Glide.with(this);
diff --git a/samples/augmented_image_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java b/samples/augmented_image_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
index fded055..5ca562f 100644
--- a/samples/augmented_image_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
+++ b/samples/augmented_image_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
@@ -124,19 +124,22 @@
* accurately follow static physical objects. This must be called <b>before</b> drawing virtual
* content.
*
- * @param frame The last {@code Frame} returned by {@link Session#update()}.
+ * @param frame The last {@code Frame} returned by {@link Session#update()} or null when ARCore is
+ * paused. See the shared_camera_java sample for details.
*/
public void draw(Frame frame) {
- // If display rotation changed (also includes view size change), we need to re-query the uv
- // coordinates for the screen rect, as they may have changed as well.
- if (frame.hasDisplayGeometryChanged()) {
- frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
- }
+ if (frame != null) {
+ // If display rotation changed (also includes view size change), we need to re-query the uv
+ // coordinates for the screen rect, as they may have changed as well.
+ if (frame.hasDisplayGeometryChanged()) {
+ frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
+ }
- if (frame.getTimestamp() == 0) {
- // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
- // drawing possible leftover data from previous sessions if the texture is reused.
- return;
+ if (frame.getTimestamp() == 0) {
+ // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
+ // drawing possible leftover data from previous sessions if the texture is reused.
+ return;
+ }
}
// No need to test or write depth, the screen quad has arbitrary depth, and is expected
diff --git a/samples/augmented_image_java/gradle/wrapper/gradle-wrapper.jar b/samples/augmented_image_java/gradle/wrapper/gradle-wrapper.jar
index 758de96..29953ea 100644
--- a/samples/augmented_image_java/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/augmented_image_java/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/samples/cloud_anchor_java/app/build.gradle b/samples/cloud_anchor_java/app/build.gradle
index e7ed8cf..0139683 100644
--- a/samples/cloud_anchor_java/app/build.gradle
+++ b/samples/cloud_anchor_java/app/build.gradle
@@ -25,7 +25,7 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:1.5.0'
+ implementation 'com.google.ar:core:1.6.0'
// Obj - a simple Wavefront OBJ file loader
// https://github.com/javagl/Obj
diff --git a/samples/cloud_anchor_java/app/src/main/java/com/google/ar/core/examples/java/cloudanchor/CloudAnchorActivity.java b/samples/cloud_anchor_java/app/src/main/java/com/google/ar/core/examples/java/cloudanchor/CloudAnchorActivity.java
index 54c949a..0c7ff78 100644
--- a/samples/cloud_anchor_java/app/src/main/java/com/google/ar/core/examples/java/cloudanchor/CloudAnchorActivity.java
+++ b/samples/cloud_anchor_java/app/src/main/java/com/google/ar/core/examples/java/cloudanchor/CloudAnchorActivity.java
@@ -154,6 +154,7 @@
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ surfaceView.setWillNotDraw(false);
installRequested = false;
// Initialize UI components.
diff --git a/samples/cloud_anchor_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java b/samples/cloud_anchor_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
index fded055..5ca562f 100644
--- a/samples/cloud_anchor_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
+++ b/samples/cloud_anchor_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
@@ -124,19 +124,22 @@
* accurately follow static physical objects. This must be called <b>before</b> drawing virtual
* content.
*
- * @param frame The last {@code Frame} returned by {@link Session#update()}.
+ * @param frame The last {@code Frame} returned by {@link Session#update()} or null when ARCore is
+ * paused. See the shared_camera_java sample for details.
*/
public void draw(Frame frame) {
- // If display rotation changed (also includes view size change), we need to re-query the uv
- // coordinates for the screen rect, as they may have changed as well.
- if (frame.hasDisplayGeometryChanged()) {
- frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
- }
+ if (frame != null) {
+ // If display rotation changed (also includes view size change), we need to re-query the uv
+ // coordinates for the screen rect, as they may have changed as well.
+ if (frame.hasDisplayGeometryChanged()) {
+ frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
+ }
- if (frame.getTimestamp() == 0) {
- // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
- // drawing possible leftover data from previous sessions if the texture is reused.
- return;
+ if (frame.getTimestamp() == 0) {
+ // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
+ // drawing possible leftover data from previous sessions if the texture is reused.
+ return;
+ }
}
// No need to test or write depth, the screen quad has arbitrary depth, and is expected
diff --git a/samples/cloud_anchor_java/gradle/wrapper/gradle-wrapper.jar b/samples/cloud_anchor_java/gradle/wrapper/gradle-wrapper.jar
index 758de96..29953ea 100644
--- a/samples/cloud_anchor_java/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/cloud_anchor_java/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/samples/computervision_c/app/build.gradle b/samples/computervision_c/app/build.gradle
index 9e3f73d..fb9606f 100644
--- a/samples/computervision_c/app/build.gradle
+++ b/samples/computervision_c/app/build.gradle
@@ -29,7 +29,7 @@
}
}
ndk {
- abiFilters "arm64-v8a", "x86"
+ abiFilters "arm64-v8a", "armeabi-v7a", "x86"
}
}
compileOptions {
@@ -51,8 +51,8 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:1.5.0'
- natives 'com.google.ar:core:1.5.0'
+ implementation 'com.google.ar:core:1.6.0'
+ natives 'com.google.ar:core:1.6.0'
implementation 'com.android.support:appcompat-v7:27.1.1'
implementation 'com.android.support:design:27.1.1'
diff --git a/samples/computervision_c/app/src/main/java/com/google/ar/core/examples/c/computervision/ComputerVisionActivity.java b/samples/computervision_c/app/src/main/java/com/google/ar/core/examples/c/computervision/ComputerVisionActivity.java
index 9689b7b..322d317 100644
--- a/samples/computervision_c/app/src/main/java/com/google/ar/core/examples/c/computervision/ComputerVisionActivity.java
+++ b/samples/computervision_c/app/src/main/java/com/google/ar/core/examples/c/computervision/ComputerVisionActivity.java
@@ -23,6 +23,7 @@
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
+import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.View;
@@ -58,6 +59,7 @@
private TextView cameraIntrinsicsTextView;
private Switch focusModeSwitch;
+ private GestureDetector gestureDetector;
@Override
protected void onCreate(Bundle savedInstanceState) {
@@ -68,17 +70,25 @@
focusModeSwitch.setOnCheckedChangeListener(this::onFocusModeChanged);
surfaceView = findViewById(R.id.surfaceview);
- surfaceView.setOnTouchListener(
- (View view, MotionEvent motionEvent) -> {
- if (motionEvent.getAction() == MotionEvent.ACTION_UP) {
- splitterPosition = (splitterPosition < 0.5f) ? 1.0f : 0.0f;
+ gestureDetector =
+ new GestureDetector(
+ this,
+ new GestureDetector.SimpleOnGestureListener() {
+ @Override
+ public boolean onSingleTapUp(MotionEvent e) {
+ splitterPosition = (splitterPosition < 0.5f) ? 1.0f : 0.0f;
- // Turn off the CPU resolution radio buttons if CPU image is not displayed.
- showCameraConfigMenu(splitterPosition < 0.5f);
- }
+ // Turn off the CPU resolution radio buttons if CPU image is not displayed.
+ showCameraConfigMenu(splitterPosition < 0.5f);
+ return true;
+ }
- return true;
- });
+ @Override
+ public boolean onDown(MotionEvent e) {
+ return true;
+ }
+ });
+ surfaceView.setOnTouchListener((unusedView, event) -> gestureDetector.onTouchEvent(event));
// Set up renderer.
surfaceView.setPreserveEGLContextOnPause(true);
@@ -86,6 +96,7 @@
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ surfaceView.setWillNotDraw(false);
nativeApplication = JniInterface.createNativeApplication(getAssets());
}
diff --git a/samples/computervision_c/gradle/wrapper/gradle-wrapper.jar b/samples/computervision_c/gradle/wrapper/gradle-wrapper.jar
index 758de96..29953ea 100644
--- a/samples/computervision_c/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/computervision_c/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/samples/computervision_java/app/build.gradle b/samples/computervision_java/app/build.gradle
index f770fc4..89d9070 100644
--- a/samples/computervision_java/app/build.gradle
+++ b/samples/computervision_java/app/build.gradle
@@ -25,7 +25,7 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:1.5.0'
+ implementation 'com.google.ar:core:1.6.0'
// Obj - a simple Wavefront OBJ file loader
// https://github.com/javagl/Obj
diff --git a/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java b/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
index fded055..5ca562f 100644
--- a/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
+++ b/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
@@ -124,19 +124,22 @@
* accurately follow static physical objects. This must be called <b>before</b> drawing virtual
* content.
*
- * @param frame The last {@code Frame} returned by {@link Session#update()}.
+ * @param frame The last {@code Frame} returned by {@link Session#update()} or null when ARCore is
+ * paused. See the shared_camera_java sample for details.
*/
public void draw(Frame frame) {
- // If display rotation changed (also includes view size change), we need to re-query the uv
- // coordinates for the screen rect, as they may have changed as well.
- if (frame.hasDisplayGeometryChanged()) {
- frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
- }
+ if (frame != null) {
+ // If display rotation changed (also includes view size change), we need to re-query the uv
+ // coordinates for the screen rect, as they may have changed as well.
+ if (frame.hasDisplayGeometryChanged()) {
+ frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
+ }
- if (frame.getTimestamp() == 0) {
- // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
- // drawing possible leftover data from previous sessions if the texture is reused.
- return;
+ if (frame.getTimestamp() == 0) {
+ // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
+ // drawing possible leftover data from previous sessions if the texture is reused.
+ return;
+ }
}
// No need to test or write depth, the screen quad has arbitrary depth, and is expected
diff --git a/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/computervision/ComputerVisionActivity.java b/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/computervision/ComputerVisionActivity.java
index 334ebe4..89e1dd5 100644
--- a/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/computervision/ComputerVisionActivity.java
+++ b/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/computervision/ComputerVisionActivity.java
@@ -24,9 +24,12 @@
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.util.Size;
+import android.view.GestureDetector;
+import android.view.MotionEvent;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.RadioButton;
+import android.widget.RadioGroup;
import android.widget.Switch;
import android.widget.TextView;
import android.widget.Toast;
@@ -82,9 +85,9 @@
private boolean installRequested;
private final SnackbarHelper messageSnackbarHelper = new SnackbarHelper();
private CpuImageDisplayRotationHelper cpuImageDisplayRotationHelper;
- private CpuImageTouchListener cpuImageTouchListener;
private final CpuImageRenderer cpuImageRenderer = new CpuImageRenderer();
private final EdgeDetector edgeDetector = new EdgeDetector();
+ private GestureDetector gestureDetector;
// This lock prevents changing resolution as the frame is being rendered. ARCore requires all
// cpu images to be released before changing resolution.
@@ -123,10 +126,31 @@
focusModeSwitch.setOnCheckedChangeListener(this::onFocusModeChanged);
cpuImageDisplayRotationHelper = new CpuImageDisplayRotationHelper(/*context=*/ this);
- cpuImageTouchListener = new CpuImageTouchListener(cpuImageRenderer, /*context=*/ this);
+
+ gestureDetector =
+ new GestureDetector(
+ this,
+ new GestureDetector.SimpleOnGestureListener() {
+ @Override
+ public boolean onSingleTapUp(MotionEvent e) {
+ float newPosition = (cpuImageRenderer.getSplitterPosition() < 0.5f) ? 1.0f : 0.0f;
+ cpuImageRenderer.setSplitterPosition(newPosition);
+
+ // Display the CPU resolution related UI only when CPU image is being displayed.
+ boolean show = (newPosition < 0.5f);
+ RadioGroup radioGroup = (RadioGroup) findViewById(R.id.radio_camera_configs);
+ radioGroup.setVisibility(show ? View.VISIBLE : View.INVISIBLE);
+ return true;
+ }
+
+ @Override
+ public boolean onDown(MotionEvent e) {
+ return true;
+ }
+ });
// Setup a touch listener to control the texture splitter position.
- surfaceView.setOnTouchListener(cpuImageTouchListener);
+ surfaceView.setOnTouchListener((unusedView, event) -> gestureDetector.onTouchEvent(event));
// Set up renderer.
surfaceView.setPreserveEGLContextOnPause(true);
@@ -134,6 +158,7 @@
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ surfaceView.setWillNotDraw(false);
installRequested = false;
}
diff --git a/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/computervision/CpuImageTouchListener.java b/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/computervision/CpuImageTouchListener.java
deleted file mode 100644
index a18b5de..0000000
--- a/samples/computervision_java/app/src/main/java/com/google/ar/core/examples/java/computervision/CpuImageTouchListener.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package com.google.ar.core.examples.java.computervision;
-
-import android.app.Activity;
-import android.content.Context;
-import android.view.MotionEvent;
-import android.view.View;
-import android.widget.RadioGroup;
-
-/**
- * Tracks the touches to the rendering view and updates the splitter position in {@link
- * CpuImageRenderer}.
- */
-class CpuImageTouchListener implements View.OnTouchListener {
-
- private final CpuImageRenderer cpuImageRenderer;
- private final Context context;
-
- public CpuImageTouchListener(CpuImageRenderer cpuImageRenderer, Context context) {
- this.cpuImageRenderer = cpuImageRenderer;
- this.context = context;
- }
-
- @Override
- public boolean onTouch(View view, MotionEvent motionEvent) {
- if (motionEvent.getAction() == MotionEvent.ACTION_UP) {
- float newPosition = (cpuImageRenderer.getSplitterPosition() < 0.5f) ? 1.0f : 0.0f;
- cpuImageRenderer.setSplitterPosition(newPosition);
-
- // Display the CPU resolution related UI only when CPU image is being displayed.
- boolean show = (newPosition < 0.5f);
- RadioGroup radioGroup =
- (RadioGroup) ((Activity) context).findViewById(R.id.radio_camera_configs);
- radioGroup.setVisibility(show ? View.VISIBLE : View.INVISIBLE);
- }
-
- return true;
- }
-}
diff --git a/samples/computervision_java/gradle/wrapper/gradle-wrapper.jar b/samples/computervision_java/gradle/wrapper/gradle-wrapper.jar
index 758de96..29953ea 100644
--- a/samples/computervision_java/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/computervision_java/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/samples/hello_ar_c/app/build.gradle b/samples/hello_ar_c/app/build.gradle
index a92aafa..7e1aa0a 100644
--- a/samples/hello_ar_c/app/build.gradle
+++ b/samples/hello_ar_c/app/build.gradle
@@ -29,7 +29,7 @@
}
}
ndk {
- abiFilters "arm64-v8a", "x86"
+ abiFilters "arm64-v8a", "armeabi-v7a", "x86"
}
}
compileOptions {
@@ -51,8 +51,8 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:1.5.0'
- natives 'com.google.ar:core:1.5.0'
+ implementation 'com.google.ar:core:1.6.0'
+ natives 'com.google.ar:core:1.6.0'
implementation 'com.android.support:appcompat-v7:27.1.1'
implementation 'com.android.support:design:27.1.1'
diff --git a/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/HelloArActivity.java b/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/HelloArActivity.java
index 693d16e..f61a75d 100644
--- a/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/HelloArActivity.java
+++ b/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/HelloArActivity.java
@@ -107,6 +107,7 @@
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ surfaceView.setWillNotDraw(false);
JniInterface.assetManager = getAssets();
nativeApplication = JniInterface.createNativeApplication(getAssets());
diff --git a/samples/hello_ar_c/gradle/wrapper/gradle-wrapper.jar b/samples/hello_ar_c/gradle/wrapper/gradle-wrapper.jar
index 758de96..29953ea 100644
--- a/samples/hello_ar_c/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/hello_ar_c/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/samples/hello_ar_java/app/build.gradle b/samples/hello_ar_java/app/build.gradle
index 02634a2..d32ffe7 100644
--- a/samples/hello_ar_java/app/build.gradle
+++ b/samples/hello_ar_java/app/build.gradle
@@ -26,7 +26,7 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:1.5.0'
+ implementation 'com.google.ar:core:1.6.0'
// Obj - a simple Wavefront OBJ file loader
// https://github.com/javagl/Obj
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
index fded055..5ca562f 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/common/rendering/BackgroundRenderer.java
@@ -124,19 +124,22 @@
* accurately follow static physical objects. This must be called <b>before</b> drawing virtual
* content.
*
- * @param frame The last {@code Frame} returned by {@link Session#update()}.
+ * @param frame The last {@code Frame} returned by {@link Session#update()} or null when ARCore is
+ * paused. See the shared_camera_java sample for details.
*/
public void draw(Frame frame) {
- // If display rotation changed (also includes view size change), we need to re-query the uv
- // coordinates for the screen rect, as they may have changed as well.
- if (frame.hasDisplayGeometryChanged()) {
- frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
- }
+ if (frame != null) {
+ // If display rotation changed (also includes view size change), we need to re-query the uv
+ // coordinates for the screen rect, as they may have changed as well.
+ if (frame.hasDisplayGeometryChanged()) {
+ frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
+ }
- if (frame.getTimestamp() == 0) {
- // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
- // drawing possible leftover data from previous sessions if the texture is reused.
- return;
+ if (frame.getTimestamp() == 0) {
+ // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
+ // drawing possible leftover data from previous sessions if the texture is reused.
+ return;
+ }
}
// No need to test or write depth, the screen quad has arbitrary depth, and is expected
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/HelloArActivity.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/HelloArActivity.java
index 00a0749..42453b0 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/HelloArActivity.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/HelloArActivity.java
@@ -114,6 +114,7 @@
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ surfaceView.setWillNotDraw(false);
installRequested = false;
}
diff --git a/samples/hello_ar_java/gradle/wrapper/gradle-wrapper.jar b/samples/hello_ar_java/gradle/wrapper/gradle-wrapper.jar
index 758de96..29953ea 100644
--- a/samples/hello_ar_java/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/hello_ar_java/gradle/wrapper/gradle-wrapper.jar
Binary files differ