ARCore Android SDK v1.0.0
diff --git a/README.md b/README.md
index 39c440d..6012723 100644
--- a/README.md
+++ b/README.md
@@ -5,3 +5,11 @@
[https://developers.google.com/ar/develop/java/getting-started](https://developers.google.com/ar/develop/java/getting-started)
Please note, we do not accept pull requests.
+
+## Additional Notes
+
+You must disclose the use of ARCore, and how it collects and processes data.
+This can be done by displaying a prominent link to the site
+"How Google uses data when you use our partners' sites or apps",
+(located at www.google.com/policies/privacy/partners/, or any other URL Google
+may provide from time to time).
diff --git a/libraries/include/arcore_c_api.h b/libraries/include/arcore_c_api.h
index bdf5a8f..fe3152e 100644
--- a/libraries/include/arcore_c_api.h
+++ b/libraries/include/arcore_c_api.h
@@ -13,8 +13,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-#ifndef ARCORE_C_API_H_
-#define ARCORE_C_API_H_
+#ifndef THIRD_PARTY_ARCORE_AR_CORE_C_API_ARCORE_C_API_H_
+#define THIRD_PARTY_ARCORE_AR_CORE_C_API_ARCORE_C_API_H_
#include <stddef.h>
#include <stdint.h>
@@ -86,20 +86,53 @@
/// beyond the scope of a single rendering frame, either an anchor should be
/// created or a position relative to a nearby existing anchor should be used.
-/// @defgroup common Common Definitions
-/// Shared types and constants.
+/// @defgroup anchor Anchor
+/// Describes a fixed location and orientation in the real world.
+/// @defgroup arcoreapk ArCoreApk
+/// Management of the ARCore service APK
/// @defgroup session Session
/// Session management.
+/// @defgroup camera Camera
+/// Provides information about the camera that is used to capture images.
+
/// @defgroup config Configuration
/// Session configuration.
/// @defgroup frame Frame
/// Per-frame state.
+/// @defgroup hit HitResult
+/// Defines an intersection between a ray and estimated real-world geometry.
+
+/// @defgroup image ImageMetadata
+/// Provides access to metadata from the camera image capture result.
+
+/// @defgroup light LightEstimate
+/// Holds information about the estimated lighting of the real scene.
+
+/// @defgroup plane Plane
+/// Describes the current best knowledge of a real-world planar surface.
+
+/// @defgroup point Point
+/// Represents a point in space that ARCore is tracking.
+
+/// @defgroup pointcloud PointCloud
+/// Contains a set of observed 3D points and confidence values.
+
+/// @defgroup pose Pose
+/// Represents an immutable rigid transformation from one coordinate
+/// space to another.
+
+/// @defgroup session Session
+/// Session management.
+
/// @defgroup trackable Trackable
-/// Planes and points, as well as Anchors.
+/// Something that can be tracked and that Anchors can be attached to.
+
+/// @defgroup common Common Definitions
+/// Shared types and constants
/// @defgroup cpp_helpers C++ helper functions
@@ -125,7 +158,7 @@
/// @}
-/// @addtogroup common
+/// @addtogroup pose
/// @{
/// A structured rigid transformation (@ref ownership "value type").
@@ -136,9 +169,9 @@
/// @}
-// Frame and frame objects.
+// Camera.
-/// @addtogroup frame
+/// @addtogroup camera
/// @{
/// The virtual and physical camera
@@ -148,6 +181,13 @@
/// Release with ArCamera_release()
typedef struct ArCamera_ ArCamera;
+/// @}
+
+// Frame and frame objects.
+
+/// @addtogroup frame
+/// @{
+
/// The world state resulting from an update (@ref ownership "value type").
///
/// Allocate with ArFrame_create()<br>
@@ -155,6 +195,13 @@
/// Release with ArFrame_destroy()
typedef struct ArFrame_ ArFrame;
+/// @}
+
+// LightEstimate.
+
+/// @addtogroup light
+/// @{
+
/// An estimate of the real-world lighting (@ref ownership "value type").
///
/// Allocate with ArLightEstimate_create()<br>
@@ -162,6 +209,13 @@
/// Release with ArLightEstimate_destroy()
typedef struct ArLightEstimate_ ArLightEstimate;
+/// @}
+
+// PointCloud.
+
+/// @addtogroup pointcloud
+/// @{
+
/// A cloud of tracked 3D visual feature points
/// (@ref ownership "reference type, large data").
///
@@ -169,6 +223,13 @@
/// Release with ArPointCloud_release()
typedef struct ArPointCloud_ ArPointCloud;
+/// @}
+
+// ImageMetadata.
+
+/// @addtogroup image
+/// @{
+
/// Camera capture metadata (@ref ownership "reference type, large data").
///
/// Acquire with ArFrame_acquireImageMetadata()<br>
@@ -191,20 +252,39 @@
/// Release with ArTrackableList_destroy()
typedef struct ArTrackableList_ ArTrackableList;
+/// @}
+
+// Plane
+
+/// @addtogroup plane
+/// @{
+
/// A detected planar surface (@ref ownership "reference type, long-lived").
///
/// Trackable type: #AR_TRACKABLE_PLANE <br>
/// Release with: ArTrackable_release()
typedef struct ArPlane_ ArPlane;
+/// @}
+
+// Point
+
+/// @addtogroup point
+/// @{
+
/// An arbitrary point in space (@ref ownership "reference type, long-lived").
///
/// Trackable type: #AR_TRACKABLE_POINT <br>
/// Release with: ArTrackable_release()
typedef struct ArPoint_ ArPoint;
+/// @}
+
// Anchors.
+/// @addtogroup anchor
+/// @{
+
/// A position in space attached to a trackable
/// (@ref ownership "reference type, long-lived").
///
@@ -223,7 +303,7 @@
// Hit result functionality.
-/// @addtogroup frame
+/// @addtogroup hit
/// @{
/// A single trackable hit (@ref ownership "value type").
@@ -262,23 +342,23 @@
#ifdef __cplusplus
/// Upcasts to ArTrackable
-inline ArTrackable* ArAsTrackable(ArPlane* plane) {
- return reinterpret_cast<ArTrackable*>(plane);
+inline ArTrackable *ArAsTrackable(ArPlane *plane) {
+ return reinterpret_cast<ArTrackable *>(plane);
}
/// Upcasts to ArTrackable
-inline ArTrackable* ArAsTrackable(ArPoint* point) {
- return reinterpret_cast<ArTrackable*>(point);
+inline ArTrackable *ArAsTrackable(ArPoint *point) {
+ return reinterpret_cast<ArTrackable *>(point);
}
/// Downcasts to ArPlane.
-inline ArPlane* ArAsPlane(ArTrackable* trackable) {
- return reinterpret_cast<ArPlane*>(trackable);
+inline ArPlane *ArAsPlane(ArTrackable *trackable) {
+ return reinterpret_cast<ArPlane *>(trackable);
}
/// Downcasts to ArPoint.
-inline ArPoint* ArAsPoint(ArTrackable* trackable) {
- return reinterpret_cast<ArPoint*>(trackable);
+inline ArPoint *ArAsPoint(ArTrackable *trackable) {
+ return reinterpret_cast<ArPoint *>(trackable);
}
#endif
/// @}
@@ -308,8 +388,7 @@
AR_TRACKABLE_POINT = 0x41520102,
/// An invalid Trackable type.
- AR_TRACKABLE_NOT_VALID = 0,
-};
+ AR_TRACKABLE_NOT_VALID = 0};
/// @ingroup common
/// Return code indicating success or failure of a method.
@@ -368,16 +447,16 @@
/// because the camera hasn't fully started.
AR_ERROR_NOT_YET_AVAILABLE = -12,
+ /// The android camera has been reallocated to a higher priority app or is
+ /// otherwise unavailable.
+ AR_ERROR_CAMERA_NOT_AVAILABLE = -13,
+
/// The ARCore APK is not installed on this device.
AR_UNAVAILABLE_ARCORE_NOT_INSTALLED = -100,
/// The device is not currently compatible with ARCore.
AR_UNAVAILABLE_DEVICE_NOT_COMPATIBLE = -101,
- /// The version of Android is less than the minimum version ARCore supports
- /// (currently N, API level 24).
- AR_UNAVAILABLE_ANDROID_VERSION_NOT_SUPPORTED = -102,
-
/// The ARCore APK currently installed on device is too old and needs to be
/// updated.
AR_UNAVAILABLE_APK_TOO_OLD = -103,
@@ -385,7 +464,10 @@
/// The ARCore APK currently installed no longer supports the ARCore SDK
/// that the application was built with.
AR_UNAVAILABLE_SDK_TOO_OLD = -104,
-};
+
+ /// The user declined installation of the ARCore APK during this run of the
+ /// application and the current request was not marked as user-initiated.
+ AR_UNAVAILABLE_USER_DECLINED_INSTALLATION = -105};
/// @ingroup common
/// Describes the tracking state of a @c Trackable, an ::ArAnchor or the
@@ -403,8 +485,62 @@
/// ARCore has stopped tracking this Trackable and will never resume
/// tracking it.
- AR_TRACKING_STATE_STOPPED = 2,
-};
+ AR_TRACKING_STATE_STOPPED = 2};
+
+/// @ingroup arcoreapk
+/// Describes the current state of ARCore availability on the device.
+AR_DEFINE_ENUM(ArAvailability){
+ /// An internal error occurred while determining ARCore availability.
+ AR_AVAILABILITY_UNKNOWN_ERROR = 0,
+ /// ARCore is not installed, and a query has been issued to check if ARCore
+    /// is supported.
+ AR_AVAILABILITY_UNKNOWN_CHECKING = 1,
+ /// ARCore is not installed, and the query to check if ARCore is supported
+ /// timed out. This may be due to the device being offline.
+ AR_AVAILABILITY_UNKNOWN_TIMED_OUT = 2,
+ /// ARCore is not supported on this device.
+ AR_AVAILABILITY_UNSUPPORTED_DEVICE_NOT_CAPABLE = 100,
+ /// The device and Android version are supported, but the ARCore APK is not
+ /// installed.
+ AR_AVAILABILITY_SUPPORTED_NOT_INSTALLED = 201,
+ /// The device and Android version are supported, and a version of the
+ /// ARCore APK is installed, but that ARCore APK version is too old.
+ AR_AVAILABILITY_SUPPORTED_APK_TOO_OLD = 202,
+ /// ARCore is supported, installed, and available to use.
+ AR_AVAILABILITY_SUPPORTED_INSTALLED = 203};
+
+/// @ingroup arcoreapk
+/// Indicates the outcome of a call to ArCoreApk_requestInstall().
+AR_DEFINE_ENUM(ArInstallStatus){
+ /// The requested resource is already installed.
+ AR_INSTALL_STATUS_INSTALLED = 0,
+ /// Installation of the resource was requested. The current activity will be
+ /// paused.
+ AR_INSTALL_STATUS_INSTALL_REQUESTED = 1};
+
+/// @ingroup arcoreapk
+/// Controls the behavior of the installation UI.
+AR_DEFINE_ENUM(ArInstallBehavior){
+ /// Hide the Cancel button during initial prompt and prevent user from
+ /// exiting via tap-outside.
+ ///
+ /// Note: The BACK button or tapping outside of any marketplace-provided
+ /// install dialog will still decline the installation.
+ AR_INSTALL_BEHAVIOR_REQUIRED = 0,
+ /// Include Cancel button in initial prompt and allow easily backing out
+ /// after installation has been initiated.
+ AR_INSTALL_BEHAVIOR_OPTIONAL = 1};
+
+/// @ingroup arcoreapk
+/// Controls the message displayed by the installation UI.
+AR_DEFINE_ENUM(ArInstallUserMessageType){
+ /// Display a localized message like "This application requires ARCore...".
+ AR_INSTALL_USER_MESSAGE_TYPE_APPLICATION = 0,
+ /// Display a localized message like "This feature requires ARCore...".
+ AR_INSTALL_USER_MESSAGE_TYPE_FEATURE = 1,
+ /// Application has explained why ARCore is required prior to calling
+ /// ArCoreApk_requestInstall(), skip user education dialog.
+ AR_INSTALL_USER_MESSAGE_TYPE_USER_ALREADY_INFORMED = 2};
/// @ingroup config
/// Select the behavior of the lighting estimation subsystem.
@@ -413,8 +549,7 @@
AR_LIGHT_ESTIMATION_MODE_DISABLED = 0,
/// Lighting estimation is enabled, generating a single-value intensity
/// estimate.
- AR_LIGHT_ESTIMATION_MODE_AMBIENT_INTENSITY = 1,
-};
+ AR_LIGHT_ESTIMATION_MODE_AMBIENT_INTENSITY = 1};
/// @ingroup config
/// Select the behavior of the plane detection subsystem.
@@ -422,8 +557,7 @@
/// Plane detection is disabled.
AR_PLANE_FINDING_MODE_DISABLED = 0,
/// Detection of only horizontal planes is enabled.
- AR_PLANE_FINDING_MODE_HORIZONTAL = 1,
-};
+ AR_PLANE_FINDING_MODE_HORIZONTAL = 1};
/// @ingroup config
/// Selects the behavior of ArSession_update().
@@ -434,27 +568,34 @@
/// @c update() will return immediately without blocking. If no new camera
/// image is available, then @c update() will return the most recent
/// ::ArFrame object.
- AR_UPDATE_MODE_LATEST_CAMERA_IMAGE = 1,
-};
+ AR_UPDATE_MODE_LATEST_CAMERA_IMAGE = 1};
-/// @ingroup trackable
+/// @ingroup plane
/// Simple summary of the normal vector of a plane, for filtering purposes.
AR_DEFINE_ENUM(ArPlaneType){
/// A horizontal plane facing upward (for example a floor or tabletop).
AR_PLANE_HORIZONTAL_UPWARD_FACING = 0,
/// A horizontal plane facing downward (for example a ceiling).
- AR_PLANE_HORIZONTAL_DOWNWARD_FACING = 1,
-};
+ AR_PLANE_HORIZONTAL_DOWNWARD_FACING = 1};
-/// @ingroup frame
+/// @ingroup light
/// Tracks the validity of a light estimate.
AR_DEFINE_ENUM(ArLightEstimateState){
/// The light estimate is not valid this frame and should not be used for
/// rendering.
AR_LIGHT_ESTIMATE_STATE_NOT_VALID = 0,
/// The light estimate is valid this frame.
- AR_LIGHT_ESTIMATE_STATE_VALID = 1,
-};
+ AR_LIGHT_ESTIMATE_STATE_VALID = 1};
+
+/// @ingroup point
+/// Indicates the orientation mode of the ::ArPoint.
+AR_DEFINE_ENUM(ArPointOrientationMode){
+ /// The orientation of the ::ArPoint is initialized to identity but may
+ /// adjust slightly over time.
+ AR_POINT_ORIENTATION_INITIALIZED_TO_IDENTITY = 0,
+ /// The orientation of the ::ArPoint will follow the behavior described in
+ /// ArHitResult_getHitPose().
+ AR_POINT_ORIENTATION_ESTIMATED_SURFACE_NORMAL = 1};
#undef AR_DEFINE_ENUM
@@ -465,6 +606,123 @@
// Note: destroy methods do not take ArSession* to allow late destruction in
// finalizers of garbage-collected languages such as Java.
+/// @addtogroup arcoreapk
+/// @{
+
+/// Determines if ARCore is supported on this device. This may initiate a query
+/// with a remote service to determine if the device is compatible, in which
+/// case it will return immediately with @c out_availability set to
+/// #AR_AVAILABILITY_UNKNOWN_CHECKING.
+///
+/// Note: A result #AR_AVAILABILITY_SUPPORTED_INSTALLED only indicates presence
+/// of a suitably versioned ARCore APK. Session creation may still fail if the
+/// ARCore APK has been sideloaded onto an incompatible device.
+///
+/// May be called prior to ArSession_create().
+///
+/// @param[in] env The application's @c JNIEnv object
+/// @param[in] application_context A @c jobject referencing the application's
+/// Android @c Context.
+/// @param[out] out_availability A pointer to an ArAvailability to receive
+/// the result.
+void ArCoreApk_checkAvailability(void *env,
+ void *application_context,
+ ArAvailability *out_availability);
+
+/// Initiates installation of ARCore if needed. When your application launches
+/// or enters an AR mode, it should call this method with @c
+/// user_requested_install = 1.
+///
+/// If ARCore is installed and compatible, this function will set @c
+/// out_install_status to #AR_INSTALL_STATUS_INSTALLED.
+///
+/// If ARCore is not currently installed or the installed version not
+/// compatible, the function will set @c out_install_status to
+/// #AR_INSTALL_STATUS_INSTALL_REQUESTED and return immediately. Your current
+/// activity will then pause while the user is informed about the requirement of
+/// ARCore and offered the opportunity to install it.
+///
+/// When your activity resumes, you should call this method again, this time
+/// with @c user_requested_install = 0. This will either set
+/// @c out_install_status to #AR_INSTALL_STATUS_INSTALLED or return an error
+/// code indicating the reason that installation could not be completed.
+///
+/// ARCore-optional applications must ensure that ArCoreApk_checkAvailability()
+/// returns one of the <tt>AR_AVAILABILITY_SUPPORTED_...</tt> values before
+/// calling this method.
+///
+/// See <A
+/// href="https://github.com/google-ar/arcore-android-sdk/tree/master/samples">
+/// our sample code</A> for an example of how an ARCore-required application
+/// should use this function.
+///
+/// May be called prior to ArSession_create().
+///
+/// For more control over the message displayed and ease of exiting the process,
+/// see ArCoreApk_requestInstallCustom().
+///
+/// <b>Caution:</b> The value of <tt>*out_install_status</tt> should only be
+/// considered when #AR_SUCCESS is returned. Otherwise this value must be
+/// ignored.
+///
+/// @param[in] env The application's @c JNIEnv object
+/// @param[in] application_activity A @c jobject referencing the application's
+/// current Android @c Activity.
+/// @param[in] user_requested_install if set, override the previous installation
+/// failure message and always show the installation interface.
+/// @param[out] out_install_status A pointer to an ArInstallStatus to receive
+/// the resulting install status, if successful. Note: this value is only
+///     valid when the return value is #AR_SUCCESS.
+/// @return #AR_SUCCESS, or any of:
+/// - #AR_ERROR_FATAL if an error occurs while checking for or requesting
+/// installation
+/// - #AR_UNAVAILABLE_DEVICE_NOT_COMPATIBLE if ARCore is not supported
+/// on this device.
+/// - #AR_UNAVAILABLE_USER_DECLINED_INSTALLATION if the user previously declined
+/// installation.
+ArStatus ArCoreApk_requestInstall(void *env,
+ void *application_activity,
+ bool user_requested_install,
+ ArInstallStatus *out_install_status);
+
+/// Initiates installation of ARCore if required, with configurable behavior.
+///
+/// This is a more flexible version of ArCoreApk_requestInstall() allowing the
+/// application control over the initial informational dialog and ease of
+/// exiting or cancelling the installation.
+///
+/// See ArCoreApk_requestInstall() for details of use and behavior.
+///
+/// May be called prior to ArSession_create().
+///
+/// @param[in] env The application's @c JNIEnv object
+/// @param[in] application_activity A @c jobject referencing the application's
+/// current Android @c Activity.
+/// @param[in] user_requested_install if set, override the previous installation
+/// failure message and always show the installation interface.
+/// @param[in] install_behavior controls the presence of the cancel button at
+/// the user education screen and if tapping outside the education screen or
+/// install-in-progress screen causes them to dismiss.
+/// @param[in] message_type controls the text of the message displayed
+/// before showing the install prompt, or disables display of this message.
+/// @param[out] out_install_status A pointer to an ArInstallStatus to receive
+/// the resulting install status, if successful. Note: this value is only
+///     valid when the return value is #AR_SUCCESS.
+/// @return #AR_SUCCESS, or any of:
+/// - #AR_ERROR_FATAL if an error occurs while checking for or requesting
+/// installation
+/// - #AR_UNAVAILABLE_DEVICE_NOT_COMPATIBLE if ARCore is not supported
+/// on this device.
+/// - #AR_UNAVAILABLE_USER_DECLINED_INSTALLATION if the user previously declined
+/// installation.
+ArStatus ArCoreApk_requestInstallCustom(void *env,
+ void *application_activity,
+ int32_t user_requested_install,
+ ArInstallBehavior install_behavior,
+ ArInstallUserMessageType message_type,
+ ArInstallStatus *out_install_status);
+
+/// @}
/// @addtogroup session
/// @{
@@ -481,11 +739,12 @@
/// @return #AR_SUCCESS or any of:
/// - #AR_UNAVAILABLE_ARCORE_NOT_INSTALLED
/// - #AR_UNAVAILABLE_DEVICE_NOT_COMPATIBLE
-/// - #AR_UNAVAILABLE_ANDROID_VERSION_NOT_SUPPORTED
/// - #AR_UNAVAILABLE_APK_TOO_OLD
/// - #AR_UNAVAILABLE_SDK_TOO_OLD
-ArStatus ArSession_create(void* env, void* application_context,
- ArSession** out_session_pointer);
+/// - #AR_ERROR_CAMERA_PERMISSION_NOT_GRANTED
+ArStatus ArSession_create(void *env,
+ void *application_context,
+ ArSession **out_session_pointer);
/// @}
@@ -498,42 +757,47 @@
/// configuration. Plane detection and lighting estimation are enabled, and
/// blocking update is selected. This configuration is guaranteed to be
/// supported on all devices that support ARCore.
-void ArConfig_create(const ArSession* session, ArConfig** out_config);
+void ArConfig_create(const ArSession *session, ArConfig **out_config);
/// Releases memory used by the provided configuration object.
-void ArConfig_destroy(ArConfig* config);
+void ArConfig_destroy(ArConfig *config);
/// Stores the currently configured lighting estimation mode into
/// @c *light_estimation_mode.
void ArConfig_getLightEstimationMode(
- const ArSession* session, const ArConfig* config,
- ArLightEstimationMode* light_estimation_mode);
+ const ArSession *session,
+ const ArConfig *config,
+ ArLightEstimationMode *light_estimation_mode);
/// Sets the lighting estimation mode that should be used. See
/// ::ArLightEstimationMode for available options.
void ArConfig_setLightEstimationMode(
- const ArSession* session, ArConfig* config,
+ const ArSession *session,
+ ArConfig *config,
ArLightEstimationMode light_estimation_mode);
/// Stores the currently configured plane finding mode into
/// @c *plane_finding_mode.
-void ArConfig_getPlaneFindingMode(const ArSession* session,
- const ArConfig* config,
- ArPlaneFindingMode* plane_finding_mode);
+void ArConfig_getPlaneFindingMode(const ArSession *session,
+ const ArConfig *config,
+ ArPlaneFindingMode *plane_finding_mode);
/// Sets the plane finding mode that should be used. See
/// ::ArPlaneFindingMode for available options.
-void ArConfig_setPlaneFindingMode(const ArSession* session, ArConfig* config,
+void ArConfig_setPlaneFindingMode(const ArSession *session,
+ ArConfig *config,
ArPlaneFindingMode plane_finding_mode);
/// Stores the currently configured behavior of @ref ArSession_update() into
/// @c *update_mode.
-void ArConfig_getUpdateMode(const ArSession* session, const ArConfig* config,
- ArUpdateMode* update_mode);
+void ArConfig_getUpdateMode(const ArSession *session,
+ const ArConfig *config,
+ ArUpdateMode *update_mode);
/// Sets the behavior of @ref ArSession_update(). See
/// ::ArUpdateMode for available options.
-void ArConfig_setUpdateMode(const ArSession* session, ArConfig* config,
+void ArConfig_setUpdateMode(const ArSession *session,
+ ArConfig *config,
ArUpdateMode update_mode);
/// @}
@@ -544,7 +808,7 @@
/// @{
/// Releases resources used by an ARCore session.
-void ArSession_destroy(ArSession* session);
+void ArSession_destroy(ArSession *session);
/// Checks if the provided configuration is usable on the this device. If this
/// method returns #AR_ERROR_UNSUPPORTED_CONFIGURATION, calls to
@@ -554,16 +818,16 @@
/// @param[in] config The configuration to test
/// @return #AR_SUCCESS indicating the configuration is supported, or
/// #AR_ERROR_UNSUPPORTED_CONFIGURATION otherwise.
-ArStatus ArSession_checkSupported(const ArSession* session,
- const ArConfig* config);
+ArStatus ArSession_checkSupported(const ArSession *session,
+ const ArConfig *config);
-/// Configures the session with the given config. The session must be paused.
+/// Configures the session with the given config.
///
/// @return #AR_SUCCESS or any of:
/// - #AR_ERROR_FATAL
/// - #AR_ERROR_UNSUPPORTED_CONFIGURATION
/// - #AR_ERROR_SESSION_NOT_PAUSED
-ArStatus ArSession_configure(ArSession* session, const ArConfig* config);
+ArStatus ArSession_configure(ArSession *session, const ArConfig *config);
/// Starts or resumes the ARCore Session.
///
@@ -574,7 +838,8 @@
/// @returns #AR_SUCCESS or any of:
/// - #AR_ERROR_FATAL
/// - #AR_ERROR_CAMERA_PERMISSION_NOT_GRANTED
-ArStatus ArSession_resume(ArSession* session);
+/// - #AR_ERROR_CAMERA_NOT_AVAILABLE
+ArStatus ArSession_resume(ArSession *session);
/// Pause the current session. This method will stop the camera feed and release
/// resources. The session can be restarted again by calling ArSession_resume().
@@ -585,14 +850,14 @@
///
/// @returns #AR_SUCCESS or any of:
/// - #AR_ERROR_FATAL
-ArStatus ArSession_pause(ArSession* session);
+ArStatus ArSession_pause(ArSession *session);
/// Sets the OpenGL texture name (id) that will allow GPU access to the camera
/// image. The provided ID should have been created with @c glGenTextures(). The
/// resulting texture must be bound to the @c GL_TEXTURE_EXTERNAL_OES target for
/// use. Shaders accessing this texture must use a @c samplerExternalOES
/// sampler. See sample code for an example.
-void ArSession_setCameraTextureName(ArSession* session, uint32_t texture_id);
+void ArSession_setCameraTextureName(ArSession *session, uint32_t texture_id);
/// Sets the aspect ratio, coordinate scaling, and display rotation. This data
/// is used by UV conversion, projection matrix generation, and hit test logic.
@@ -606,8 +871,10 @@
/// @c ROTATION_270
/// @param[in] width Width of the view, in pixels
/// @param[in] height Height of the view, in pixels
-void ArSession_setDisplayGeometry(ArSession* session, int rotation, int width,
- int height);
+void ArSession_setDisplayGeometry(ArSession *session,
+ int32_t rotation,
+ int32_t width,
+ int32_t height);
/// Updates the state of the ARCore system. This includes: receiving a new
/// camera frame, updating the location of the device, updating the location of
@@ -635,7 +902,8 @@
/// - #AR_ERROR_SESSION_PAUSED
/// - #AR_ERROR_TEXTURE_NOT_SET
/// - #AR_ERROR_MISSING_GL_CONTEXT
-ArStatus ArSession_update(ArSession* session, ArFrame* out_frame);
+/// - #AR_ERROR_CAMERA_NOT_AVAILABLE - camera was removed during runtime.
+ArStatus ArSession_update(ArSession *session, ArFrame *out_frame);
/// Defines a tracked location in the physical world.
///
@@ -643,8 +911,9 @@
/// - #AR_ERROR_NOT_TRACKING
/// - #AR_ERROR_SESSION_PAUSED
/// - #AR_ERROR_RESOURCE_EXHAUSTED
-ArStatus ArSession_acquireNewAnchor(ArSession* session, const ArPose* pose,
- ArAnchor** out_anchor);
+ArStatus ArSession_acquireNewAnchor(ArSession *session,
+ const ArPose *pose,
+ ArAnchor **out_anchor);
/// Returns all known anchors, including those not currently tracked. Anchors
/// forgotten by ARCore due to a call to ArAnchor_detach() or entering the
@@ -654,8 +923,8 @@
/// @param[inout] out_anchor_list The list to fill. This list must have already
/// been allocated with ArAnchorList_create(). If previously used, the list
/// will first be cleared.
-void ArSession_getAllAnchors(const ArSession* session,
- ArAnchorList* out_anchor_list);
+void ArSession_getAllAnchors(const ArSession *session,
+ ArAnchorList *out_anchor_list);
/// Returns the list of all known @ref trackable "trackables". This includes
/// ::ArPlane objects if plane detection is enabled, as well as ::ArPoint
@@ -668,15 +937,15 @@
/// @param[inout] out_trackable_list The list to fill. This list must have
/// already been allocated with ArTrackableList_create(). If previously
/// used, the list will first be cleared.
-void ArSession_getAllTrackables(const ArSession* session,
+void ArSession_getAllTrackables(const ArSession *session,
ArTrackableType filter_type,
- ArTrackableList* out_trackable_list);
+ ArTrackableList *out_trackable_list);
/// @}
// === ArPose methods ===
-/// @addtogroup common
+/// @addtogroup pose
/// @{
/// Allocates and initializes a new pose object. @c pose_raw points to an array
@@ -689,19 +958,21 @@
/// The order of the values is: qx, qy, qz, qw, tx, ty, tz.
///
/// If @c pose_raw is null, initializes with the identity pose.
-void ArPose_create(const ArSession* session, const float* pose_raw,
- ArPose** out_pose);
+void ArPose_create(const ArSession *session,
+ const float *pose_raw,
+ ArPose **out_pose);
/// Releases memory used by a pose object.
-void ArPose_destroy(ArPose* pose);
+void ArPose_destroy(ArPose *pose);
/// Extracts the quaternion rotation and translation from a pose object.
/// @param[in] session The ARCore session
/// @param[in] pose The pose to extract
/// @param[out] out_pose_raw Pointer to an array of 7 floats, to be filled with
/// the quaternion rotation and translation as described in ArPose_create().
-void ArPose_getPoseRaw(const ArSession* session, const ArPose* pose,
- float* out_pose_raw);
+void ArPose_getPoseRaw(const ArSession *session,
+ const ArPose *pose,
+ float *out_pose_raw);
/// Converts a pose into a 4x4 transformation matrix.
/// @param[in] session The ARCore session
@@ -709,14 +980,15 @@
/// @param[out] out_matrix_col_major_4x4 Pointer to an array of 16 floats, to be
/// filled with a column-major homogenous transformation matrix, as used by
/// OpenGL.
-void ArPose_getMatrix(const ArSession* session, const ArPose* pose,
- float* out_matrix_col_major_4x4);
+void ArPose_getMatrix(const ArSession *session,
+ const ArPose *pose,
+ float *out_matrix_col_major_4x4);
/// @}
// === ArCamera methods ===
-/// @addtogroup frame
+/// @addtogroup camera
/// @{
/// Sets @c out_pose to the pose of the user's device in the world coordinate
@@ -732,8 +1004,9 @@
/// @param[in] camera The session's camera (retrieved from any frame).
/// @param[inout] out_pose An already-allocated ArPose object into which the
/// pose will be stored.
-void ArCamera_getPose(const ArSession* session, const ArCamera* camera,
- ArPose* out_pose);
+void ArCamera_getPose(const ArSession *session,
+ const ArCamera *camera,
+ ArPose *out_pose);
/// Sets @c out_pose to the pose of the user's device in the world coordinate
/// space at the time of capture of the current camera texture. The position of
@@ -751,8 +1024,9 @@
/// @param[in] camera The session's camera (retrieved from any frame).
/// @param[inout] out_pose An already-allocated ArPose object into which the
/// pose will be stored.
-void ArCamera_getDisplayOrientedPose(const ArSession* session,
- const ArCamera* camera, ArPose* out_pose);
+void ArCamera_getDisplayOrientedPose(const ArSession *session,
+ const ArCamera *camera,
+ ArPose *out_pose);
/// Returns the view matrix for the camera for this frame. This matrix performs
/// the inverse transfrom as the pose provided by
@@ -763,14 +1037,16 @@
/// @param[inout] out_col_major_4x4 Pointer to an array of 16 floats, to be
/// filled with a column-major homogenous transformation matrix, as used by
/// OpenGL.
-void ArCamera_getViewMatrix(const ArSession* session, const ArCamera* camera,
- float* out_col_major_4x4);
+void ArCamera_getViewMatrix(const ArSession *session,
+ const ArCamera *camera,
+ float *out_col_major_4x4);
/// Gets the current state of the pose of this camera. If this state is anything
/// other than #AR_TRACKING_STATE_TRACKING the Camera's pose should not be
/// considered useful.
-void ArCamera_getTrackingState(const ArSession* session, const ArCamera* camera,
- ArTrackingState* out_tracking_state);
+void ArCamera_getTrackingState(const ArSession *session,
+ const ArCamera *camera,
+ ArTrackingState *out_tracking_state);
/// Computes a projection matrix for rendering virtual content on top of the
/// camera image. Note that the projection matrix reflects the current display
@@ -783,40 +1059,48 @@
/// @param[inout] dest_col_major_4x4 Pointer to an array of 16 floats, to
/// be filled with a column-major homogenous transformation matrix, as used
/// by OpenGL.
-void ArCamera_getProjectionMatrix(const ArSession* session,
- const ArCamera* camera, float near, float far,
- float* dest_col_major_4x4);
+void ArCamera_getProjectionMatrix(const ArSession *session,
+ const ArCamera *camera,
+ float near,
+ float far,
+ float *dest_col_major_4x4);
/// Releases a reference to the camera. This must match a call to
/// ArFrame_acquireCamera().
///
/// This method may safely be called with @c nullptr - it will do nothing.
-void ArCamera_release(ArCamera* camera);
+void ArCamera_release(ArCamera *camera);
+
+/// @}
// === ArFrame methods ===
+/// @addtogroup frame
+/// @{
+
/// Allocates a new ArFrame object, storing the pointer into @c *out_frame.
///
/// Note: the same ArFrame can be used repeatedly when calling ArSession_update.
-void ArFrame_create(const ArSession* session, ArFrame** out_frame);
+void ArFrame_create(const ArSession *session, ArFrame **out_frame);
/// Releases an ArFrame and any references it holds.
-void ArFrame_destroy(ArFrame* frame);
+void ArFrame_destroy(ArFrame *frame);
/// Checks if the display rotation or viewport geometry changed since the
/// previous call to ArSession_update(). The application should re-query
/// ArCamera_getProjectionMatrix() and ArFrame_transformDisplayUvCoords()
/// whenever this emits non-zero.
-void ArFrame_getDisplayGeometryChanged(const ArSession* session,
- const ArFrame* frame,
- int32_t* out_geometry_changed);
+void ArFrame_getDisplayGeometryChanged(const ArSession *session,
+ const ArFrame *frame,
+ int32_t *out_geometry_changed);
/// Returns the timestamp in nanoseconds when this image was captured. This can
/// be used to detect dropped frames or measure the camera frame rate. The time
/// base of this value is specifically <b>not</b> defined, but it is likely
/// similar to <tt>clock_gettime(CLOCK_BOOTTIME)</tt>.
-void ArFrame_getTimestamp(const ArSession* session, const ArFrame* frame,
- int64_t* out_timestamp_ns);
+void ArFrame_getTimestamp(const ArSession *session,
+ const ArFrame *frame,
+ int64_t *out_timestamp_ns);
/// Transform the given texture coordinates to correctly show the background
/// image. This will account for the display rotation, and any additional
@@ -830,10 +1114,11 @@
/// least this many floats.
/// @param[in] uvs_in Input UV coordinates in normalized screen space.
/// @param[inout] uvs_out Output UV coordinates in texture coordinates.
-void ArFrame_transformDisplayUvCoords(const ArSession* session,
- const ArFrame* frame,
- int32_t num_elements, const float* uvs_in,
- float* uvs_out);
+void ArFrame_transformDisplayUvCoords(const ArSession *session,
+ const ArFrame *frame,
+ int32_t num_elements,
+ const float *uvs_in,
+ float *uvs_out);
/// Performs a ray cast from the user's device in the direction of the given
/// location in the camera view. Intersections with detected scene geometry are
@@ -861,9 +1146,11 @@
/// @param[inout] hit_result_list The list to fill. This list must have been
/// previously allocated using ArHitResultList_create(). If the list has
/// been previously used, it will first be cleared.
-void ArFrame_hitTest(const ArSession* session, const ArFrame* frame,
- float pixel_x, float pixel_y,
- ArHitResultList* hit_result_list);
+void ArFrame_hitTest(const ArSession *session,
+ const ArFrame *frame,
+ float pixel_x,
+ float pixel_y,
+ ArHitResultList *hit_result_list);
/// Gets the current ambient light estimate, if light estimation was enabled.
///
@@ -871,8 +1158,9 @@
/// @param[in] frame The current frame.
/// @param[inout] out_light_estimate The light estimate to fill. This object
/// must have been previously created with ArLightEstimate_create().
-void ArFrame_getLightEstimate(const ArSession* session, const ArFrame* frame,
- ArLightEstimate* out_light_estimate);
+void ArFrame_getLightEstimate(const ArSession *session,
+ const ArFrame *frame,
+ ArLightEstimate *out_light_estimate);
/// Acquires the current set of estimated 3d points attached to real-world
/// geometry. A matching call to PointCloud_release() must be made when the
@@ -890,15 +1178,16 @@
/// - #AR_ERROR_DEADLINE_EXCEEDED if @c frame is not the latest frame from
/// by ArSession_update().
/// - #AR_ERROR_RESOURCE_EXHAUSTED if too many point clouds are currently held.
-ArStatus ArFrame_acquirePointCloud(const ArSession* session,
- const ArFrame* frame,
- ArPointCloud** out_point_cloud);
+ArStatus ArFrame_acquirePointCloud(const ArSession *session,
+ const ArFrame *frame,
+ ArPointCloud **out_point_cloud);
/// Returns the camera object for the session. Note that this Camera instance is
/// long-lived so the same instance is returned regardless of the frame object
/// this method was called on.
-void ArFrame_acquireCamera(const ArSession* session, const ArFrame* frame,
- ArCamera** out_camera);
+void ArFrame_acquireCamera(const ArSession *session,
+ const ArFrame *frame,
+ ArCamera **out_camera);
/// Gets the camera metadata for the current camera image.
///
@@ -906,10 +1195,13 @@
/// - #AR_ERROR_DEADLINE_EXCEEDED if @c frame is not the latest frame from
/// by ArSession_update().
/// - #AR_ERROR_RESOURCE_EXHAUSTED if too many metadata objects are currently
-/// held.
-ArStatus ArFrame_acquireImageMetadata(const ArSession* session,
- const ArFrame* frame,
- ArImageMetadata** out_metadata);
+/// held.
+/// - #AR_ERROR_NOT_YET_AVAILABLE if the camera failed to produce metadata for
+/// the given frame. Note: this will commonly happen for few frames right
+/// after @c ArSession_resume() due to the camera stack bringup.
+ArStatus ArFrame_acquireImageMetadata(const ArSession *session,
+ const ArFrame *frame,
+ ArImageMetadata **out_metadata);
/// Gets the set of anchors that were changed by the ArSession_update() that
/// produced this Frame.
@@ -919,8 +1211,9 @@
/// @param[inout] out_anchor_list The list to fill. This list must have
/// already been allocated with ArAnchorList_create(). If previously
/// used, the list will first be cleared.
-void ArFrame_getUpdatedAnchors(const ArSession* session, const ArFrame* frame,
- ArAnchorList* out_anchor_list);
+void ArFrame_getUpdatedAnchors(const ArSession *session,
+ const ArFrame *frame,
+ ArAnchorList *out_anchor_list);
/// Gets the set of trackables of a particular type that were changed by the
/// ArSession_update() call that produced this Frame.
@@ -932,18 +1225,23 @@
/// @param[inout] out_trackable_list The list to fill. This list must have
/// already been allocated with ArTrackableList_create(). If previously
/// used, the list will first be cleared.
-void ArFrame_getUpdatedTrackables(const ArSession* session,
- const ArFrame* frame,
+void ArFrame_getUpdatedTrackables(const ArSession *session,
+ const ArFrame *frame,
ArTrackableType filter_type,
- ArTrackableList* out_trackable_list);
+ ArTrackableList *out_trackable_list);
+
+/// @}
// === ArPointCloud methods ===
+/// @addtogroup pointcloud
+/// @{
+
/// Retrieves the number of points in the point cloud.
///
-void ArPointCloud_getNumberOfPoints(const ArSession* session,
- const ArPointCloud* point_cloud,
- int32_t* out_number_of_points);
+void ArPointCloud_getNumberOfPoints(const ArSession *session,
+ const ArPointCloud *point_cloud,
+ int32_t *out_number_of_points);
/// Retrieves a pointer to the point cloud data.
///
@@ -958,24 +1256,29 @@
/// longer. The points are in world coordinates consistent with the frame it was
/// obtained from. If the number of points is zero, then the value of
/// @c *out_point_cloud_data should is undefined.
-void ArPointCloud_getData(const ArSession* session,
- const ArPointCloud* point_cloud,
- const float** out_point_cloud_data);
+void ArPointCloud_getData(const ArSession *session,
+ const ArPointCloud *point_cloud,
+ const float **out_point_cloud_data);
/// Returns the timestamp in nanoseconds when this point cloud was observed.
/// This timestamp uses the same time base as ArFrame_getTimestamp().
-void ArPointCloud_getTimestamp(const ArSession* session,
- const ArPointCloud* point_cloud,
- int64_t* out_timestamp_ns);
+void ArPointCloud_getTimestamp(const ArSession *session,
+ const ArPointCloud *point_cloud,
+ int64_t *out_timestamp_ns);
/// Releases a reference to the point cloud. This must match a call to
/// ArFrame_acquirePointCloud().
///
/// This method may safely be called with @c nullptr - it will do nothing.
-void ArPointCloud_release(ArPointCloud* point_cloud);
+void ArPointCloud_release(ArPointCloud *point_cloud);
+
+/// @}
// === Image Metadata methods ===
+/// @addtogroup image
+/// @{
+
/// Retrieves the capture metadata for the current camera image.
///
/// @c ACameraMetadata is a struct in Android NDK. Include NdkCameraMetadata.h
@@ -984,61 +1287,69 @@
/// Note: that the ACameraMetadata returned from this function will be invalid
/// after its ArImageMetadata object is released.
void ArImageMetadata_getNdkCameraMetadata(
- const ArSession* session, const ArImageMetadata* image_metadata,
- const ACameraMetadata** out_ndk_metadata);
+ const ArSession *session,
+ const ArImageMetadata *image_metadata,
+ const ACameraMetadata **out_ndk_metadata);
/// Releases a reference to the metadata. This must match a call to
/// ArFrame_acquireImageMetadata().
///
/// This method may safely be called with @c nullptr - it will do nothing.
-void ArImageMetadata_release(ArImageMetadata* metadata);
+void ArImageMetadata_release(ArImageMetadata *metadata);
+
+/// @}
// === ArLightEstimate methods ===
+/// @addtogroup light
+/// @{
+
/// Allocates a light estimate object.
-void ArLightEstimate_create(const ArSession* session,
- ArLightEstimate** out_light_estimate);
+void ArLightEstimate_create(const ArSession *session,
+ ArLightEstimate **out_light_estimate);
/// Releases the provided light estimate object.
-void ArLightEstimate_destroy(ArLightEstimate* light_estimate);
+void ArLightEstimate_destroy(ArLightEstimate *light_estimate);
/// Retrieves the validity state of a light estimate. If the resulting value of
/// @c *out_light_estimate_state is not #AR_LIGHT_ESTIMATE_STATE_VALID, the
/// estimate should not be used for rendering.
-void ArLightEstimate_getState(const ArSession* session,
- const ArLightEstimate* light_estimate,
- ArLightEstimateState* out_light_estimate_state);
+void ArLightEstimate_getState(const ArSession *session,
+ const ArLightEstimate *light_estimate,
+ ArLightEstimateState *out_light_estimate_state);
/// Retrieves the pixel intensity of the current camera view. Values are in the
/// range (0.0, 1.0), with zero being black and one being white.
-void ArLightEstimate_getPixelIntensity(const ArSession* session,
- const ArLightEstimate* light_estimate,
- float* out_pixel_intensity);
+void ArLightEstimate_getPixelIntensity(const ArSession *session,
+ const ArLightEstimate *light_estimate,
+ float *out_pixel_intensity);
/// @}
// === ArAnchorList methods ===
-/// @addtogroup trackable
+/// @addtogroup anchor
/// @{
/// Creates an anchor list object.
-void ArAnchorList_create(const ArSession* session,
- ArAnchorList** out_anchor_list);
+void ArAnchorList_create(const ArSession *session,
+ ArAnchorList **out_anchor_list);
/// Releases the memory used by an anchor list object, along with all the anchor
/// references it holds.
-void ArAnchorList_destroy(ArAnchorList* anchor_list);
+void ArAnchorList_destroy(ArAnchorList *anchor_list);
/// Retrieves the number of anchors in this list.
-void ArAnchorList_getSize(const ArSession* session,
- const ArAnchorList* anchor_list, int32_t* out_size);
+void ArAnchorList_getSize(const ArSession *session,
+ const ArAnchorList *anchor_list,
+ int32_t *out_size);
/// Acquires a reference to an indexed entry in the list. This call must
/// eventually be matched with a call to ArAnchor_release().
-void ArAnchorList_acquireItem(const ArSession* session,
- const ArAnchorList* anchor_list, int32_t index,
- ArAnchor** out_anchor);
+void ArAnchorList_acquireItem(const ArSession *session,
+ const ArAnchorList *anchor_list,
+ int32_t index,
+ ArAnchor **out_anchor);
// === ArAnchor methods ===
@@ -1051,45 +1362,53 @@
/// @param[in] anchor The anchor to retrieve the pose of.
/// @param[inout] out_pose An already-allocated ArPose object into which the
/// pose will be stored.
-void ArAnchor_getPose(const ArSession* session, const ArAnchor* anchor,
- ArPose* out_pose);
+void ArAnchor_getPose(const ArSession *session,
+ const ArAnchor *anchor,
+ ArPose *out_pose);
/// Retrieves the current state of the pose of this anchor.
-void ArAnchor_getTrackingState(const ArSession* session, const ArAnchor* anchor,
- ArTrackingState* out_tracking_state);
+void ArAnchor_getTrackingState(const ArSession *session,
+ const ArAnchor *anchor,
+ ArTrackingState *out_tracking_state);
/// Tells ARCore to stop tracking and forget this anchor. This call does not
/// release the reference to the anchor - that must be done separately using
/// ArAnchor_release().
-void ArAnchor_detach(ArSession* session, ArAnchor* anchor);
+void ArAnchor_detach(ArSession *session, ArAnchor *anchor);
/// Releases a reference to an anchor. This does not mean that the anchor will
/// stop tracking, as it will be obtainable from e.g. ArSession_getAllAnchors()
/// if any other references exist.
///
/// This method may safely be called with @c nullptr - it will do nothing.
-void ArAnchor_release(ArAnchor* anchor);
+void ArAnchor_release(ArAnchor *anchor);
+
+/// @}
// === ArTrackableList methods ===
+/// @addtogroup trackable
+/// @{
+
/// Creates a trackable list object.
-void ArTrackableList_create(const ArSession* session,
- ArTrackableList** out_trackable_list);
+void ArTrackableList_create(const ArSession *session,
+ ArTrackableList **out_trackable_list);
/// Releases the memory used by a trackable list object, along with all the
/// anchor references it holds.
-void ArTrackableList_destroy(ArTrackableList* trackable_list);
+void ArTrackableList_destroy(ArTrackableList *trackable_list);
/// Retrieves the number of trackables in this list.
-void ArTrackableList_getSize(const ArSession* session,
- const ArTrackableList* trackable_list,
- int32_t* out_size);
+void ArTrackableList_getSize(const ArSession *session,
+ const ArTrackableList *trackable_list,
+ int32_t *out_size);
/// Acquires a reference to an indexed entry in the list. This call must
/// eventually be matched with a call to ArTrackable_release().
-void ArTrackableList_acquireItem(const ArSession* session,
- const ArTrackableList* trackable_list,
- int32_t index, ArTrackable** out_trackable);
+void ArTrackableList_acquireItem(const ArSession *session,
+ const ArTrackableList *trackable_list,
+ int32_t index,
+ ArTrackable **out_trackable);
// === ArTrackable methods ===
@@ -1098,17 +1417,18 @@
/// from other calls, for example ArSession_getAllTrackables().
///
/// This method may safely be called with @c nullptr - it will do nothing.
-void ArTrackable_release(ArTrackable* trackable);
+void ArTrackable_release(ArTrackable *trackable);
/// Retrieves the type of the trackable. See ::ArTrackableType for valid types.
-void ArTrackable_getType(const ArSession* session, const ArTrackable* trackable,
- ArTrackableType* out_trackable_type);
+void ArTrackable_getType(const ArSession *session,
+ const ArTrackable *trackable,
+ ArTrackableType *out_trackable_type);
/// Retrieves the current state of ARCore's knowledge of the pose of this
/// trackable.
-void ArTrackable_getTrackingState(const ArSession* session,
- const ArTrackable* trackable,
- ArTrackingState* out_tracking_state);
+void ArTrackable_getTrackingState(const ArSession *session,
+ const ArTrackable *trackable,
+ ArTrackingState *out_tracking_state);
/// Creates aa Anchor at the given pose in the world coordinate space, attached
/// to this Trackable, and acquires a reference to it. The type of Trackable
@@ -1122,9 +1442,10 @@
/// #AR_TRACKING_STATE_TRACKING
/// - #AR_ERROR_SESSION_PAUSED if the session was paused
/// - #AR_ERROR_RESOURCE_EXHAUSTED if too many anchors exist
-ArStatus ArTrackable_acquireNewAnchor(ArSession* session,
- ArTrackable* trackable, ArPose* pose,
- ArAnchor** out_anchor);
+ArStatus ArTrackable_acquireNewAnchor(ArSession *session,
+ ArTrackable *trackable,
+ ArPose *pose,
+ ArAnchor **out_anchor);
/// Gets the set of anchors attached to this trackable.
///
@@ -1133,12 +1454,17 @@
/// @param[inout] out_anchor_list The list to fill. This list must have
/// already been allocated with ArAnchorList_create(). If previously
/// used, the list will first be cleared.
-void ArTrackable_getAnchors(const ArSession* session,
- const ArTrackable* trackable,
- ArAnchorList* out_anchor_list);
+void ArTrackable_getAnchors(const ArSession *session,
+ const ArTrackable *trackable,
+ ArAnchorList *out_anchor_list);
+
+/// @}
// === ArPlane methods ===
+/// @addtogroup plane
+/// @{
+
/// Acquires a reference to the plane subsuming this plane.
///
/// Two or more planes may be automatically merged into a single parent plane,
@@ -1152,12 +1478,14 @@
///
/// Note: this function will set @c *out_subsumed_by to NULL if the plane is not
/// subsumed.
-void ArPlane_acquireSubsumedBy(const ArSession* session, const ArPlane* plane,
- ArPlane** out_subsumed_by);
+void ArPlane_acquireSubsumedBy(const ArSession *session,
+ const ArPlane *plane,
+ ArPlane **out_subsumed_by);
/// Retrieves the type (orientation) of the plane. See ::ArPlaneType.
-void ArPlane_getType(const ArSession* session, const ArPlane* plane,
- ArPlaneType* out_plane_type);
+void ArPlane_getType(const ArSession *session,
+ const ArPlane *plane,
+ ArPlaneType *out_plane_type);
/// Returns the pose of the center of the detected plane. The pose's transformed
/// +Y axis will be point normal out of the plane, with the +X and +Z axes
@@ -1167,25 +1495,29 @@
/// @param[in] plane The plane for which to retrieve center pose.
/// @param[inout] out_pose An already-allocated ArPose object into which the
/// pose will be stored.
-void ArPlane_getCenterPose(const ArSession* session, const ArPlane* plane,
- ArPose* out_pose);
+void ArPlane_getCenterPose(const ArSession *session,
+ const ArPlane *plane,
+ ArPose *out_pose);
/// Retrieves the length of this plane's bounding rectangle measured along the
/// local X-axis of the coordinate space defined by the output of
/// ArPlane_getCenterPose().
-void ArPlane_getExtentX(const ArSession* session, const ArPlane* plane,
- float* out_extent_x);
+void ArPlane_getExtentX(const ArSession *session,
+ const ArPlane *plane,
+ float *out_extent_x);
/// Retrieves the length of this plane's bounding rectangle measured along the
/// local Z-axis of the coordinate space defined by the output of
/// ArPlane_getCenterPose().
-void ArPlane_getExtentZ(const ArSession* session, const ArPlane* plane,
- float* out_extent_z);
+void ArPlane_getExtentZ(const ArSession *session,
+ const ArPlane *plane,
+ float *out_extent_z);
/// Retrieves the number of elements (not vertices) in the boundary polygon.
/// The number of vertices is 1/2 this size.
-void ArPlane_getPolygonSize(const ArSession* session, const ArPlane* plane,
- int32_t* out_polygon_size);
+void ArPlane_getPolygonSize(const ArSession *session,
+ const ArPlane *plane,
+ int32_t *out_polygon_size);
/// Returns the 2D vertices of a convex polygon approximating the detected
/// plane, in the form <tt>[x1, z1, x2, z2, ...]</tt>. These X-Z values are in
@@ -1196,51 +1528,76 @@
/// @param[in] plane The plane to retrieve the polygon from.
/// @param[inout] out_polygon_xz A pointer to an array of floats. The length of
/// this array must be at least that reported by ArPlane_getPolygonSize().
-void ArPlane_getPolygon(const ArSession* session, const ArPlane* plane,
- float* out_polygon_xz);
+void ArPlane_getPolygon(const ArSession *session,
+ const ArPlane *plane,
+ float *out_polygon_xz);
/// Sets @c *out_pose_in_extents to non-zero if the given pose (usually obtained
/// from a HitResult) is in the plane's rectangular extents.
-void ArPlane_isPoseInExtents(const ArSession* session, const ArPlane* plane,
- const ArPose* pose, int32_t* out_pose_in_extents);
+void ArPlane_isPoseInExtents(const ArSession *session,
+ const ArPlane *plane,
+ const ArPose *pose,
+ int32_t *out_pose_in_extents);
/// Sets @c *out_pose_in_extents to non-zero if the given pose (usually obtained
/// from a HitResult) is in the plane's polygon.
-void ArPlane_isPoseInPolygon(const ArSession* session, const ArPlane* plane,
- const ArPose* pose, int32_t* out_pose_in_polygon);
+void ArPlane_isPoseInPolygon(const ArSession *session,
+ const ArPlane *plane,
+ const ArPose *pose,
+ int32_t *out_pose_in_polygon);
+
+/// @}
// === ArPoint methods ===
-/// Returns the pose of the point. For @c Point objects created by
-/// ArFrame_hitTest(), the orientation will follow the behavior described in
-/// ArHitResult_getPose().
-///
+/// @addtogroup point
+/// @{
+
+/// Returns the pose of the point.
+/// If ArPoint_getOrientationMode() returns ESTIMATED_SURFACE_NORMAL, the
+/// orientation will follow the behavior described in ArHitResult_getHitPose().
+/// If ArPoint_getOrientationMode() returns INITIALIZED_TO_IDENTITY, then
+/// returns an orientation that is identity or close to identity.
/// @param[in] session The ARCore session.
/// @param[in] point The point to retrieve the pose of.
/// @param[inout] out_pose An already-allocated ArPose object into which the
-/// pose will be stored.
-void ArPoint_getPose(const ArSession* session, const ArPoint* point,
- ArPose* out_pose);
+/// pose will be stored.
+void ArPoint_getPose(const ArSession *session,
+ const ArPoint *point,
+ ArPose *out_pose);
+
+/// Returns the OrientationMode of the point. For @c Point objects created by
+/// ArFrame_hitTest().
+/// If OrientationMode is ESTIMATED_SURFACE_NORMAL, then normal of the surface
+/// centered around the ArPoint was estimated successfully.
+///
+/// @param[in] session The ARCore session.
+/// @param[in]    session               The ARCore session.
+/// @param[in]    point                 The point to retrieve the orientation
+///     mode of.
+/// @param[inout] out_orientation_mode  OrientationMode output result for
+///     the point.
+void ArPoint_getOrientationMode(const ArSession *session,
+ const ArPoint *point,
+ ArPointOrientationMode *out_orientation_mode);
/// @}
// === ArHitResultList methods ===
-/// @addtogroup frame
+/// @addtogroup hit
/// @{
/// Creates a hit result list object.
-void ArHitResultList_create(const ArSession* session,
- ArHitResultList** out_hit_result_list);
+void ArHitResultList_create(const ArSession *session,
+ ArHitResultList **out_hit_result_list);
/// Releases the memory used by a hit result list object, along with all the
/// trackable references it holds.
-void ArHitResultList_destroy(ArHitResultList* hit_result_list);
+void ArHitResultList_destroy(ArHitResultList *hit_result_list);
/// Retrieves the number of hit results in this list.
-void ArHitResultList_getSize(const ArSession* session,
- const ArHitResultList* hit_result_list,
- int32_t* out_size);
+void ArHitResultList_getSize(const ArSession *session,
+ const ArHitResultList *hit_result_list,
+ int32_t *out_size);
/// Copies an indexed entry in the list. This acquires a reference to any
/// trackable referenced by the item, and releases any reference currently held
@@ -1251,23 +1608,24 @@
/// @param[in] index Index of the entry to copy.
/// @param[inout] out_hit_result An already-allocated ArHitResult object into
/// which the result will be copied.
-void ArHitResultList_getItem(const ArSession* session,
- const ArHitResultList* hit_result_list,
- int32_t index, ArHitResult* out_hit_result);
+void ArHitResultList_getItem(const ArSession *session,
+ const ArHitResultList *hit_result_list,
+ int32_t index,
+ ArHitResult *out_hit_result);
// === ArHitResult methods ===
/// Allocates an empty hit result object.
-void ArHitResult_create(const ArSession* session, ArHitResult** out_hit_result);
+void ArHitResult_create(const ArSession *session, ArHitResult **out_hit_result);
/// Releases the memory used by a hit result object, along with any
/// trackable reference it holds.
-void ArHitResult_destroy(ArHitResult* hit_result);
+void ArHitResult_destroy(ArHitResult *hit_result);
/// Returns the distance from the camera to the hit location, in meters.
-void ArHitResult_getDistance(const ArSession* session,
- const ArHitResult* hit_result,
- float* out_distance);
+void ArHitResult_getDistance(const ArSession *session,
+ const ArHitResult *hit_result,
+ float *out_distance);
/// Returns the pose of the intersection between a ray and detected real-world
/// geometry. The position is the location in space where the ray intersected
@@ -1279,11 +1637,19 @@
/// planes), and Z+ is parallel to the plane, pointing roughly toward the
/// user's device.
///
-/// ::ArPoint : X+ is perpendicular to the cast ray and points right from the
-/// perspective of the user's device, Y+ points up, and Z+ points roughly toward
-/// the user's device.
+/// ::ArPoint :
+/// Attempt to estimate the normal of the surface centered around the hit test.
+/// Surface normal estimation is most likely to succeed on textured surfaces
+/// and with camera motion.
+/// If ArPoint_getOrientationMode() returns ESTIMATED_SURFACE_NORMAL,
+/// then X+ is perpendicular to the cast ray and parallel to the physical
+/// surface centered around the hit test, Y+ points along the estimated surface
+/// normal, and Z+ points roughly toward the user's device. If
+/// ArPoint_getOrientationMode() returns INITIALIZED_TO_IDENTITY, then X+ is
+/// perpendicular to the cast ray and points right from the perspective of the
+/// user's device, Y+ points up, and Z+ points roughly toward the user's device.
///
-/// If you wish to retain the location of this pose beyond the duration of a
+/// If you wish to retain the location of this pose beyond the duration of a
/// single frame, create an anchor using ArHitResult_createAnchor() to save the
/// pose in a physically consistent way.
///
@@ -1291,14 +1657,15 @@
/// @param[in] hit_result The hit result to retrieve the pose of.
/// @param[inout] out_pose An already-allocated ArPose object into which the
/// pose will be stored.
-void ArHitResult_getHitPose(const ArSession* session,
- const ArHitResult* hit_result, ArPose* out_pose);
+void ArHitResult_getHitPose(const ArSession *session,
+ const ArHitResult *hit_result,
+ ArPose *out_pose);
-/// Acquires reference to the hit trackable. This call must be paired with a
+/// Acquires reference to the hit trackable. This call must be paired with a
/// call to ArTrackable_release().
-void ArHitResult_acquireTrackable(const ArSession* session,
- const ArHitResult* hit_result,
- ArTrackable** out_trackable);
+void ArHitResult_acquireTrackable(const ArSession *session,
+ const ArHitResult *hit_result,
+ ArTrackable **out_trackable);
/// Creates a new anchor at the hit location. See ArHitResult_getHitPose() for
/// details. This is equivalent to creating an anchor on the hit trackable at
@@ -1310,9 +1677,9 @@
/// - #AR_ERROR_RESOURCE_EXHAUSTED
/// - #AR_ERROR_DEADLINE_EXCEEDED - hit result must be used before the next call
/// to update().
-ArStatus ArHitResult_acquireNewAnchor(ArSession* session,
- ArHitResult* hit_result,
- ArAnchor** out_anchor);
+ArStatus ArHitResult_acquireNewAnchor(ArSession *session,
+ ArHitResult *hit_result,
+ ArAnchor **out_anchor);
/// @}
@@ -1320,5 +1687,4 @@
}
#endif
-
-#endif // ARCORE_C_API_H_
+#endif // THIRD_PARTY_ARCORE_AR_CORE_C_API_ARCORE_C_API_H_
diff --git a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.aar b/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.aar
deleted file mode 100644
index 7470bc3..0000000
--- a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.aar
+++ /dev/null
Binary files differ
diff --git a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.aar.md5 b/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.aar.md5
deleted file mode 100644
index 646b1b1..0000000
--- a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.aar.md5
+++ /dev/null
@@ -1 +0,0 @@
-91b870506b7963c285147e29523418d1
diff --git a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.aar.sha1 b/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.aar.sha1
deleted file mode 100644
index b4fd030..0000000
--- a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.aar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0da2caba973a305d880d979ab1cda6af1081747e
diff --git a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.pom b/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.pom
deleted file mode 100644
index 3f81dd4..0000000
--- a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.pom
+++ /dev/null
@@ -1,17 +0,0 @@
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xmlns="http://maven.apache.org/POM/4.0.0"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>com.google.ar</groupId>
- <artifactId>core</artifactId>
- <version>0.91.0</version>
- <packaging>aar</packaging>
- <licenses>
- <license>
- <name>Apache License, Version 2.0</name>
- <url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- <comments>A business-friendly OSS license</comments>
- </license>
-</licenses>
-</project>
diff --git a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.pom.md5 b/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.pom.md5
deleted file mode 100644
index 5b7b430..0000000
--- a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.pom.md5
+++ /dev/null
@@ -1 +0,0 @@
-02e1872e7de92d7237f8a4e4157ee797
diff --git a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.pom.sha1 b/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.pom.sha1
deleted file mode 100644
index cbf27a2..0000000
--- a/libraries/m2repository/com/google/ar/core/0.91.0/core-0.91.0.pom.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a5e5a7a3b85f0becb109e0308f0f92ff8c724b8b
diff --git a/samples/computervision/app/build.gradle b/samples/computervision/app/build.gradle
index c08d32a..b86ca61 100644
--- a/samples/computervision/app/build.gradle
+++ b/samples/computervision/app/build.gradle
@@ -21,7 +21,7 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:0.91.0'
+ implementation 'com.google.ar:core:1.0.0'
implementation 'com.android.support:appcompat-v7:27.0.2'
implementation 'com.android.support:design:27.0.2'
diff --git a/samples/computervision/app/src/main/AndroidManifest.xml b/samples/computervision/app/src/main/AndroidManifest.xml
index 1b74db3..890ead4 100644
--- a/samples/computervision/app/src/main/AndroidManifest.xml
+++ b/samples/computervision/app/src/main/AndroidManifest.xml
@@ -18,27 +18,27 @@
xmlns:tools="http://schemas.android.com/tools"
package="com.google.ar.core.examples.java.computervision">
- <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.CAMERA"/>
- <application
- android:allowBackup="false"
- android:icon="@drawable/ic_launcher"
+ <application
+ android:allowBackup="false"
+ android:icon="@drawable/ic_launcher"
+ android:label="@string/app_name"
+ android:theme="@style/AppTheme"
+ android:usesCleartextTraffic="false"
+ tools:ignore="GoogleAppIndexingWarning">
+
+ <activity
+ android:name=".MainActivity"
android:label="@string/app_name"
- android:theme="@style/AppTheme"
- android:usesCleartextTraffic="false"
- tools:ignore="GoogleAppIndexingWarning">
-
- <activity
- android:name=".MainActivity"
- android:label="@string/app_name"
- android:configChanges="orientation|screenSize"
- android:exported="true"
- android:theme="@style/Theme.AppCompat.NoActionBar"
- android:screenOrientation="locked">
- <intent-filter>
- <action android:name="android.intent.action.MAIN" />
- <category android:name="android.intent.category.LAUNCHER" />
- </intent-filter>
- </activity>
- </application>
+ android:configChanges="orientation|screenSize"
+ android:exported="true"
+ android:theme="@style/Theme.AppCompat.NoActionBar"
+ android:screenOrientation="locked">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN"/>
+ <category android:name="android.intent.category.LAUNCHER"/>
+ </intent-filter>
+ </activity>
+ </application>
</manifest>
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/MainActivity.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/MainActivity.java
index 96481d4..2dbfbff 100644
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/MainActivity.java
+++ b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/MainActivity.java
@@ -16,9 +16,6 @@
package com.google.ar.core.examples.java.computervision;
-import static com.google.ar.core.examples.java.computervision.utility.CameraPermissionHelper.CAMERA_PERMISSION_CODE;
-
-import android.content.pm.PackageManager;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
@@ -27,9 +24,10 @@
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.MotionEvent;
+import android.view.Surface;
import android.view.View;
import android.view.WindowManager;
-import com.google.ar.core.Config;
+import android.widget.Toast;
import com.google.ar.core.Frame;
import com.google.ar.core.Session;
import com.google.ar.core.examples.java.computervision.rendering.BackgroundRenderer;
@@ -46,267 +44,275 @@
/** This is a simple example that demonstrates texture reading with ARCore. */
public class MainActivity extends AppCompatActivity implements GLSurfaceView.Renderer {
- private static final String TAG = MainActivity.class.getSimpleName();
+ private static final String TAG = MainActivity.class.getSimpleName();
- // Rendering. The Renderers are created here, and initialized when the GL surface is created.
- private GLSurfaceView mSurfaceView;
+ // Rendering. The Renderers are created here, and initialized when the GL surface is created.
+ private GLSurfaceView surfaceView;
- private Session mSession;
- private Snackbar mMessageSnackbar;
- private DisplayRotationHelper mDisplayRotationHelper;
+ private Session session;
+ private Snackbar messageSnackbar;
+ private DisplayRotationHelper displayRotationHelper;
- private final BackgroundRenderer mBackgroundRenderer = new BackgroundRenderer();
- private final CameraImageBuffer mEdgeImage = new CameraImageBuffer();
- private final TextureReader mTextureReader = new TextureReader();
+ private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
+ private final CameraImageBuffer edgeImage = new CameraImageBuffer();
+ private final TextureReader textureReader = new TextureReader();
- // ArCore full resolution texture has a size of 1920 x 1080.
- private static final int TEXTURE_WIDTH = 1920;
- private static final int TEXTURE_HEIGHT = 1080;
+ // ArCore full resolution texture has a size of 1920 x 1080.
+ private static final int TEXTURE_WIDTH = 1920;
+ private static final int TEXTURE_HEIGHT = 1080;
- // We choose a lower sampling resolution.
- private static final int IMAGE_WIDTH = 1024;
- private static final int IMAGE_HEIGHT = 512;
+ // We choose a lower sampling resolution.
+ private static final int IMAGE_WIDTH = 1024;
+ private static final int IMAGE_HEIGHT = 512;
- private int mFrameBufferIndex = -1;
+ private int frameBufferIndex = -1;
- @Override
- protected void onCreate(Bundle savedInstanceState) {
- super.onCreate(savedInstanceState);
- setContentView(R.layout.activity_main);
- mSurfaceView = (GLSurfaceView) findViewById(R.id.surfaceview);
- mDisplayRotationHelper = new DisplayRotationHelper(/*context=*/ this);
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_main);
+ surfaceView = (GLSurfaceView) findViewById(R.id.surfaceview);
+ displayRotationHelper = new DisplayRotationHelper(/*context=*/ this);
- // Setup a touch listener to control the texture splitter position.
- mSurfaceView.setOnTouchListener(
- new View.OnTouchListener() {
- private static final float SWIPE_SCALING_FACTOR = 1.15f;
- private static final float MIN_DELTA = .01f;
- private float mStartPosition = 0;
- private float mStartCoordinate = 0;
+ // Setup a touch listener to control the texture splitter position.
+ surfaceView.setOnTouchListener(
+ new View.OnTouchListener() {
+ private static final float SWIPE_SCALING_FACTOR = 1.15f;
+ private static final float MIN_DELTA = .01f;
+ private float startPosition = 0;
+ private float startCoordX = 0;
+ private float startCoordY = 0;
+ private int displayRotation = 0;
- @Override
- public boolean onTouch(View v, MotionEvent e) {
- switch (e.getAction()) {
- case MotionEvent.ACTION_DOWN:
- {
- mStartCoordinate = e.getY();
- mStartPosition = mBackgroundRenderer.getSplitterPosition();
- break;
- }
- case MotionEvent.ACTION_MOVE:
- {
- float delta = (e.getY() - mStartCoordinate) / mSurfaceView.getHeight();
-
- if (Math.abs(delta) > MIN_DELTA) {
- float newPosition = mStartPosition + delta * SWIPE_SCALING_FACTOR;
- newPosition = Math.min(1.f, Math.max(0.f, newPosition));
- mBackgroundRenderer.setSplitterPosition(newPosition);
- }
- break;
- }
- default:
- break;
- }
-
- return true;
+ @Override
+ public boolean onTouch(View v, MotionEvent e) {
+ switch (e.getAction()) {
+ case MotionEvent.ACTION_DOWN:
+ {
+ startCoordX = e.getX();
+ startCoordY = e.getY();
+ displayRotation = displayRotationHelper.getRotation();
+ startPosition = backgroundRenderer.getSplitterPosition();
+ break;
}
- });
+ case MotionEvent.ACTION_MOVE:
+ {
+ float delta = 0;
+ switch (displayRotation) {
+ case Surface.ROTATION_90:
+ delta = (e.getX() - startCoordX) / surfaceView.getWidth();
+ break;
+ case Surface.ROTATION_180:
+ delta = -(e.getY() - startCoordY) / surfaceView.getHeight();
+ break;
+ case Surface.ROTATION_270:
+ delta = -(e.getX() - startCoordX) / surfaceView.getWidth();
+ break;
+ case Surface.ROTATION_0:
+ default:
+ delta = (e.getY() - startCoordY) / surfaceView.getHeight();
+ break;
+ }
+ if (Math.abs(delta) > MIN_DELTA) {
+ float newPosition = startPosition + delta * SWIPE_SCALING_FACTOR;
+ newPosition = Math.min(1.f, Math.max(0.f, newPosition));
+ backgroundRenderer.setSplitterPosition(newPosition);
+ }
+ break;
+ }
+ default:
+ break;
+ }
- // Set up renderer.
- mSurfaceView.setPreserveEGLContextOnPause(true);
- mSurfaceView.setEGLContextClientVersion(2);
- mSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
- mSurfaceView.setRenderer(this);
- mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ return true;
+ }
+ });
- Exception exception = null;
- String message = null;
- try {
- mSession = new Session(/* context= */ this);
- } catch (UnavailableArcoreNotInstalledException e) {
- message = "Please install ARCore";
- exception = e;
- } catch (UnavailableApkTooOldException e) {
- message = "Please update ARCore";
- exception = e;
- } catch (UnavailableSdkTooOldException e) {
- message = "Please update this app";
- exception = e;
- } catch (Exception e) {
- message = "This device does not support AR";
- exception = e;
- }
+ // Set up renderer.
+ surfaceView.setPreserveEGLContextOnPause(true);
+ surfaceView.setEGLContextClientVersion(2);
+ surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
+ surfaceView.setRenderer(this);
+ surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ }
- if (message != null) {
- showSnackbarMessage(message, true);
- Log.e(TAG, "Exception creating session", exception);
- return;
- }
+ @Override
+ protected void onResume() {
+ super.onResume();
- // Create default config and check if supported.
- Config defaultConfig = new Config(mSession);
- if (!mSession.isSupported(defaultConfig)) {
- showSnackbarMessage("This device does not support AR", true);
- }
- }
-
- @Override
- protected void onResume() {
- super.onResume();
-
+ if (session == null) {
+ Exception exception = null;
+ String message = null;
+ try {
// ARCore requires camera permissions to operate. If we did not yet obtain runtime
// permission on Android M and above, now is a good time to ask the user for it.
- if (CameraPermissionHelper.hasCameraPermission(this)) {
- if (mSession != null) {
- // Note that order matters - see the note in onPause(), the reverse applies here.
- mSession.resume();
- }
- mSurfaceView.onResume();
- mDisplayRotationHelper.onResume();
- } else {
- CameraPermissionHelper.requestCameraPermission(this);
- }
- }
-
- @Override
- public void onPause() {
- super.onPause();
- // Note that the order matters - GLSurfaceView is paused first so that it does not try
- // to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
- // still call mSession.update() and get a SessionPausedException.
- mDisplayRotationHelper.onPause();
- mSurfaceView.onPause();
- if (mSession != null) {
- mSession.pause();
- }
- }
-
- @Override
- public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
- if (requestCode != CAMERA_PERMISSION_CODE) {
+ if (!CameraPermissionHelper.hasCameraPermission(this)) {
+ CameraPermissionHelper.requestCameraPermission(this);
return;
}
- if (results.length > 0 && results[0] == PackageManager.PERMISSION_DENIED) {
- // Permission denied.
- if (CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
- // Permission denied without checking "Do not ask again."
- showSnackbarMessage("Camera permission is needed to run this application", true);
- } else if (!CameraPermissionHelper.hasCameraPermission(this)) {
- // Permission denied with checking "Do not ask again".
- // Show toast and take user to app settings where they can grant the camera
- // permission.
- CameraPermissionHelper.launchPermissionSettings(this);
+
+ session = new Session(/* context= */ this);
+ } catch (UnavailableArcoreNotInstalledException e) {
+ message = "Please install ARCore";
+ exception = e;
+ } catch (UnavailableApkTooOldException e) {
+ message = "Please update ARCore";
+ exception = e;
+ } catch (UnavailableSdkTooOldException e) {
+ message = "Please update this app";
+ exception = e;
+ } catch (Exception e) {
+ message = "This device does not support AR";
+ exception = e;
+ }
+
+ if (message != null) {
+ showSnackbarMessage(message, true);
+ Log.e(TAG, "Exception creating session", exception);
+ return;
+ }
+ }
+
+ // Note that order matters - see the note in onPause(), the reverse applies here.
+ session.resume();
+ surfaceView.onResume();
+ displayRotationHelper.onResume();
+ }
+
+ @Override
+ public void onPause() {
+ super.onPause();
+ // Note that the order matters - GLSurfaceView is paused first so that it does not try
+ // to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
+ // still call session.update() and get a SessionPausedException.
+ displayRotationHelper.onPause();
+ surfaceView.onPause();
+ if (session != null) {
+ session.pause();
+ }
+ }
+
+ @Override
+ public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
+ if (!CameraPermissionHelper.hasCameraPermission(this)) {
+ Toast.makeText(this, "Camera permission is needed to run this application", Toast.LENGTH_LONG)
+ .show();
+ if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
+ // Permission denied with checking "Do not ask again".
+ CameraPermissionHelper.launchPermissionSettings(this);
+ }
+ finish();
+ }
+ }
+
+ @Override
+ public void onWindowFocusChanged(boolean hasFocus) {
+ super.onWindowFocusChanged(hasFocus);
+ if (hasFocus) {
+ // Standard Android full-screen functionality.
+ getWindow()
+ .getDecorView()
+ .setSystemUiVisibility(
+ View.SYSTEM_UI_FLAG_LAYOUT_STABLE
+ | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
+ | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
+ | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
+ | View.SYSTEM_UI_FLAG_FULLSCREEN
+ | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+ }
+ }
+
+ @Override
+ public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+ GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
+
+ // Create the texture and pass it to ARCore session to be filled during update().
+ backgroundRenderer.createOnGlThread(/*context=*/ this);
+
+ if (session != null) {
+ session.setCameraTextureName(backgroundRenderer.getTextureId());
+ }
+
+ // The image format can be either IMAGE_FORMAT_RGBA or IMAGE_FORMAT_I8.
+ // Set keepAspectRatio to false so that the output image covers the whole viewport.
+ textureReader.create(CameraImageBuffer.IMAGE_FORMAT_I8, IMAGE_WIDTH, IMAGE_HEIGHT, false);
+ }
+
+ @Override
+ public void onSurfaceChanged(GL10 gl, int width, int height) {
+ displayRotationHelper.onSurfaceChanged(width, height);
+ GLES20.glViewport(0, 0, width, height);
+ }
+
+ @Override
+ public void onDrawFrame(GL10 gl) {
+ // Clear screen to notify driver it should not load any pixels from previous frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
+ if (session == null) {
+ return;
+ }
+ // Notify ARCore session that the view size changed so that the perspective matrix and
+ // the video background can be properly adjusted.
+ displayRotationHelper.updateSessionIfNeeded(session);
+
+ try {
+ Frame frame = session.update();
+
+ // If there is a frame being requested previously, acquire the pixels and process it.
+ if (frameBufferIndex >= 0) {
+ CameraImageBuffer imageBuffer = textureReader.acquireFrame(frameBufferIndex);
+
+ // Detect the edges from the captured grayscale image.
+ if (EdgeDetector.detect(edgeImage, imageBuffer)) {
+ // Set the edge image to renderer as overlay.
+ backgroundRenderer.setOverlayImage(edgeImage);
+ }
+
+ // You should always release frame buffer after using. Otherwise the next call to
+ // submitFrame() may fail.
+ textureReader.releaseFrame(frameBufferIndex);
+ }
+
+ // Submit request for the texture from the current frame.
+ frameBufferIndex =
+ textureReader.submitFrame(
+ backgroundRenderer.getTextureId(), TEXTURE_WIDTH, TEXTURE_HEIGHT);
+
+ // Draw background video.
+ backgroundRenderer.draw(frame);
+
+ } catch (Throwable t) {
+ // Avoid crashing the application due to unhandled exceptions.
+ Log.e(TAG, "Exception on the OpenGL thread", t);
+ }
+ }
+
+ private void showSnackbarMessage(String message, boolean finishOnDismiss) {
+ messageSnackbar =
+ Snackbar.make(
+ MainActivity.this.findViewById(android.R.id.content),
+ message,
+ Snackbar.LENGTH_INDEFINITE);
+ messageSnackbar.getView().setBackgroundColor(0xbf323232);
+ if (finishOnDismiss) {
+ messageSnackbar.setAction(
+ "Dismiss",
+ new View.OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ messageSnackbar.dismiss();
}
- }
- }
-
- @Override
- public void onWindowFocusChanged(boolean hasFocus) {
- super.onWindowFocusChanged(hasFocus);
- if (hasFocus) {
- // Standard Android full-screen functionality.
- getWindow()
- .getDecorView()
- .setSystemUiVisibility(
- View.SYSTEM_UI_FLAG_LAYOUT_STABLE
- | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
- | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
- | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
- | View.SYSTEM_UI_FLAG_FULLSCREEN
- | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
- getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- }
- }
-
- @Override
- public void onSurfaceCreated(GL10 gl, EGLConfig config) {
- GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
-
- // Create the texture and pass it to ARCore session to be filled during update().
- mBackgroundRenderer.createOnGlThread(/*context=*/ this);
-
- if (mSession != null) {
- mSession.setCameraTextureName(mBackgroundRenderer.getTextureId());
- }
-
- // The image format can be either IMAGE_FORMAT_RGBA or IMAGE_FORMAT_I8.
- // Set keepAspectRatio to false so that the output image covers the whole viewport.
- mTextureReader.create(CameraImageBuffer.IMAGE_FORMAT_I8, IMAGE_WIDTH, IMAGE_HEIGHT, false);
- }
-
- @Override
- public void onSurfaceChanged(GL10 gl, int width, int height) {
- mDisplayRotationHelper.onSurfaceChanged(width, height);
- GLES20.glViewport(0, 0, width, height);
- }
-
- @Override
- public void onDrawFrame(GL10 gl) {
- // Clear screen to notify driver it should not load any pixels from previous frame.
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
- if (mSession == null) {
- return;
- }
- // Notify ARCore session that the view size changed so that the perspective matrix and
- // the video background can be properly adjusted.
- mDisplayRotationHelper.updateSessionIfNeeded(mSession);
-
- try {
- Frame frame = mSession.update();
-
- // If there is a frame being requested previously, acquire the pixels and process it.
- if (mFrameBufferIndex >= 0) {
- CameraImageBuffer imageBuffer = mTextureReader.acquireFrame(mFrameBufferIndex);
-
- // Detect the edges from the captured grayscale image.
- if (EdgeDetector.detect(mEdgeImage, imageBuffer)) {
- // Set the edge image to renderer as overlay.
- mBackgroundRenderer.setOverlayImage(mEdgeImage);
- }
-
- // You should always release frame buffer after using. Otherwise the next call to
- // submitFrame() may fail.
- mTextureReader.releaseFrame(mFrameBufferIndex);
+ });
+ messageSnackbar.addCallback(
+ new BaseTransientBottomBar.BaseCallback<Snackbar>() {
+ @Override
+ public void onDismissed(Snackbar transientBottomBar, int event) {
+ super.onDismissed(transientBottomBar, event);
+ finish();
}
-
- // Submit request for the texture from the current frame.
- mFrameBufferIndex = mTextureReader.submitFrame(
- mBackgroundRenderer.getTextureId(), TEXTURE_WIDTH, TEXTURE_HEIGHT);
-
- // Draw background video.
- mBackgroundRenderer.draw(frame);
-
- } catch (Throwable t) {
- // Avoid crashing the application due to unhandled exceptions.
- Log.e(TAG, "Exception on the OpenGL thread", t);
- }
+ });
}
-
- private void showSnackbarMessage(String message, boolean finishOnDismiss) {
- mMessageSnackbar =
- Snackbar.make(
- MainActivity.this.findViewById(android.R.id.content),
- message,
- Snackbar.LENGTH_INDEFINITE);
- mMessageSnackbar.getView().setBackgroundColor(0xbf323232);
- if (finishOnDismiss) {
- mMessageSnackbar.setAction(
- "Dismiss",
- new View.OnClickListener() {
- @Override
- public void onClick(View v) {
- mMessageSnackbar.dismiss();
- }
- });
- mMessageSnackbar.addCallback(
- new BaseTransientBottomBar.BaseCallback<Snackbar>() {
- @Override
- public void onDismissed(Snackbar transientBottomBar, int event) {
- super.onDismissed(transientBottomBar, event);
- finish();
- }
- });
- }
- mMessageSnackbar.show();
- }
+ messageSnackbar.show();
+ }
}
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/package-info.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/package-info.java
deleted file mode 100644
index c9e451a..0000000
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/package-info.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 2017 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * A sample to demonstrate marker detection library.
- */
-package com.google.ar.core.examples.java.computervision;
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/BackgroundRenderer.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/BackgroundRenderer.java
index 0b04ed2..9514e61 100644
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/BackgroundRenderer.java
+++ b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/BackgroundRenderer.java
@@ -33,232 +33,225 @@
* ARCore to be filled with the camera image.
*/
public class BackgroundRenderer {
- private static final String TAG = BackgroundRenderer.class.getSimpleName();
+ private static final String TAG = BackgroundRenderer.class.getSimpleName();
- private static final int COORDS_PER_VERTEX = 3;
- private static final int TEXCOORDS_PER_VERTEX = 2;
- private static final int FLOAT_SIZE = 4;
+ private static final int COORDS_PER_VERTEX = 3;
+ private static final int TEXCOORDS_PER_VERTEX = 2;
+ private static final int FLOAT_SIZE = 4;
- private FloatBuffer mQuadVertices;
- private FloatBuffer mQuadTexCoord;
- private FloatBuffer mQuadTexCoordTransformed;
+ private FloatBuffer quadVertices;
+ private FloatBuffer quadTexCoord;
+ private FloatBuffer quadTexCoordTransformed;
- private int mQuadProgram;
+ private int quadProgram;
- private int mQuadPositionAttrib;
- private int mQuadTexCoordAttrib;
- private int mQuadSplitterUniform;
- private int mBackgroundTextureId = -1;
- private int mOverlayTextureId = -1;
- private float mSplitterPosition = 0.5f;
+ private int quadPositionAttrib;
+ private int quadTexCoordAttrib;
+ private int quadSplitterUniform;
+ private int backgroundTextureId = -1;
+ private int overlayTextureId = -1;
+ private float splitterPosition = 0.5f;
- private CameraImageBuffer mOverlayImageBuffer;
+ private CameraImageBuffer overlayImageBuffer;
- public int getTextureId() {
- return mBackgroundTextureId;
+ public int getTextureId() {
+ return backgroundTextureId;
+ }
+
+ /**
+ * Allocates and initializes OpenGL resources needed by the background renderer. Must be called on
+ * the OpenGL thread, typically in {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10,
+ * EGLConfig)}.
+ *
+ * @param context Needed to access shader source.
+ */
+ public void createOnGlThread(Context context) {
+ // Generate the background texture.
+ int[] textures = new int[2];
+ GLES20.glGenTextures(2, textures, 0);
+ backgroundTextureId = textures[0];
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, backgroundTextureId);
+ GLES20.glTexParameteri(
+ GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(
+ GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(
+ GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
+ GLES20.glTexParameteri(
+ GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
+
+ overlayTextureId = textures[1];
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, overlayTextureId);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
+
+ int numVertices = 4;
+ if (numVertices != QUAD_COORDS.length / COORDS_PER_VERTEX) {
+ throw new RuntimeException("Unexpected number of vertices in BackgroundRenderer.");
}
- /**
- * Allocates and initializes OpenGL resources needed by the background renderer. Must be called
- * on the OpenGL thread, typically in {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10,
- * EGLConfig)}.
- *
- * @param context Needed to access shader source.
- */
- public void createOnGlThread(Context context) {
- // Generate the background texture.
- int[] textures = new int[2];
- GLES20.glGenTextures(2, textures, 0);
- mBackgroundTextureId = textures[0];
- GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mBackgroundTextureId);
- GLES20.glTexParameteri(
- GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(
- GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(
- GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
- GLES20.glTexParameteri(
- GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
+ ByteBuffer bbVertices = ByteBuffer.allocateDirect(QUAD_COORDS.length * FLOAT_SIZE);
+ bbVertices.order(ByteOrder.nativeOrder());
+ quadVertices = bbVertices.asFloatBuffer();
+ quadVertices.put(QUAD_COORDS);
+ quadVertices.position(0);
- mOverlayTextureId = textures[1];
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mOverlayTextureId);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
- GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
- GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
- GLES20.GL_NEAREST);
+ ByteBuffer bbTexCoords =
+ ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
+ bbTexCoords.order(ByteOrder.nativeOrder());
+ quadTexCoord = bbTexCoords.asFloatBuffer();
+ quadTexCoord.put(QUAD_TEXCOORDS);
+ quadTexCoord.position(0);
- int numVertices = 4;
- if (numVertices != QUAD_COORDS.length / COORDS_PER_VERTEX) {
- throw new RuntimeException("Unexpected number of vertices in BackgroundRenderer.");
- }
+ ByteBuffer bbTexCoordsTransformed =
+ ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
+ bbTexCoordsTransformed.order(ByteOrder.nativeOrder());
+ quadTexCoordTransformed = bbTexCoordsTransformed.asFloatBuffer();
- ByteBuffer bbVertices = ByteBuffer.allocateDirect(QUAD_COORDS.length * FLOAT_SIZE);
- bbVertices.order(ByteOrder.nativeOrder());
- mQuadVertices = bbVertices.asFloatBuffer();
- mQuadVertices.put(QUAD_COORDS);
- mQuadVertices.position(0);
+ int vertexShader =
+ ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, R.raw.screenquad_vertex);
+ int fragmentShader =
+ ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, R.raw.screenquad_fragment);
- ByteBuffer bbTexCoords =
- ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
- bbTexCoords.order(ByteOrder.nativeOrder());
- mQuadTexCoord = bbTexCoords.asFloatBuffer();
- mQuadTexCoord.put(QUAD_TEXCOORDS);
- mQuadTexCoord.position(0);
+ quadProgram = GLES20.glCreateProgram();
+ GLES20.glAttachShader(quadProgram, vertexShader);
+ GLES20.glAttachShader(quadProgram, fragmentShader);
+ GLES20.glLinkProgram(quadProgram);
+ GLES20.glUseProgram(quadProgram);
- ByteBuffer bbTexCoordsTransformed =
- ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
- bbTexCoordsTransformed.order(ByteOrder.nativeOrder());
- mQuadTexCoordTransformed = bbTexCoordsTransformed.asFloatBuffer();
+ ShaderUtil.checkGLError(TAG, "Program creation");
- int vertexShader =
- ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, R.raw.screenquad_vertex);
- int fragmentShader =
- ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER,
- R.raw.screenquad_fragment);
+ quadPositionAttrib = GLES20.glGetAttribLocation(quadProgram, "a_Position");
+ quadTexCoordAttrib = GLES20.glGetAttribLocation(quadProgram, "a_TexCoord");
+ quadSplitterUniform = GLES20.glGetUniformLocation(quadProgram, "s_SplitterPosition");
- mQuadProgram = GLES20.glCreateProgram();
- GLES20.glAttachShader(mQuadProgram, vertexShader);
- GLES20.glAttachShader(mQuadProgram, fragmentShader);
- GLES20.glLinkProgram(mQuadProgram);
- GLES20.glUseProgram(mQuadProgram);
+ int texLoc = GLES20.glGetUniformLocation(quadProgram, "TexVideo");
+ GLES20.glUniform1i(texLoc, 0);
+ texLoc = GLES20.glGetUniformLocation(quadProgram, "TexImage");
+ GLES20.glUniform1i(texLoc, 1);
- ShaderUtil.checkGLError(TAG, "Program creation");
+ ShaderUtil.checkGLError(TAG, "Program parameters");
+ }
- mQuadPositionAttrib = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
- mQuadTexCoordAttrib = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
- mQuadSplitterUniform = GLES20.glGetUniformLocation(mQuadProgram, "s_SplitterPosition");
+ /**
+ * Sets new overlay image buffer. This image buffer is used to render side by side with background
+ * video.
+ *
+ * @param imageBuffer the new image buffer for the overlay texture.
+ */
+ public void setOverlayImage(CameraImageBuffer imageBuffer) {
+ overlayImageBuffer = imageBuffer;
+ }
- int texLoc = GLES20.glGetUniformLocation(mQuadProgram, "TexVideo");
- GLES20.glUniform1i(texLoc, 0);
- texLoc = GLES20.glGetUniformLocation(mQuadProgram, "TexImage");
- GLES20.glUniform1i(texLoc, 1);
+ /**
+ * Gets the texture splitter position.
+ *
+ * @return the splitter position.
+ */
+ public float getSplitterPosition() {
+ return splitterPosition;
+ }
- ShaderUtil.checkGLError(TAG, "Program parameters");
+ /**
+ * Sets the splitter position. This position defines the splitting position between the background
+ * video and the image.
+ *
+ * @param position the new splitter position.
+ */
+ public void setSplitterPosition(float position) {
+ splitterPosition = position;
+ }
+
+ /**
+ * Draws the AR background image. The image will be drawn such that virtual content rendered with
+ * the matrices provided by {@link Frame#getViewMatrix(float[], int)} and {@link
+ * Session#getProjectionMatrix(float[], int, float, float)} will accurately follow static physical
+ * objects. This must be called <b>before</b> drawing virtual content.
+ *
+ * @param frame The last {@code Frame} returned by {@link Session#update()}.
+ */
+ public void draw(Frame frame) {
+ // If display rotation changed (also includes view size change), we need to re-query the uv
+ // coordinates for the screen rect, as they may have changed as well.
+ if (frame.hasDisplayGeometryChanged()) {
+ frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
}
- /**
- * Sets new overlay image buffer. This image buffer is used to render side by side with
- * background video.
- *
- * @param imageBuffer the new image buffer for the overlay texture.
- */
- public void setOverlayImage(CameraImageBuffer imageBuffer) {
- mOverlayImageBuffer = imageBuffer;
+ // No need to test or write depth, the screen quad has arbitrary depth, and is expected
+ // to be drawn first.
+ GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+ GLES20.glDepthMask(false);
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, backgroundTextureId);
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
+
+ // Apply overlay image buffer
+ if (overlayImageBuffer != null
+ && overlayImageBuffer.format == CameraImageBuffer.IMAGE_FORMAT_I8) {
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, overlayTextureId);
+
+ ByteBuffer pixelBuffer = overlayImageBuffer.buffer;
+ pixelBuffer.position(0);
+ GLES20.glTexImage2D(
+ GLES20.GL_TEXTURE_2D,
+ 0,
+ GLES20.GL_LUMINANCE,
+ overlayImageBuffer.width,
+ overlayImageBuffer.height,
+ 0,
+ GLES20.GL_LUMINANCE,
+ GLES20.GL_UNSIGNED_BYTE,
+ pixelBuffer);
}
- /**
- * Gets the texture splitter position.
- *
- * @return the splitter position.
- */
- public float getSplitterPosition() {
- return mSplitterPosition;
- }
+ GLES20.glUseProgram(quadProgram);
- /**
- * Sets the splitter position. This position defines the splitting position between the
- * background video and the image.
- *
- * @param position the new splitter position.
- */
- public void setSplitterPosition(float position) {
- mSplitterPosition = position;
- }
+ // Set the vertex positions.
+ GLES20.glVertexAttribPointer(
+ quadPositionAttrib, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadVertices);
- /**
- * Draws the AR background image. The image will be drawn such that virtual content rendered
- * with the matrices provided by {@link Frame#getViewMatrix(float[], int)} and {@link
- * Session#getProjectionMatrix(float[], int, float, float)} will accurately follow static
- * physical objects. This must be called <b>before</b> drawing virtual content.
- *
- * @param frame The last {@code Frame} returned by {@link Session#update()}.
- */
- public void draw(Frame frame) {
- // If display rotation changed (also includes view size change), we need to re-query the uv
- // coordinates for the screen rect, as they may have changed as well.
- if (frame.hasDisplayGeometryChanged()) {
- frame.transformDisplayUvCoords(mQuadTexCoord, mQuadTexCoordTransformed);
- }
+ // Set splitter position.
+ GLES20.glUniform1f(quadSplitterUniform, splitterPosition);
- // No need to test or write depth, the screen quad has arbitrary depth, and is expected
- // to be drawn first.
- GLES20.glDisable(GLES20.GL_DEPTH_TEST);
- GLES20.glDepthMask(false);
+ // Set the texture coordinates.
+ GLES20.glVertexAttribPointer(
+ quadTexCoordAttrib,
+ TEXCOORDS_PER_VERTEX,
+ GLES20.GL_FLOAT,
+ false,
+ 0,
+ quadTexCoordTransformed);
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
- GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mBackgroundTextureId);
+ // Enable vertex arrays
+ GLES20.glEnableVertexAttribArray(quadPositionAttrib);
+ GLES20.glEnableVertexAttribArray(quadTexCoordAttrib);
- GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
- // Apply overlay image buffer
- if (mOverlayImageBuffer != null
- && mOverlayImageBuffer.format == CameraImageBuffer.IMAGE_FORMAT_I8) {
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mOverlayTextureId);
+ // Disable vertex arrays
+ GLES20.glDisableVertexAttribArray(quadPositionAttrib);
+ GLES20.glDisableVertexAttribArray(quadTexCoordAttrib);
- ByteBuffer pixelBuffer = mOverlayImageBuffer.buffer;
- pixelBuffer.position(0);
- GLES20.glTexImage2D(
- GLES20.GL_TEXTURE_2D,
- 0,
- GLES20.GL_LUMINANCE,
- mOverlayImageBuffer.width,
- mOverlayImageBuffer.height,
- 0,
- GLES20.GL_LUMINANCE,
- GLES20.GL_UNSIGNED_BYTE,
- pixelBuffer);
- }
+ // Restore the depth state for further drawing.
+ GLES20.glDepthMask(true);
+ GLES20.glEnable(GLES20.GL_DEPTH_TEST);
- GLES20.glUseProgram(mQuadProgram);
+ ShaderUtil.checkGLError(TAG, "Draw");
+ }
- // Set the vertex positions.
- GLES20.glVertexAttribPointer(
- mQuadPositionAttrib, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mQuadVertices);
+ private static final float[] QUAD_COORDS =
+ new float[] {
+ -1.0f, -1.0f, 0.0f, -1.0f, +1.0f, 0.0f, +1.0f, -1.0f, 0.0f, +1.0f, +1.0f, 0.0f,
+ };
- // Set splitter position.
- GLES20.glUniform1f(mQuadSplitterUniform, mSplitterPosition);
-
- // Set the texture coordinates.
- GLES20.glVertexAttribPointer(
- mQuadTexCoordAttrib,
- TEXCOORDS_PER_VERTEX,
- GLES20.GL_FLOAT,
- false,
- 0,
- mQuadTexCoordTransformed);
-
- // Enable vertex arrays
- GLES20.glEnableVertexAttribArray(mQuadPositionAttrib);
- GLES20.glEnableVertexAttribArray(mQuadTexCoordAttrib);
-
- GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
-
- // Disable vertex arrays
- GLES20.glDisableVertexAttribArray(mQuadPositionAttrib);
- GLES20.glDisableVertexAttribArray(mQuadTexCoordAttrib);
-
- // Restore the depth state for further drawing.
- GLES20.glDepthMask(true);
- GLES20.glEnable(GLES20.GL_DEPTH_TEST);
-
- ShaderUtil.checkGLError(TAG, "Draw");
- }
-
- private static final float[] QUAD_COORDS =
- new float[] {
- -1.0f, -1.0f, 0.0f,
- -1.0f, +1.0f, 0.0f,
- +1.0f, -1.0f, 0.0f,
- +1.0f, +1.0f, 0.0f,
- };
-
- private static final float[] QUAD_TEXCOORDS =
- new float[] {
- 0.0f, 1.0f,
- 0.0f, 0.0f,
- 1.0f, 1.0f,
- 1.0f, 0.0f,
- };
+ private static final float[] QUAD_TEXCOORDS =
+ new float[] {
+ 0.0f, 1.0f,
+ 0.0f, 0.0f,
+ 1.0f, 1.0f,
+ 1.0f, 0.0f,
+ };
}
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/ShaderUtil.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/ShaderUtil.java
index 8fc58b6..c401ccf 100644
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/ShaderUtil.java
+++ b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/ShaderUtil.java
@@ -17,97 +17,94 @@
import android.content.Context;
import android.opengl.GLES20;
import android.util.Log;
-
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
-/**
- * Shader helper functions.
- */
+/** Shader helper functions. */
public class ShaderUtil {
- /**
- * Converts a raw text file, saved as a resource, into an OpenGL ES shader.
- *
- * @param type The type of shader we will be creating.
- * @param resId The resource ID of the raw text file about to be turned into a shader.
- * @return The shader object handler.
- */
- public static int loadGLShader(String tag, Context context, int type, int resId) {
- String code = readRawTextFile(context, resId);
- return loadGLShader(tag, type, code);
+ /**
+ * Converts a raw text file, saved as a resource, into an OpenGL ES shader.
+ *
+ * @param type The type of shader we will be creating.
+ * @param resId The resource ID of the raw text file about to be turned into a shader.
+   * @return The shader object handle.
+ */
+ public static int loadGLShader(String tag, Context context, int type, int resId) {
+ String code = readRawTextFile(context, resId);
+ return loadGLShader(tag, type, code);
+ }
+
+ /**
+ * Converts a string into an OpenGL ES shader.
+ *
+ * @param type The type of shader we will be creating.
+ * @param code The string that contains the code of the shader.
+   * @return The shader object handle.
+ */
+ public static int loadGLShader(String tag, int type, String code) {
+ int shader = GLES20.glCreateShader(type);
+ GLES20.glShaderSource(shader, code);
+ GLES20.glCompileShader(shader);
+
+ // Get the compilation status.
+ final int[] compileStatus = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
+
+ // If the compilation failed, delete the shader.
+ if (compileStatus[0] == 0) {
+ Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
}
- /**
- * Converts a string into an OpenGL ES shader.
- *
- * @param type The type of shader we will be creating.
- * @param code The string that contains the code of the shader.
- * @return The shader object handler.
- */
- public static int loadGLShader(String tag, int type, String code) {
- int shader = GLES20.glCreateShader(type);
- GLES20.glShaderSource(shader, code);
- GLES20.glCompileShader(shader);
-
- // Get the compilation status.
- final int[] compileStatus = new int[1];
- GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
-
- // If the compilation failed, delete the shader.
- if (compileStatus[0] == 0) {
- Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
- GLES20.glDeleteShader(shader);
- shader = 0;
- }
-
- if (shader == 0) {
- throw new RuntimeException("Error creating shader.");
- }
-
- return shader;
+ if (shader == 0) {
+ throw new RuntimeException("Error creating shader.");
}
- /**
- * Checks if we've had an error inside of OpenGL ES, and if so what that error is.
- *
- * @param label Label to report in case of error.
- * @throws RuntimeException If an OpenGL error is detected.
- */
- public static void checkGLError(String tag, String label) {
- int lastError = GLES20.GL_NO_ERROR;
- // Drain the queue of all errors.
- int error;
- while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
- Log.e(tag, label + ": glError " + error);
- lastError = error;
- }
- if (lastError != GLES20.GL_NO_ERROR) {
- throw new RuntimeException(label + ": glError " + lastError);
- }
- }
+ return shader;
+ }
- /**
- * Converts a raw text file into a string.
- *
- * @param resId The resource ID of the raw text file about to be turned into a shader.
- * @return The context of the text file, or null in case of error.
- */
- private static String readRawTextFile(Context context, int resId) {
- InputStream inputStream = context.getResources().openRawResource(resId);
- try {
- BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
- StringBuilder sb = new StringBuilder();
- String line;
- while ((line = reader.readLine()) != null) {
- sb.append(line).append("\n");
- }
- reader.close();
- return sb.toString();
- } catch (IOException e) {
- e.printStackTrace();
- }
- return null;
+ /**
+ * Checks if we've had an error inside of OpenGL ES, and if so what that error is.
+ *
+ * @param label Label to report in case of error.
+ * @throws RuntimeException If an OpenGL error is detected.
+ */
+ public static void checkGLError(String tag, String label) {
+ int lastError = GLES20.GL_NO_ERROR;
+ // Drain the queue of all errors.
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ Log.e(tag, label + ": glError " + error);
+ lastError = error;
}
+ if (lastError != GLES20.GL_NO_ERROR) {
+ throw new RuntimeException(label + ": glError " + lastError);
+ }
+ }
+
+ /**
+ * Converts a raw text file into a string.
+ *
+   * @param resId The resource ID of the raw text file to read.
+   * @return The contents of the text file, or null in case of error.
+ */
+ private static String readRawTextFile(Context context, int resId) {
+ InputStream inputStream = context.getResources().openRawResource(resId);
+ try {
+ BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
+ StringBuilder sb = new StringBuilder();
+ String line;
+ while ((line = reader.readLine()) != null) {
+ sb.append(line).append("\n");
+ }
+ reader.close();
+ return sb.toString();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return null;
+ }
}
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/package-info.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/package-info.java
deleted file mode 100644
index 3695c78..0000000
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/rendering/package-info.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 2017 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * This package contains classes that do the rendering for this example.
- */
-package com.google.ar.core.examples.java.computervision.rendering;
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/CameraImageBuffer.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/CameraImageBuffer.java
index 094ffcf..1c7e630 100644
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/CameraImageBuffer.java
+++ b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/CameraImageBuffer.java
@@ -18,57 +18,56 @@
/** Image Buffer Class. */
public class CameraImageBuffer {
- /** The id corresponding to RGBA8888. */
- public static final int IMAGE_FORMAT_RGBA = 0;
+ /** The id corresponding to RGBA8888. */
+ public static final int IMAGE_FORMAT_RGBA = 0;
- /** The id corresponding to grayscale. */
- public static final int IMAGE_FORMAT_I8 = 1;
+ /** The id corresponding to grayscale. */
+ public static final int IMAGE_FORMAT_I8 = 1;
- /** The width of the image, in pixels. */
- public int width;
+ /** The width of the image, in pixels. */
+ public int width;
- /** The height of the image, in pixels. */
- public int height;
+ /** The height of the image, in pixels. */
+ public int height;
- /** The image buffer. */
- public ByteBuffer buffer;
+ /** The image buffer. */
+ public ByteBuffer buffer;
- /** Pixel format. Can be either IMAGE_FORMAT_RGBA or IMAGE_FORMAT_I8.*/
- public int format;
+ /** Pixel format. Can be either IMAGE_FORMAT_RGBA or IMAGE_FORMAT_I8. */
+ public int format;
- /**
- * Default constructor.
- */
- public CameraImageBuffer() {
- width = 1;
- height = 1;
- format = IMAGE_FORMAT_RGBA;
- buffer = ByteBuffer.allocateDirect(4);
+ /** Default constructor. */
+ public CameraImageBuffer() {
+ width = 1;
+ height = 1;
+ format = IMAGE_FORMAT_RGBA;
+ buffer = ByteBuffer.allocateDirect(4);
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param imgWidth the width of the image, in pixels.
+ * @param imgHeight the height of the image, in pixels.
+ * @param imgFormat the format of the image.
+ * @param imgBuffer the buffer of the image pixels.
+ */
+ public CameraImageBuffer(int imgWidth, int imgHeight, int imgFormat, ByteBuffer imgBuffer) {
+ if (imgWidth == 0 || imgHeight == 0) {
+ throw new RuntimeException("Invalid image size.");
}
- /**
- * Constructor.
- * @param imgWidth the width of the image, in pixels.
- * @param imgHeight the height of the image, in pixels.
- * @param imgFormat the format of the image.
- * @param imgBuffer the buffer of the image pixels.
- */
- public CameraImageBuffer(int imgWidth, int imgHeight, int imgFormat, ByteBuffer imgBuffer) {
- if (imgWidth == 0 || imgHeight == 0) {
- throw new RuntimeException("Invalid image size.");
- }
-
- if (imgFormat != IMAGE_FORMAT_RGBA && imgFormat != IMAGE_FORMAT_I8) {
- throw new RuntimeException("Invalid image format.");
- }
-
- if (imgBuffer == null) {
- throw new RuntimeException("Pixel buffer cannot be null.");
- }
-
- width = imgWidth;
- height = imgHeight;
- format = imgFormat;
- buffer = imgBuffer;
+ if (imgFormat != IMAGE_FORMAT_RGBA && imgFormat != IMAGE_FORMAT_I8) {
+ throw new RuntimeException("Invalid image format.");
}
+
+ if (imgBuffer == null) {
+ throw new RuntimeException("Pixel buffer cannot be null.");
+ }
+
+ width = imgWidth;
+ height = imgHeight;
+ format = imgFormat;
+ buffer = imgBuffer;
+ }
}
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/CameraPermissionHelper.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/CameraPermissionHelper.java
index b1dd07e..19b2b1c 100644
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/CameraPermissionHelper.java
+++ b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/CameraPermissionHelper.java
@@ -22,44 +22,34 @@
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
-import android.widget.Toast;
/** Helper to ask camera permission. */
public final class CameraPermissionHelper {
- public static final int CAMERA_PERMISSION_CODE = 0;
- private static final String CAMERA_PERMISSION = Manifest.permission.CAMERA;
+ private static final int CAMERA_PERMISSION_CODE = 0;
+ private static final String CAMERA_PERMISSION = Manifest.permission.CAMERA;
- /**
- * Check to see we have the necessary permissions for this app.
- */
- public static boolean hasCameraPermission(Activity activity) {
- return ContextCompat.checkSelfPermission(activity, CAMERA_PERMISSION)
- == PackageManager.PERMISSION_GRANTED;
- }
+  /** Check to see if we have the necessary permissions for this app. */
+ public static boolean hasCameraPermission(Activity activity) {
+ return ContextCompat.checkSelfPermission(activity, CAMERA_PERMISSION)
+ == PackageManager.PERMISSION_GRANTED;
+ }
- /** Check to see if we need to show the rationale for this permission. */
- public static boolean shouldShowRequestPermissionRationale(Activity activity) {
- return ActivityCompat.shouldShowRequestPermissionRationale(activity, CAMERA_PERMISSION);
- }
+  /** Check to see if we have the necessary permissions for this app, and ask for them if we don't. */
+ public static void requestCameraPermission(Activity activity) {
+ ActivityCompat.requestPermissions(
+ activity, new String[] {CAMERA_PERMISSION}, CAMERA_PERMISSION_CODE);
+ }
- /**
- * Check to see we have the necessary permissions for this app, and ask for them if we don't.
- */
- public static void requestCameraPermission(Activity activity) {
- ActivityCompat.requestPermissions(activity, new String[]{CAMERA_PERMISSION},
- CAMERA_PERMISSION_CODE);
- }
+ /** Check to see if we need to show the rationale for this permission. */
+ public static boolean shouldShowRequestPermissionRationale(Activity activity) {
+ return ActivityCompat.shouldShowRequestPermissionRationale(activity, CAMERA_PERMISSION);
+ }
- /**
- * Launch Application Setting to grant permission, show a toast to explain why.
- * This will finish() the activity, as we cannot run an AR activity without camera permissions.
- */
- public static void launchPermissionSettings(Activity activity) {
- Toast.makeText(activity, "Camera permission is needed", Toast.LENGTH_LONG).show();
- Intent intent = new Intent();
- intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
- intent.setData(Uri.fromParts("package", activity.getPackageName(), null));
- activity.startActivity(intent);
- activity.finish();
- }
+ /** Launch Application Setting to grant permission. */
+ public static void launchPermissionSettings(Activity activity) {
+ Intent intent = new Intent();
+ intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
+ intent.setData(Uri.fromParts("package", activity.getPackageName(), null));
+ activity.startActivity(intent);
+ }
}
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/DisplayRotationHelper.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/DisplayRotationHelper.java
index 1f7a062..6c8954a 100644
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/DisplayRotationHelper.java
+++ b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/DisplayRotationHelper.java
@@ -28,80 +28,78 @@
* events.
*/
public class DisplayRotationHelper implements DisplayListener {
- private boolean mViewportChanged;
- private int mViewportWidth;
- private int mViewportHeight;
- private final Context mContext;
- private final Display mDisplay;
+ private boolean viewportChanged;
+ private int viewportWidth;
+ private int viewportHeight;
+ private final Context context;
+ private final Display display;
- /**
- * Constructs the DisplayRotationHelper but does not register the listener yet.
- *
- * @param context the Android {@link Context}.
- */
- public DisplayRotationHelper(Context context) {
- mContext = context;
- mDisplay = context.getSystemService(WindowManager.class).getDefaultDisplay();
+ /**
+ * Constructs the DisplayRotationHelper but does not register the listener yet.
+ *
+ * @param context the Android {@link Context}.
+ */
+ public DisplayRotationHelper(Context context) {
+ this.context = context;
+ display = context.getSystemService(WindowManager.class).getDefaultDisplay();
+ }
+
+ /** Registers the display listener. Should be called from {@link Activity#onResume()}. */
+ public void onResume() {
+ context.getSystemService(DisplayManager.class).registerDisplayListener(this, null);
+ }
+
+ /** Unregisters the display listener. Should be called from {@link Activity#onPause()}. */
+ public void onPause() {
+ context.getSystemService(DisplayManager.class).unregisterDisplayListener(this);
+ }
+
+ /**
+ * Records a change in surface dimensions. This will be later used by {@link
+ * #updateSessionIfNeeded(Session)}. Should be called from {@link
+ * android.opengl.GLSurfaceView.Renderer
+ * #onSurfaceChanged(javax.microedition.khronos.opengles.GL10, int, int)}.
+ *
+ * @param width the updated width of the surface.
+ * @param height the updated height of the surface.
+ */
+ public void onSurfaceChanged(int width, int height) {
+ viewportWidth = width;
+ viewportHeight = height;
+ viewportChanged = true;
+ }
+
+ /**
+ * Updates the session display geometry if a change was posted either by {@link
+ * #onSurfaceChanged(int, int)} call or by {@link #onDisplayChanged(int)} system callback. This
+ * function should be called explicitly before each call to {@link Session#update()}. This
+ * function will also clear the 'pending update' (viewportChanged) flag.
+ *
+ * @param session the {@link Session} object to update if display geometry changed.
+ */
+ public void updateSessionIfNeeded(Session session) {
+ if (viewportChanged) {
+ int displayRotation = display.getRotation();
+ session.setDisplayGeometry(displayRotation, viewportWidth, viewportHeight);
+ viewportChanged = false;
}
+ }
- /** Registers the display listener. Should be called from {@link Activity#onResume()}. */
- public void onResume() {
- mContext.getSystemService(DisplayManager.class).registerDisplayListener(this, null);
- }
+ /**
+ * Returns the current rotation state of android display. Same as {@link Display#getRotation()}.
+ */
+ public int getRotation() {
+ return display.getRotation();
+ }
- /** Unregisters the display listener. Should be called from {@link Activity#onPause()}. */
- public void onPause() {
- mContext.getSystemService(DisplayManager.class).unregisterDisplayListener(this);
- }
+ @Override
+ public void onDisplayAdded(int displayId) {}
- /**
- * Records a change in surface dimensions. This will be later used by
- * {@link #updateSessionIfNeeded(Session)}. Should be called from
- * {@link android.opengl.GLSurfaceView.Renderer
- * #onSurfaceChanged(javax.microedition.khronos.opengles.GL10, int, int)}.
- *
- * @param width the updated width of the surface.
- * @param height the updated height of the surface.
- */
- public void onSurfaceChanged(int width, int height) {
- mViewportWidth = width;
- mViewportHeight = height;
- mViewportChanged = true;
- }
+ @Override
+ public void onDisplayRemoved(int displayId) {}
- /**
- * Updates the session display geometry if a change was posted either by
- * {@link #onSurfaceChanged(int, int)} call or by {@link #onDisplayChanged(int)} system
- * callback. This function should be called explicitly before each call to
- * {@link Session#update()}. This function will also clear the 'pending update'
- * (viewportChanged) flag.
- *
- * @param session the {@link Session} object to update if display geometry changed.
- */
- public void updateSessionIfNeeded(Session session) {
- if (mViewportChanged) {
- int displayRotation = mDisplay.getRotation();
- session.setDisplayGeometry(displayRotation, mViewportWidth, mViewportHeight);
- mViewportChanged = false;
- }
- }
-
- /**
- * Returns the current rotation state of android display.
- * Same as {@link Display#getRotation()}.
- */
- public int getRotation() {
- return mDisplay.getRotation();
- }
-
- @Override
- public void onDisplayAdded(int displayId) {}
-
- @Override
- public void onDisplayRemoved(int displayId) {}
-
- @Override
- public void onDisplayChanged(int displayId) {
- mViewportChanged = true;
- }
+ @Override
+ public void onDisplayChanged(int displayId) {
+ viewportChanged = true;
+ }
}
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/EdgeDetector.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/EdgeDetector.java
index 1e00559..aaeeaa9 100644
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/EdgeDetector.java
+++ b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/EdgeDetector.java
@@ -18,101 +18,98 @@
import android.util.Log;
import java.nio.ByteBuffer;
-/**
- * Detects edges from input grayscale image.
- */
+/** Detects edges from input grayscale image. */
public class EdgeDetector {
- private static final String TAG = EdgeDetector.class.getSimpleName();
+ private static final String TAG = EdgeDetector.class.getSimpleName();
- private static byte[] s_ImageBuffer = new byte[0];
- private static int s_ImageBufferSize = 0;
+ private static byte[] s_ImageBuffer = new byte[0];
+ private static int s_ImageBufferSize = 0;
- /**
- * Detects edges from the input grayscale image.
- *
- * @param outputImage Output image buffer, which has a size of width * height.
- * @param inputImage Input image.
- * @return False if the outputImage buffer is too small, True otherwise.
- */
- public static boolean detect(CameraImageBuffer outputImage, CameraImageBuffer inputImage) {
- if (inputImage == null
- || inputImage.format != CameraImageBuffer.IMAGE_FORMAT_I8) {
- Log.e(TAG, "Invalid input image!");
- return false;
- }
-
- if (outputImage == null) {
- Log.e(TAG, "Invalid output image!");
- return false;
- }
-
- // Recreate output image buffer if it is different from input image buffer.
- if (outputImage.width != inputImage.width
- || outputImage.height != inputImage.height
- || outputImage.format != inputImage.format
- || outputImage.buffer == null) {
- outputImage.width = inputImage.width;
- outputImage.height = inputImage.height;
- outputImage.format = inputImage.format;
- outputImage.buffer = ByteBuffer.allocate(inputImage.width * inputImage.height);
- }
-
- sobel(outputImage.buffer, inputImage.buffer, inputImage.width, inputImage.height);
-
- return true;
+ /**
+ * Detects edges from the input grayscale image.
+ *
+ * @param outputImage Output image buffer, which has a size of width * height.
+ * @param inputImage Input image.
+   * @return False if the input or output image is invalid, True otherwise.
+ */
+ public static boolean detect(CameraImageBuffer outputImage, CameraImageBuffer inputImage) {
+ if (inputImage == null || inputImage.format != CameraImageBuffer.IMAGE_FORMAT_I8) {
+ Log.e(TAG, "Invalid input image!");
+ return false;
}
- private static void sobel(
- ByteBuffer outputBuffer, ByteBuffer inputBuffer, int width, int height) {
- // Adjust buffer size if necessary.
- final int bufferSize = width * height;
- if (s_ImageBuffer.length < bufferSize || bufferSize < s_ImageBufferSize) {
- s_ImageBuffer = new byte[bufferSize];
- s_ImageBufferSize = bufferSize;
- }
-
- inputBuffer.position(0);
- inputBuffer.get(s_ImageBuffer);
-
- outputBuffer.position(0);
- byte[] outputPixel = outputBuffer.array();
-
- // Detect edges.
- int threshold = 128 * 128;
-
- for (int j = 1; j < height - 1; j++) {
- for (int i = 1; i < width - 1; i++) {
- // Offset of the pixel at [i, j] of the input image.
- int offset = (j * width) + i;
-
- // Neighbour pixels around the pixel at [i, j].
- int a00 = s_ImageBuffer[offset - width - 1];
- int a01 = s_ImageBuffer[offset - width];
- int a02 = s_ImageBuffer[offset - width + 1];
- int a10 = s_ImageBuffer[offset - 1];
- int a12 = s_ImageBuffer[offset + 1];
- int a20 = s_ImageBuffer[offset + width - 1];
- int a21 = s_ImageBuffer[offset + width];
- int a22 = s_ImageBuffer[offset + width + 1];
-
- // Sobel X filter:
- // -1, 0, 1,
- // -2, 0, 2,
- // -1, 0, 1
- int xSum = -a00 - (2 * a10) - a20 + a02 + (2 * a12) + a22;
-
- // Sobel Y filter:
- // 1, 2, 1,
- // 0, 0, 0,
- // -1, -2, -1
- int ySum = a00 + (2 * a01) + a02 - a20 - (2 * a21) - a22;
-
- if ((xSum * xSum) + (ySum * ySum) > threshold) {
- outputPixel[(j * width) + i] = (byte) 0xFF;
- } else {
- outputPixel[(j * width) + i] = (byte) 0x1F;
- }
- }
- }
+ if (outputImage == null) {
+ Log.e(TAG, "Invalid output image!");
+ return false;
}
+
+ // Recreate output image buffer if it is different from input image buffer.
+ if (outputImage.width != inputImage.width
+ || outputImage.height != inputImage.height
+ || outputImage.format != inputImage.format
+ || outputImage.buffer == null) {
+ outputImage.width = inputImage.width;
+ outputImage.height = inputImage.height;
+ outputImage.format = inputImage.format;
+ outputImage.buffer = ByteBuffer.allocate(inputImage.width * inputImage.height);
+ }
+
+ sobel(outputImage.buffer, inputImage.buffer, inputImage.width, inputImage.height);
+
+ return true;
+ }
+
+ private static void sobel(
+ ByteBuffer outputBuffer, ByteBuffer inputBuffer, int width, int height) {
+ // Adjust buffer size if necessary.
+ final int bufferSize = width * height;
+ if (s_ImageBuffer.length < bufferSize || bufferSize < s_ImageBufferSize) {
+ s_ImageBuffer = new byte[bufferSize];
+ s_ImageBufferSize = bufferSize;
+ }
+
+ inputBuffer.position(0);
+ inputBuffer.get(s_ImageBuffer);
+
+ outputBuffer.position(0);
+ byte[] outputPixel = outputBuffer.array();
+
+ // Detect edges.
+ int threshold = 128 * 128;
+
+ for (int j = 1; j < height - 1; j++) {
+ for (int i = 1; i < width - 1; i++) {
+ // Offset of the pixel at [i, j] of the input image.
+ int offset = (j * width) + i;
+
+ // Neighbour pixels around the pixel at [i, j].
+ int a00 = s_ImageBuffer[offset - width - 1];
+ int a01 = s_ImageBuffer[offset - width];
+ int a02 = s_ImageBuffer[offset - width + 1];
+ int a10 = s_ImageBuffer[offset - 1];
+ int a12 = s_ImageBuffer[offset + 1];
+ int a20 = s_ImageBuffer[offset + width - 1];
+ int a21 = s_ImageBuffer[offset + width];
+ int a22 = s_ImageBuffer[offset + width + 1];
+
+ // Sobel X filter:
+ // -1, 0, 1,
+ // -2, 0, 2,
+ // -1, 0, 1
+ int xSum = -a00 - (2 * a10) - a20 + a02 + (2 * a12) + a22;
+
+ // Sobel Y filter:
+ // 1, 2, 1,
+ // 0, 0, 0,
+ // -1, -2, -1
+ int ySum = a00 + (2 * a01) + a02 - a20 - (2 * a21) - a22;
+
+ if ((xSum * xSum) + (ySum * ySum) > threshold) {
+ outputPixel[(j * width) + i] = (byte) 0xFF;
+ } else {
+ outputPixel[(j * width) + i] = (byte) 0x1F;
+ }
+ }
+ }
+ }
}
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/TextureReader.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/TextureReader.java
index 83532b3..30ca19a 100644
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/TextureReader.java
+++ b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/TextureReader.java
@@ -60,434 +60,423 @@
* release the internal resource when you are done with the reader.
*/
public class TextureReader {
- private static final String TAG = TextureReader.class.getSimpleName();
+ private static final String TAG = TextureReader.class.getSimpleName();
- // By default, we create only two internal buffers. So you can only hold more than one buffer
- // index in your app without releasing it. If you need to hold more than one buffers, you can
- // increase the mBufferCount member.
- private final int mBufferCount = 2;
- private int[] mFrameBuffer;
- private int[] mTexture;
- private int[] mPBO;
- private Boolean[] mBufferUsed;
- private int mFrontIndex = -1;
- private int mBackIndex = -1;
+ // By default, we create only two internal buffers, so you cannot hold more than one buffer
+ // index in your app without releasing it. If you need to hold more than one buffer, you can
+ // increase the bufferCount member.
+ private final int bufferCount = 2;
+ private int[] frameBuffer;
+ private int[] texture;
+ private int[] pbo;
+ private Boolean[] bufferUsed;
+ private int frontIndex = -1;
+ private int backIndex = -1;
- // By default, the output image format is set to RGBA. You can also set it to IMAGE_FORMAT_I8.
- private int mImageFormat = CameraImageBuffer.IMAGE_FORMAT_RGBA;
- private int mImageWidth = 0;
- private int mImageHeight = 0;
- private int mPixelBufferSize = 0;
- private Boolean mKeepAspectRatio = false;
+ // By default, the output image format is set to RGBA. You can also set it to IMAGE_FORMAT_I8.
+ private int imageFormat = CameraImageBuffer.IMAGE_FORMAT_RGBA;
+ private int imageWidth = 0;
+ private int imageHeight = 0;
+ private int pixelBufferSize = 0;
+ private Boolean keepAspectRatio = false;
- private FloatBuffer mQuadVertices;
- private FloatBuffer mQuadTexCoord;
- private int mQuadProgram;
- private int mQuadPositionAttrib;
- private int mQuadTexCoordAttrib;
- private static final int COORDS_PER_VERTEX = 3;
- private static final int TEXCOORDS_PER_VERTEX = 2;
- private static final int FLOAT_SIZE = 4;
- private static final float[] QUAD_COORDS =
- new float[] {
- -1.0f, -1.0f, 0.0f,
- -1.0f, +1.0f, 0.0f,
- +1.0f, -1.0f, 0.0f,
- +1.0f, +1.0f, 0.0f,
- };
+ private FloatBuffer quadVertices;
+ private FloatBuffer quadTexCoord;
+ private int quadProgram;
+ private int quadPositionAttrib;
+ private int quadTexCoordAttrib;
+ private static final int COORDS_PER_VERTEX = 3;
+ private static final int TEXCOORDS_PER_VERTEX = 2;
+ private static final int FLOAT_SIZE = 4;
+ private static final float[] QUAD_COORDS =
+ new float[] {
+ -1.0f, -1.0f, 0.0f, -1.0f, +1.0f, 0.0f, +1.0f, -1.0f, 0.0f, +1.0f, +1.0f, 0.0f,
+ };
- private static final float[] QUAD_TEXCOORDS =
- new float[] {
- 0.0f, 0.0f,
- 0.0f, 1.0f,
- 1.0f, 0.0f,
- 1.0f, 1.0f,
- };
+ private static final float[] QUAD_TEXCOORDS =
+ new float[] {
+ 0.0f, 0.0f,
+ 0.0f, 1.0f,
+ 1.0f, 0.0f,
+ 1.0f, 1.0f,
+ };
- private static final String QUAD_RENDERING_VERTEX_SHADER =
- "// Vertex shader.\n"
- + "attribute vec4 a_Position;\n"
- + "attribute vec2 a_TexCoord;\n"
- + "varying vec2 v_TexCoord;\n"
- + "void main() {\n"
- + " gl_Position = a_Position;\n"
- + " v_TexCoord = a_TexCoord;\n"
- + "}";
+ private static final String QUAD_RENDERING_VERTEX_SHADER =
+ "// Vertex shader.\n"
+ + "attribute vec4 a_Position;\n"
+ + "attribute vec2 a_TexCoord;\n"
+ + "varying vec2 v_TexCoord;\n"
+ + "void main() {\n"
+ + " gl_Position = a_Position;\n"
+ + " v_TexCoord = a_TexCoord;\n"
+ + "}";
- private static final String QUAD_RENDERING_FRAGMENT_SHADER_RGBA =
- "// Fragment shader that renders to a RGBA texture.\n"
- + "#extension GL_OES_EGL_image_external : require\n"
- + "precision mediump float;\n"
- + "varying vec2 v_TexCoord;\n"
- + "uniform samplerExternalOES sTexture;\n"
- + "void main() {\n"
- + " gl_FragColor = texture2D(sTexture, v_TexCoord);\n"
- + "}";
+ private static final String QUAD_RENDERING_FRAGMENT_SHADER_RGBA =
+ "// Fragment shader that renders to a RGBA texture.\n"
+ + "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 v_TexCoord;\n"
+ + "uniform samplerExternalOES sTexture;\n"
+ + "void main() {\n"
+ + " gl_FragColor = texture2D(sTexture, v_TexCoord);\n"
+ + "}";
- private static final String QUAD_RENDERING_FRAGMENT_SHADER_I8 =
- "// Fragment shader that renders to a grayscale texture.\n"
- + "#extension GL_OES_EGL_image_external : require\n"
- + "precision mediump float;\n"
- + "varying vec2 v_TexCoord;\n"
- + "uniform samplerExternalOES sTexture;\n"
- + "void main() {\n"
- + " vec4 color = texture2D(sTexture, v_TexCoord);\n"
- + " gl_FragColor.r = color.r * 0.299 + color.g * 0.587 + color.b * 0.114;\n"
- + "}";
+ private static final String QUAD_RENDERING_FRAGMENT_SHADER_I8 =
+ "// Fragment shader that renders to a grayscale texture.\n"
+ + "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 v_TexCoord;\n"
+ + "uniform samplerExternalOES sTexture;\n"
+ + "void main() {\n"
+ + " vec4 color = texture2D(sTexture, v_TexCoord);\n"
+ + " gl_FragColor.r = color.r * 0.299 + color.g * 0.587 + color.b * 0.114;\n"
+ + "}";
- /**
- * Creates the texture reader.
- * This function needs to be called from the OpenGL rendering thread.
- *
- * @param format the format of the output pixel buffer. It can be one of the two values:
- * CameraImageBuffer.IMAGE_FORMAT_RGBA or CameraImageBuffer.IMAGE_FORMAT_I8.
- * @param width the width of the output image.
- * @param height the height of the output image.
- * @param keepAspectRatio whether or not to keep aspect ratio. If true, the output image may be
- * cropped if the image aspect ratio is different from the texture aspect ratio. If false,
- * the output image covers the entire texture scope and no cropping is applied.
- */
- public void create(int format, int width, int height, Boolean keepAspectRatio) {
- if (format != CameraImageBuffer.IMAGE_FORMAT_RGBA
- && format != CameraImageBuffer.IMAGE_FORMAT_I8) {
- throw new RuntimeException("Image format not supported.");
- }
-
- mKeepAspectRatio = keepAspectRatio;
- mImageFormat = format;
- mImageWidth = width;
- mImageHeight = height;
- mFrontIndex = -1;
- mBackIndex = -1;
-
- if (mImageFormat == CameraImageBuffer.IMAGE_FORMAT_RGBA) {
- mPixelBufferSize = mImageWidth * mImageHeight * 4;
- } else if (mImageFormat == CameraImageBuffer.IMAGE_FORMAT_I8) {
- mPixelBufferSize = mImageWidth * mImageHeight;
- }
-
- // Create framebuffers and PBOs.
- mPBO = new int[mBufferCount];
- mFrameBuffer = new int[mBufferCount];
- mTexture = new int[mBufferCount];
- mBufferUsed = new Boolean[mBufferCount];
- GLES30.glGenBuffers(mBufferCount, mPBO, 0);
- GLES20.glGenFramebuffers(mBufferCount, mFrameBuffer, 0);
- GLES20.glGenTextures(mBufferCount, mTexture, 0);
-
- for (int i = 0; i < mBufferCount; i++) {
- mBufferUsed[i] = false;
- GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffer[i]);
-
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTexture[i]);
- GLES30.glTexImage2D(
- GLES30.GL_TEXTURE_2D,
- 0,
- mImageFormat == CameraImageBuffer.IMAGE_FORMAT_I8 ? GLES30.GL_R8 : GLES30.GL_RGBA,
- mImageWidth,
- mImageHeight,
- 0,
- mImageFormat == CameraImageBuffer.IMAGE_FORMAT_I8 ? GLES30.GL_RED : GLES30.GL_RGBA,
- GLES30.GL_UNSIGNED_BYTE,
- null);
- GLES20.glTexParameteri(
- GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(
- GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
- GLES20.GL_LINEAR);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
- GLES20.GL_LINEAR);
- GLES20.glFramebufferTexture2D(
- GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D,
- mTexture[i], 0);
-
- int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
- if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
- throw new RuntimeException(
- this
- + ": Failed to set up render buffer with status "
- + status
- + " and error "
- + GLES20.glGetError());
- }
-
- // Setup PBOs
- GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, mPBO[i]);
- GLES30.glBufferData(
- GLES30.GL_PIXEL_PACK_BUFFER, mPixelBufferSize, null, GLES30.GL_DYNAMIC_READ);
- GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, 0);
- }
-
- // Load shader program.
- int numVertices = 4;
- if (numVertices != QUAD_COORDS.length / COORDS_PER_VERTEX) {
- throw new RuntimeException("Unexpected number of vertices in BackgroundRenderer.");
- }
-
- ByteBuffer bbVertices = ByteBuffer.allocateDirect(QUAD_COORDS.length * FLOAT_SIZE);
- bbVertices.order(ByteOrder.nativeOrder());
- mQuadVertices = bbVertices.asFloatBuffer();
- mQuadVertices.put(QUAD_COORDS);
- mQuadVertices.position(0);
-
- ByteBuffer bbTexCoords =
- ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
- bbTexCoords.order(ByteOrder.nativeOrder());
- mQuadTexCoord = bbTexCoords.asFloatBuffer();
- mQuadTexCoord.put(QUAD_TEXCOORDS);
- mQuadTexCoord.position(0);
-
- int vertexShader =
- ShaderUtil.loadGLShader(TAG, GLES20.GL_VERTEX_SHADER, QUAD_RENDERING_VERTEX_SHADER);
- int fragmentShader =
- ShaderUtil.loadGLShader(
- TAG,
- GLES20.GL_FRAGMENT_SHADER,
- mImageFormat == CameraImageBuffer.IMAGE_FORMAT_I8
- ? QUAD_RENDERING_FRAGMENT_SHADER_I8
- : QUAD_RENDERING_FRAGMENT_SHADER_RGBA);
-
- mQuadProgram = GLES20.glCreateProgram();
- GLES20.glAttachShader(mQuadProgram, vertexShader);
- GLES20.glAttachShader(mQuadProgram, fragmentShader);
- GLES20.glLinkProgram(mQuadProgram);
- GLES20.glUseProgram(mQuadProgram);
-
- mQuadPositionAttrib = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
- mQuadTexCoordAttrib = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
- int texLoc = GLES20.glGetUniformLocation(mQuadProgram, "sTexture");
- GLES20.glUniform1i(texLoc, 0);
+ /**
+ * Creates the texture reader. This function needs to be called from the OpenGL rendering thread.
+ *
+ * @param format the format of the output pixel buffer. It can be one of the two values:
+ * CameraImageBuffer.IMAGE_FORMAT_RGBA or CameraImageBuffer.IMAGE_FORMAT_I8.
+ * @param width the width of the output image.
+ * @param height the height of the output image.
+ * @param keepAspectRatio whether or not to keep aspect ratio. If true, the output image may be
+ * cropped if the image aspect ratio is different from the texture aspect ratio. If false, the
+ * output image covers the entire texture scope and no cropping is applied.
+ */
+ public void create(int format, int width, int height, Boolean keepAspectRatio) {
+ if (format != CameraImageBuffer.IMAGE_FORMAT_RGBA
+ && format != CameraImageBuffer.IMAGE_FORMAT_I8) {
+ throw new RuntimeException("Image format not supported.");
}
- /** Destroy the texture reader. */
- public void destroy() {
- if (mFrameBuffer != null) {
- GLES20.glDeleteFramebuffers(mBufferCount, mFrameBuffer, 0);
- mFrameBuffer = null;
- }
- if (mTexture != null) {
- GLES20.glDeleteTextures(mBufferCount, mTexture, 0);
- mTexture = null;
- }
- if (mPBO != null) {
- GLES30.glDeleteBuffers(mBufferCount, mPBO, 0);
- mPBO = null;
- }
+ this.keepAspectRatio = keepAspectRatio;
+ imageFormat = format;
+ imageWidth = width;
+ imageHeight = height;
+ frontIndex = -1;
+ backIndex = -1;
+
+ if (imageFormat == CameraImageBuffer.IMAGE_FORMAT_RGBA) {
+ pixelBufferSize = imageWidth * imageHeight * 4;
+ } else if (imageFormat == CameraImageBuffer.IMAGE_FORMAT_I8) {
+ pixelBufferSize = imageWidth * imageHeight;
}
- /**
- * Submits a frame reading request. This routine does not return the result frame buffer
- * immediately. Instead, it returns a frame buffer index, which can be used to acquire the frame
- * buffer later through acquireFrame().
- *
- * <p>If there is no enough frame buffer available, an exception will be thrown.
- *
- * @param textureId the id of the input OpenGL texture.
- * @param textureWidth width of the texture in pixels.
- * @param textureHeight height of the texture in pixels.
- * @return the index to the frame buffer this request is associated to. You should use this
- * index to acquire the frame using acquireFrame(); and you should release the frame buffer
- * using releaseBuffer() routine after using of the frame.
- */
- public int submitFrame(int textureId, int textureWidth, int textureHeight) {
- // Find next buffer.
- int bufferIndex = -1;
- for (int i = 0; i < mBufferCount; i++) {
- if (!mBufferUsed[i]) {
- bufferIndex = i;
- break;
- }
- }
- if (bufferIndex == -1) {
- throw new RuntimeException("No buffer available.");
- }
+ // Create framebuffers and PBOs.
+ pbo = new int[bufferCount];
+ frameBuffer = new int[bufferCount];
+ texture = new int[bufferCount];
+ bufferUsed = new Boolean[bufferCount];
+ GLES30.glGenBuffers(bufferCount, pbo, 0);
+ GLES20.glGenFramebuffers(bufferCount, frameBuffer, 0);
+ GLES20.glGenTextures(bufferCount, texture, 0);
- // Bind both read and write to framebuffer.
- GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffer[bufferIndex]);
+ for (int i = 0; i < bufferCount; i++) {
+ bufferUsed[i] = false;
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[i]);
- // Save and setup viewport
- IntBuffer viewport = IntBuffer.allocate(4);
- GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, viewport);
- GLES20.glViewport(0, 0, mImageWidth, mImageHeight);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[i]);
+ GLES30.glTexImage2D(
+ GLES30.GL_TEXTURE_2D,
+ 0,
+ imageFormat == CameraImageBuffer.IMAGE_FORMAT_I8 ? GLES30.GL_R8 : GLES30.GL_RGBA,
+ imageWidth,
+ imageHeight,
+ 0,
+ imageFormat == CameraImageBuffer.IMAGE_FORMAT_I8 ? GLES30.GL_RED : GLES30.GL_RGBA,
+ GLES30.GL_UNSIGNED_BYTE,
+ null);
+ GLES20.glTexParameteri(
+ GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(
+ GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glFramebufferTexture2D(
+ GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texture[i], 0);
- // Draw texture to framebuffer.
- drawTexture(textureId, textureWidth, textureHeight);
+ int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+ if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ throw new RuntimeException(
+ this
+ + ": Failed to set up render buffer with status "
+ + status
+ + " and error "
+ + GLES20.glGetError());
+ }
- // Start reading into PBO
- GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, mPBO[bufferIndex]);
- GLES30.glReadBuffer(GLES30.GL_COLOR_ATTACHMENT0);
-
- GLES30.glReadPixels(
- 0,
- 0,
- mImageWidth,
- mImageHeight,
- mImageFormat == CameraImageBuffer.IMAGE_FORMAT_I8 ? GLES30.GL_RED : GLES20.GL_RGBA,
- GLES20.GL_UNSIGNED_BYTE,
- 0);
-
- // Restore viewport.
- GLES20.glViewport(viewport.get(0), viewport.get(1), viewport.get(2), viewport.get(3));
-
- GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
- GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, 0);
-
- mBufferUsed[bufferIndex] = true;
- return bufferIndex;
+ // Setup PBOs
+ GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, pbo[i]);
+ GLES30.glBufferData(
+ GLES30.GL_PIXEL_PACK_BUFFER, pixelBufferSize, null, GLES30.GL_DYNAMIC_READ);
+ GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, 0);
}
- /**
- * Acquires the frame requested earlier. This routine returns a CameraImageBuffer object that
- * contains the pixels mapped to the frame buffer requested previously through submitFrame().
- *
- * <p>If input buffer index is invalid, an exception will be thrown.
- *
- * @param bufferIndex the index to the frame buffer to be acquired. It has to be a frame index
- * returned from submitFrame().
- * @return a CameraImageBuffer object if succeed. Null otherwise.
- */
- public CameraImageBuffer acquireFrame(int bufferIndex) {
- if (bufferIndex < 0 || bufferIndex >= mBufferCount || !mBufferUsed[bufferIndex]) {
- throw new RuntimeException("Invalid buffer index.");
- }
-
- // Bind the current PB and acquire the pixel buffer.
- GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, mPBO[bufferIndex]);
- ByteBuffer mapped =
- (ByteBuffer)
- GLES30.glMapBufferRange(
- GLES30.GL_PIXEL_PACK_BUFFER, 0, mPixelBufferSize, GLES30.GL_MAP_READ_BIT);
-
- // Wrap the mapped buffer into CameraImageBuffer object.
- CameraImageBuffer buffer =
- new CameraImageBuffer(mImageWidth, mImageHeight, mImageFormat, mapped);
-
- return buffer;
+ // Load shader program.
+ int numVertices = 4;
+ if (numVertices != QUAD_COORDS.length / COORDS_PER_VERTEX) {
+ throw new RuntimeException("Unexpected number of vertices in BackgroundRenderer.");
}
- /**
- * Releases a previously requested frame buffer. If input buffer index is invalid, an exception
- * will be thrown.
- *
- * @param bufferIndex the index to the frame buffer to be acquired. It has to be a frame index
- * returned from submitFrame().
- */
- public void releaseFrame(int bufferIndex) {
- if (bufferIndex < 0 || bufferIndex >= mBufferCount || !mBufferUsed[bufferIndex]) {
- throw new RuntimeException("Invalid buffer index.");
- }
- GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, mPBO[bufferIndex]);
- GLES30.glUnmapBuffer(GLES30.GL_PIXEL_PACK_BUFFER);
- GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, 0);
- mBufferUsed[bufferIndex] = false;
+ ByteBuffer bbVertices = ByteBuffer.allocateDirect(QUAD_COORDS.length * FLOAT_SIZE);
+ bbVertices.order(ByteOrder.nativeOrder());
+ quadVertices = bbVertices.asFloatBuffer();
+ quadVertices.put(QUAD_COORDS);
+ quadVertices.position(0);
+
+ ByteBuffer bbTexCoords =
+ ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
+ bbTexCoords.order(ByteOrder.nativeOrder());
+ quadTexCoord = bbTexCoords.asFloatBuffer();
+ quadTexCoord.put(QUAD_TEXCOORDS);
+ quadTexCoord.position(0);
+
+ int vertexShader =
+ ShaderUtil.loadGLShader(TAG, GLES20.GL_VERTEX_SHADER, QUAD_RENDERING_VERTEX_SHADER);
+ int fragmentShader =
+ ShaderUtil.loadGLShader(
+ TAG,
+ GLES20.GL_FRAGMENT_SHADER,
+ imageFormat == CameraImageBuffer.IMAGE_FORMAT_I8
+ ? QUAD_RENDERING_FRAGMENT_SHADER_I8
+ : QUAD_RENDERING_FRAGMENT_SHADER_RGBA);
+
+ quadProgram = GLES20.glCreateProgram();
+ GLES20.glAttachShader(quadProgram, vertexShader);
+ GLES20.glAttachShader(quadProgram, fragmentShader);
+ GLES20.glLinkProgram(quadProgram);
+ GLES20.glUseProgram(quadProgram);
+
+ quadPositionAttrib = GLES20.glGetAttribLocation(quadProgram, "a_Position");
+ quadTexCoordAttrib = GLES20.glGetAttribLocation(quadProgram, "a_TexCoord");
+ int texLoc = GLES20.glGetUniformLocation(quadProgram, "sTexture");
+ GLES20.glUniform1i(texLoc, 0);
+ }
+
+ /** Destroy the texture reader. */
+ public void destroy() {
+ if (frameBuffer != null) {
+ GLES20.glDeleteFramebuffers(bufferCount, frameBuffer, 0);
+ frameBuffer = null;
+ }
+ if (texture != null) {
+ GLES20.glDeleteTextures(bufferCount, texture, 0);
+ texture = null;
+ }
+ if (pbo != null) {
+ GLES30.glDeleteBuffers(bufferCount, pbo, 0);
+ pbo = null;
+ }
+ }
+
+ /**
+ * Submits a frame reading request. This routine does not return the result frame buffer
+ * immediately. Instead, it returns a frame buffer index, which can be used to acquire the frame
+ * buffer later through acquireFrame().
+ *
+ * <p>If there is not enough frame buffer available, an exception will be thrown.
+ *
+ * @param textureId the id of the input OpenGL texture.
+ * @param textureWidth width of the texture in pixels.
+ * @param textureHeight height of the texture in pixels.
+ * @return the index to the frame buffer this request is associated with. You should use this index
+ * to acquire the frame using acquireFrame(), and you should release the frame buffer using
+ * the releaseFrame() routine after use of the frame.
+ */
+ public int submitFrame(int textureId, int textureWidth, int textureHeight) {
+ // Find next buffer.
+ int bufferIndex = -1;
+ for (int i = 0; i < bufferCount; i++) {
+ if (!bufferUsed[i]) {
+ bufferIndex = i;
+ break;
+ }
+ }
+ if (bufferIndex == -1) {
+ throw new RuntimeException("No buffer available.");
}
- /**
- * Reads pixels using dual buffers. This function sends the reading request to GPU and returns
- * the result from the previous call. Thus, the first call always returns null. The pixelBuffer
- * member in the returned object maps to the internal buffer. This buffer cannot be overrode,
- * and it becomes invalid after next call to submitAndAcquire().
- *
- * @param textureId the OpenGL texture Id.
- * @param textureWidth width of the texture in pixels.
- * @param textureHeight height of the texture in pixels.
- * @return a CameraImageBuffer object that contains the pixels read from the texture.
- */
- public CameraImageBuffer submitAndAcquire(int textureId, int textureWidth, int textureHeight) {
- // Release previously used front buffer.
- if (mFrontIndex != -1) {
- releaseFrame(mFrontIndex);
- }
+ // Bind both read and write to framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[bufferIndex]);
- // Move previous back buffer to front buffer.
- mFrontIndex = mBackIndex;
+ // Save and setup viewport
+ IntBuffer viewport = IntBuffer.allocate(4);
+ GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, viewport);
+ GLES20.glViewport(0, 0, imageWidth, imageHeight);
- // Submit new request on back buffer.
- mBackIndex = submitFrame(textureId, textureWidth, textureHeight);
+ // Draw texture to framebuffer.
+ drawTexture(textureId, textureWidth, textureHeight);
- // Acquire frame from the new front buffer.
- if (mFrontIndex != -1) {
- return acquireFrame(mFrontIndex);
- }
+ // Start reading into PBO
+ GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, pbo[bufferIndex]);
+ GLES30.glReadBuffer(GLES30.GL_COLOR_ATTACHMENT0);
- return null;
+ GLES30.glReadPixels(
+ 0,
+ 0,
+ imageWidth,
+ imageHeight,
+ imageFormat == CameraImageBuffer.IMAGE_FORMAT_I8 ? GLES30.GL_RED : GLES20.GL_RGBA,
+ GLES20.GL_UNSIGNED_BYTE,
+ 0);
+
+ // Restore viewport.
+ GLES20.glViewport(viewport.get(0), viewport.get(1), viewport.get(2), viewport.get(3));
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, 0);
+
+ bufferUsed[bufferIndex] = true;
+ return bufferIndex;
+ }
+
+ /**
+ * Acquires the frame requested earlier. This routine returns a CameraImageBuffer object that
+ * contains the pixels mapped to the frame buffer requested previously through submitFrame().
+ *
+ * <p>If input buffer index is invalid, an exception will be thrown.
+ *
+ * @param bufferIndex the index to the frame buffer to be acquired. It has to be a frame index
+ * returned from submitFrame().
+ * @return a CameraImageBuffer object on success; null otherwise.
+ */
+ public CameraImageBuffer acquireFrame(int bufferIndex) {
+ if (bufferIndex < 0 || bufferIndex >= bufferCount || !bufferUsed[bufferIndex]) {
+ throw new RuntimeException("Invalid buffer index.");
}
- /** Draws texture to full screen. */
- private void drawTexture(int textureId, int textureWidth, int textureHeight) {
- // Disable features that we don't use.
- GLES20.glDisable(GLES20.GL_DEPTH_TEST);
- GLES20.glDisable(GLES20.GL_CULL_FACE);
- GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
- GLES20.glDisable(GLES20.GL_STENCIL_TEST);
- GLES20.glDisable(GLES20.GL_BLEND);
- GLES20.glDepthMask(false);
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
- GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
- GLES30.glBindVertexArray(0);
+ // Bind the current PB and acquire the pixel buffer.
+ GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, pbo[bufferIndex]);
+ ByteBuffer mapped =
+ (ByteBuffer)
+ GLES30.glMapBufferRange(
+ GLES30.GL_PIXEL_PACK_BUFFER, 0, pixelBufferSize, GLES30.GL_MAP_READ_BIT);
- // Clear buffers.
- GLES20.glClearColor(0, 0, 0, 0);
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
+ // Wrap the mapped buffer into CameraImageBuffer object.
+ CameraImageBuffer buffer = new CameraImageBuffer(imageWidth, imageHeight, imageFormat, mapped);
- // Set the vertex positions.
- GLES20.glVertexAttribPointer(
- mQuadPositionAttrib, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mQuadVertices);
+ return buffer;
+ }
- // Calculate the texture coordinates.
- if (mKeepAspectRatio) {
- int renderWidth = 0;
- int renderHeight = 0;
- float textureAspectRatio = (float) (textureWidth) / textureHeight;
- float imageAspectRatio = (float) (mImageWidth) / mImageHeight;
- if (textureAspectRatio < imageAspectRatio) {
- renderWidth = mImageWidth;
- renderHeight = textureHeight * mImageWidth / textureWidth;
- } else {
- renderWidth = textureWidth * mImageHeight / textureHeight;
- renderHeight = mImageHeight;
- }
- float offsetU = (float) (renderWidth - mImageWidth) / renderWidth / 2;
- float offsetV = (float) (renderHeight - mImageHeight) / renderHeight / 2;
-
- float[] texCoords =
- new float[] {
- offsetU, offsetV,
- offsetU, 1 - offsetV,
- 1 - offsetU, offsetV,
- 1 - offsetU, 1 - offsetV
- };
-
- mQuadTexCoord.put(texCoords);
- mQuadTexCoord.position(0);
- } else {
- mQuadTexCoord.put(QUAD_TEXCOORDS);
- mQuadTexCoord.position(0);
- }
-
- // Set the texture coordinates.
- GLES20.glVertexAttribPointer(
- mQuadTexCoordAttrib, TEXCOORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mQuadTexCoord);
-
- // Enable vertex arrays
- GLES20.glEnableVertexAttribArray(mQuadPositionAttrib);
- GLES20.glEnableVertexAttribArray(mQuadTexCoordAttrib);
-
- GLES20.glUseProgram(mQuadProgram);
-
- // Select input texture.
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
- GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
-
- // Draw a quad with texture.
- GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
-
- // Disable vertex arrays
- GLES20.glDisableVertexAttribArray(mQuadPositionAttrib);
- GLES20.glDisableVertexAttribArray(mQuadTexCoordAttrib);
-
- // Reset texture binding.
- GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ /**
+ * Releases a previously requested frame buffer. If input buffer index is invalid, an exception
+ * will be thrown.
+ *
+ * @param bufferIndex the index to the frame buffer to be released. It has to be a frame index
+ * returned from submitFrame().
+ */
+ public void releaseFrame(int bufferIndex) {
+ if (bufferIndex < 0 || bufferIndex >= bufferCount || !bufferUsed[bufferIndex]) {
+ throw new RuntimeException("Invalid buffer index.");
}
+ GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, pbo[bufferIndex]);
+ GLES30.glUnmapBuffer(GLES30.GL_PIXEL_PACK_BUFFER);
+ GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, 0);
+ bufferUsed[bufferIndex] = false;
+ }
+
+ /**
+ * Reads pixels using dual buffers. This function sends the reading request to the GPU and returns
+ * the result from the previous call. Thus, the first call always returns null. The pixelBuffer
+ * member in the returned object maps to the internal buffer. This buffer cannot be overwritten,
+ * and it becomes invalid after the next call to submitAndAcquire().
+ *
+ * @param textureId the OpenGL texture Id.
+ * @param textureWidth width of the texture in pixels.
+ * @param textureHeight height of the texture in pixels.
+ * @return a CameraImageBuffer object that contains the pixels read from the texture.
+ */
+ public CameraImageBuffer submitAndAcquire(int textureId, int textureWidth, int textureHeight) {
+ // Release previously used front buffer.
+ if (frontIndex != -1) {
+ releaseFrame(frontIndex);
+ }
+
+ // Move previous back buffer to front buffer.
+ frontIndex = backIndex;
+
+ // Submit new request on back buffer.
+ backIndex = submitFrame(textureId, textureWidth, textureHeight);
+
+ // Acquire frame from the new front buffer.
+ if (frontIndex != -1) {
+ return acquireFrame(frontIndex);
+ }
+
+ return null;
+ }
+
+ /** Draws texture to full screen. */
+ private void drawTexture(int textureId, int textureWidth, int textureHeight) {
+ // Disable features that we don't use.
+ GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+ GLES20.glDisable(GLES20.GL_CULL_FACE);
+ GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
+ GLES20.glDisable(GLES20.GL_STENCIL_TEST);
+ GLES20.glDisable(GLES20.GL_BLEND);
+ GLES20.glDepthMask(false);
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
+ GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
+ GLES30.glBindVertexArray(0);
+
+ // Clear buffers.
+ GLES20.glClearColor(0, 0, 0, 0);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
+
+ // Set the vertex positions.
+ GLES20.glVertexAttribPointer(
+ quadPositionAttrib, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadVertices);
+
+ // Calculate the texture coordinates.
+ if (keepAspectRatio) {
+ int renderWidth = 0;
+ int renderHeight = 0;
+ float textureAspectRatio = (float) (textureWidth) / textureHeight;
+ float imageAspectRatio = (float) (imageWidth) / imageHeight;
+ if (textureAspectRatio < imageAspectRatio) {
+ renderWidth = imageWidth;
+ renderHeight = textureHeight * imageWidth / textureWidth;
+ } else {
+ renderWidth = textureWidth * imageHeight / textureHeight;
+ renderHeight = imageHeight;
+ }
+ float offsetU = (float) (renderWidth - imageWidth) / renderWidth / 2;
+ float offsetV = (float) (renderHeight - imageHeight) / renderHeight / 2;
+
+ float[] texCoords =
+ new float[] {
+ offsetU, offsetV, offsetU, 1 - offsetV, 1 - offsetU, offsetV, 1 - offsetU, 1 - offsetV
+ };
+
+ quadTexCoord.put(texCoords);
+ quadTexCoord.position(0);
+ } else {
+ quadTexCoord.put(QUAD_TEXCOORDS);
+ quadTexCoord.position(0);
+ }
+
+ // Set the texture coordinates.
+ GLES20.glVertexAttribPointer(
+ quadTexCoordAttrib, TEXCOORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadTexCoord);
+
+ // Enable vertex arrays
+ GLES20.glEnableVertexAttribArray(quadPositionAttrib);
+ GLES20.glEnableVertexAttribArray(quadTexCoordAttrib);
+
+ GLES20.glUseProgram(quadProgram);
+
+ // Select input texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+
+ // Draw a quad with texture.
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Disable vertex arrays
+ GLES20.glDisableVertexAttribArray(quadPositionAttrib);
+ GLES20.glDisableVertexAttribArray(quadTexCoordAttrib);
+
+ // Reset texture binding.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
}
diff --git a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/package-info.java b/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/package-info.java
deleted file mode 100644
index 13e78a8..0000000
--- a/samples/computervision/app/src/main/java/com/google/ar/core/examples/java/computervision/utility/package-info.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 2017 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * A sample showing how to build a computer vision application using ARCore.
- */
-package com.google.ar.core.examples.java.computervision.utility;
diff --git a/samples/computervision/app/src/main/res/layout/activity_main.xml b/samples/computervision/app/src/main/res/layout/activity_main.xml
index 8f7cb9a..5c8e2fa 100644
--- a/samples/computervision/app/src/main/res/layout/activity_main.xml
+++ b/samples/computervision/app/src/main/res/layout/activity_main.xml
@@ -19,10 +19,10 @@
android:layout_height="match_parent"
tools:context="com.google.ar.core.examples.java.computervision.MainActivity">
- <android.opengl.GLSurfaceView
- android:id="@+id/surfaceview"
- android:layout_width="fill_parent"
- android:layout_height="fill_parent"
- android:layout_gravity="top" />
+ <android.opengl.GLSurfaceView
+ android:id="@+id/surfaceview"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:layout_gravity="top"/>
</RelativeLayout>
diff --git a/samples/computervision/app/src/main/res/values/strings.xml b/samples/computervision/app/src/main/res/values/strings.xml
index 1a579d0..a980031 100644
--- a/samples/computervision/app/src/main/res/values/strings.xml
+++ b/samples/computervision/app/src/main/res/values/strings.xml
@@ -15,5 +15,5 @@
limitations under the License.
-->
<resources>
- <string name="app_name">CV Java</string>
+ <string name="app_name">CV Java</string>
</resources>
diff --git a/samples/computervision/app/src/main/res/values/styles.xml b/samples/computervision/app/src/main/res/values/styles.xml
index 0ecfbc5..3a71bd3 100644
--- a/samples/computervision/app/src/main/res/values/styles.xml
+++ b/samples/computervision/app/src/main/res/values/styles.xml
@@ -15,21 +15,21 @@
-->
<resources>
+ <!--
+ Base application theme, dependent on API level. This theme is replaced
+ by AppBaseTheme from res/values-vXX/styles.xml on newer devices.
+ -->
+ <style name="AppBaseTheme" parent="android:Theme.Light">
<!--
- Base application theme, dependent on API level. This theme is replaced
- by AppBaseTheme from res/values-vXX/styles.xml on newer devices.
+ Theme customizations available in newer API levels can go in
+ res/values-vXX/styles.xml, while customizations related to
+ backward-compatibility can go here.
-->
- <style name="AppBaseTheme" parent="android:Theme.Light">
- <!--
- Theme customizations available in newer API levels can go in
- res/values-vXX/styles.xml, while customizations related to
- backward-compatibility can go here.
- -->
- </style>
+ </style>
- <!-- Application theme. -->
- <style name="AppTheme" parent="AppBaseTheme">
- <!-- All customizations that are NOT specific to a particular API-level can go here. -->
- </style>
+ <!-- Application theme. -->
+ <style name="AppTheme" parent="AppBaseTheme">
+ <!-- All customizations that are NOT specific to a particular API-level can go here. -->
+ </style>
</resources>
diff --git a/samples/computervision/build.gradle b/samples/computervision/build.gradle
index 85691a6..be505f0 100644
--- a/samples/computervision/build.gradle
+++ b/samples/computervision/build.gradle
@@ -17,9 +17,6 @@
google()
jcenter()
mavenLocal()
- maven {
- url "${project.rootDir}/../../libraries/m2repository"
- }
}
}
diff --git a/samples/computervision/gradle/wrapper/gradle-wrapper.jar b/samples/computervision/gradle/wrapper/gradle-wrapper.jar
index 12a0871..7a3265e 100644
--- a/samples/computervision/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/computervision/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/samples/hello_ar_c/app/build.gradle b/samples/hello_ar_c/app/build.gradle
index 06eeeb0..018d898 100644
--- a/samples/hello_ar_c/app/build.gradle
+++ b/samples/hello_ar_c/app/build.gradle
@@ -29,7 +29,7 @@
}
}
ndk {
- abiFilters "arm64-v8a"
+ abiFilters "arm64-v8a", "x86"
}
}
buildTypes {
@@ -47,8 +47,8 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:0.91.0'
- natives 'com.google.ar:core:0.91.0'
+ implementation 'com.google.ar:core:1.0.0'
+ natives 'com.google.ar:core:1.0.0'
implementation 'com.android.support:appcompat-v7:27.0.2'
implementation 'com.android.support:design:27.0.2'
@@ -57,11 +57,20 @@
// Extracts the shared libraries from aars in the natives configuration.
// This is done so that NDK builds can access these libraries.
task extractNativeLibraries() {
- configurations.natives.files.each { f ->
- copy {
- from zipTree(f)
- into arcore_libpath
- include "jni/**/*"
- }
- }
+ doFirst {
+ configurations.natives.files.each { f ->
+ copy {
+ from zipTree(f)
+ into arcore_libpath
+ include "jni/**/*"
+ }
+ }
+ }
}
+
+tasks.whenTaskAdded {
+ task-> if (task.name.contains("external") && !task.name.contains("Clean")) {
+ task.dependsOn(extractNativeLibraries)
+ }
+}
+
diff --git a/samples/hello_ar_c/app/src/main/AndroidManifest.xml b/samples/hello_ar_c/app/src/main/AndroidManifest.xml
index 5c57f5a..242b256 100644
--- a/samples/hello_ar_c/app/src/main/AndroidManifest.xml
+++ b/samples/hello_ar_c/app/src/main/AndroidManifest.xml
@@ -15,28 +15,34 @@
limitations under the License.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
- package="com.google.ar.core.examples.c.helloar">
+ package="com.google.ar.core.examples.c.helloar">
- <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.CAMERA"/>
+ <!-- This tag indicates that this application requires ARCore. This results in the application
+ only being visible in the Google Play Store on devices that support ARCore. -->
+ <uses-feature android:name="android.hardware.camera.ar" android:required="true"/>
- <application
- android:allowBackup="true"
- android:icon="@drawable/ic_launcher"
- android:label="@string/app_name"
- android:theme="@style/AppTheme"
- android:usesCleartextTraffic="false">
+ <application
+ android:allowBackup="true"
+ android:icon="@drawable/ic_launcher"
+ android:label="@string/app_name"
+ android:theme="@style/AppTheme"
+ android:usesCleartextTraffic="false">
+ <!-- This tag indicates that this application requires ARCore. This results in the Google Play
+ Store downloading and installing ARCore along with the application. -->
+ <meta-data android:name="com.google.ar.core" android:value="required" />
- <activity
- android:name=".HelloArActivity"
- android:label="@string/app_name"
- android:configChanges="orientation|screenSize"
- android:exported="true"
- android:theme="@style/Theme.AppCompat.NoActionBar"
- android:screenOrientation="locked">
- <intent-filter>
- <action android:name="android.intent.action.MAIN" />
- <category android:name="android.intent.category.LAUNCHER" />
- </intent-filter>
- </activity>
- </application>
+ <activity
+ android:name=".HelloArActivity"
+ android:label="@string/app_name"
+ android:configChanges="orientation|screenSize"
+ android:exported="true"
+ android:theme="@style/Theme.AppCompat.NoActionBar"
+ android:screenOrientation="locked">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN"/>
+ <category android:name="android.intent.category.LAUNCHER"/>
+ </intent-filter>
+ </activity>
+ </application>
</manifest>
diff --git a/samples/hello_ar_c/app/src/main/cpp/hello_ar_application.cc b/samples/hello_ar_c/app/src/main/cpp/hello_ar_application.cc
index d2ee96e..12d3b96 100644
--- a/samples/hello_ar_c/app/src/main/cpp/hello_ar_application.cc
+++ b/samples/hello_ar_c/app/src/main/cpp/hello_ar_application.cc
@@ -24,7 +24,11 @@
namespace hello_ar {
namespace {
+constexpr size_t kMaxNumberOfAndroidsToRender = 20;
constexpr int32_t kPlaneColorRgbaSize = 16;
+
+const glm::vec3 kWhite = {255, 255, 255};
+
constexpr std::array<uint32_t, kPlaneColorRgbaSize> kPlaneColorRgba = {
0xFFFFFFFF, 0xF44336FF, 0xE91E63FF, 0x9C27B0FF, 0x673AB7FF, 0x3F51B5FF,
0x2196F3FF, 0x03A9F4FF, 0x00BCD4FF, 0x009688FF, 0x4CAF50FF, 0x8BC34AFF,
@@ -38,46 +42,75 @@
}
} // namespace
-HelloArApplication::HelloArApplication(AAssetManager* asset_manager, void* env,
- void* context)
+HelloArApplication::HelloArApplication(AAssetManager* asset_manager)
: asset_manager_(asset_manager) {
LOGI("OnCreate()");
-
- // === ATTENTION! ATTENTION! ATTENTION! ===
- // This method can and will fail in user-facing situations. Your application
- // must handle these cases at least somewhat gracefully. See HelloAR Java
- // sample code for reasonable behavior.
- CHECK(ArSession_create(env, context, &ar_session_) == AR_SUCCESS);
- CHECK(ar_session_);
-
- ArConfig* ar_config = nullptr;
- ArConfig_create(ar_session_, &ar_config);
- CHECK(ar_config);
-
- const ArStatus status = ArSession_checkSupported(ar_session_, ar_config);
- CHECK(status == AR_SUCCESS);
-
- CHECK(ArSession_configure(ar_session_, ar_config) == AR_SUCCESS);
-
- ArConfig_destroy(ar_config);
-
- ArFrame_create(ar_session_, &ar_frame_);
- CHECK(ar_frame_);
}
HelloArApplication::~HelloArApplication() {
- ArSession_destroy(ar_session_);
- ArFrame_destroy(ar_frame_);
+ if (ar_session_ != nullptr) {
+ ArSession_destroy(ar_session_);
+ ArFrame_destroy(ar_frame_);
+ }
}
void HelloArApplication::OnPause() {
LOGI("OnPause()");
- ArSession_pause(ar_session_);
+ if (ar_session_ != nullptr) {
+ ArSession_pause(ar_session_);
+ }
}
-void HelloArApplication::OnResume() {
+void HelloArApplication::OnResume(void* env, void* context, void* activity) {
LOGI("OnResume()");
+ if (ar_session_ == nullptr) {
+ ArInstallStatus install_status;
+ // If install was not yet requested, that means that we are resuming the
+ // activity first time because of explicit user interaction (such as
+ // launching the application)
+ bool user_requested_install = !install_requested_;
+
+ // === ATTENTION! ATTENTION! ATTENTION! ===
+ // This method can and will fail in user-facing situations. Your
+ // application must handle these cases at least somewhat gracefully. See
+ // HelloAR Java sample code for reasonable behavior.
+ CHECK(ArCoreApk_requestInstall(env, activity, user_requested_install,
+ &install_status) == AR_SUCCESS);
+
+ switch (install_status) {
+ case AR_INSTALL_STATUS_INSTALLED:
+ break;
+ case AR_INSTALL_STATUS_INSTALL_REQUESTED:
+ install_requested_ = true;
+ return;
+ }
+
+ // === ATTENTION! ATTENTION! ATTENTION! ===
+ // This method can and will fail in user-facing situations. Your
+ // application must handle these cases at least somewhat gracefully. See
+ // HelloAR Java sample code for reasonable behavior.
+ CHECK(ArSession_create(env, context, &ar_session_) == AR_SUCCESS);
+ CHECK(ar_session_);
+
+ ArConfig* ar_config = nullptr;
+ ArConfig_create(ar_session_, &ar_config);
+ CHECK(ar_config);
+
+ const ArStatus status = ArSession_checkSupported(ar_session_, ar_config);
+ CHECK(status == AR_SUCCESS);
+
+ CHECK(ArSession_configure(ar_session_, ar_config) == AR_SUCCESS);
+
+ ArConfig_destroy(ar_config);
+
+ ArFrame_create(ar_session_, &ar_frame_);
+ CHECK(ar_frame_);
+
+ ArSession_setDisplayGeometry(ar_session_, display_rotation_, width_,
+ height_);
+ }
+
const ArStatus status = ArSession_resume(ar_session_);
CHECK(status == AR_SUCCESS);
}
@@ -86,8 +119,6 @@
LOGI("OnSurfaceCreated()");
background_renderer_.InitializeGlContent();
- ArSession_setCameraTextureName(ar_session_,
- background_renderer_.GetTextureId());
point_cloud_renderer_.InitializeGlContent();
andy_renderer_.InitializeGlContent(asset_manager_, "andy.obj", "andy.png");
plane_renderer_.InitializeGlContent(asset_manager_);
@@ -97,7 +128,12 @@
int width, int height) {
LOGI("OnSurfaceChanged(%d, %d)", width, height);
glViewport(0, 0, width, height);
- ArSession_setDisplayGeometry(ar_session_, display_rotation, width, height);
+ display_rotation_ = display_rotation;
+ width_ = width;
+ height_ = height;
+ if (ar_session_ != nullptr) {
+ ArSession_setDisplayGeometry(ar_session_, display_rotation, width, height);
+ }
}
void HelloArApplication::OnDrawFrame() {
@@ -110,6 +146,11 @@
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+ if (ar_session_ == nullptr) return;
+
+ ArSession_setCameraTextureName(ar_session_,
+ background_renderer_.GetTextureId());
+
// Update session to get current frame and render camera background.
if (ArSession_update(ar_session_, ar_frame_) != AR_SUCCESS) {
LOGE("HelloArApplication::OnDrawFrame ArSession_update error");
@@ -125,10 +166,17 @@
/*near=*/0.1f, /*far=*/100.f,
glm::value_ptr(projection_mat));
+ ArTrackingState camera_tracking_state;
+ ArCamera_getTrackingState(ar_session_, ar_camera, &camera_tracking_state);
ArCamera_release(ar_camera);
background_renderer_.Draw(ar_session_, ar_frame_);
+ // If the camera isn't tracking don't bother rendering other objects.
+ if (camera_tracking_state != AR_TRACKING_STATE_TRACKING) {
+ return;
+ }
+
// Get light estimation value.
ArLightEstimate* ar_light_estimate;
ArLightEstimateState ar_light_estimate_state;
@@ -176,21 +224,49 @@
ArTrackable* ar_trackable = nullptr;
ArTrackableList_acquireItem(ar_session_, plane_list, i, &ar_trackable);
ArPlane* ar_plane = ArAsPlane(ar_trackable);
+ ArTrackingState out_tracking_state;
+ ArTrackable_getTrackingState(ar_session_, ar_trackable,
+ &out_tracking_state);
- const auto iter = plane_color_map_.find(ar_plane);
- glm::vec3 color;
- if (iter != plane_color_map_.end()) {
- color = iter->second;
- } else {
- color = GetRandomPlaneColor();
- plane_color_map_.insert({ar_plane, color});
+ ArPlane* subsume_plane;
+ ArPlane_acquireSubsumedBy(ar_session_, ar_plane, &subsume_plane);
+ if (subsume_plane != nullptr) {
+ ArTrackable_release(ArAsTrackable(subsume_plane));
+ continue;
}
- plane_renderer_.Draw(projection_mat, view_mat, ar_session_, ar_plane,
- color);
+ if (ArTrackingState::AR_TRACKING_STATE_TRACKING != out_tracking_state) {
+ continue;
+ }
- ArTrackable_release(ar_trackable);
+ ArTrackingState plane_tracking_state;
+ ArTrackable_getTrackingState(ar_session_, ArAsTrackable(ar_plane),
+ &plane_tracking_state);
+ if (plane_tracking_state == AR_TRACKING_STATE_TRACKING) {
+ const auto iter = plane_color_map_.find(ar_plane);
+ glm::vec3 color;
+ if (iter != plane_color_map_.end()) {
+ color = iter->second;
+
+ // If this is an already observed trackable release it so it doesn't
+ // leave an additional reference dangling.
+ ArTrackable_release(ar_trackable);
+ } else {
+ // The first plane is always white.
+ if (!first_plane_has_been_found_) {
+ first_plane_has_been_found_ = true;
+ color = kWhite;
+ } else {
+ color = GetRandomPlaneColor();
+ }
+ plane_color_map_.insert({ar_plane, color});
+ }
+
+ plane_renderer_.Draw(projection_mat, view_mat, ar_session_, ar_plane,
+ color);
+ }
}
+
ArTrackableList_destroy(plane_list);
plane_list = nullptr;
@@ -218,39 +294,50 @@
// The hitTest method sorts the resulting list by distance from the camera,
// increasing. The first hit result will usually be the most relevant when
- // responding to user input
- for (int32_t i = 0; i < hit_result_list_size; ++i) {
- ArHitResult* ar_hit_result = nullptr;
- ArHitResult_create(ar_session_, &ar_hit_result);
- ArHitResultList_getItem(ar_session_, hit_result_list, i, ar_hit_result);
+ // responding to user input.
- if (ar_hit_result == nullptr) {
+ ArHitResult* ar_hit_result = nullptr;
+ for (int32_t i = 0; i < hit_result_list_size; ++i) {
+ ArHitResult* ar_hit = nullptr;
+ ArHitResult_create(ar_session_, &ar_hit);
+ ArHitResultList_getItem(ar_session_, hit_result_list, i, ar_hit);
+
+ if (ar_hit == nullptr) {
LOGE("HelloArApplication::OnTouched ArHitResultList_getItem error");
return;
}
- // Only consider planes for this sample app.
ArTrackable* ar_trackable = nullptr;
- ArHitResult_acquireTrackable(ar_session_, ar_hit_result, &ar_trackable);
+ ArHitResult_acquireTrackable(ar_session_, ar_hit, &ar_trackable);
ArTrackableType ar_trackable_type = AR_TRACKABLE_NOT_VALID;
ArTrackable_getType(ar_session_, ar_trackable, &ar_trackable_type);
- if (ar_trackable_type != AR_TRACKABLE_PLANE) {
- ArTrackable_release(ar_trackable);
- continue;
- }
+ // Creates an anchor if a plane or an oriented point was hit.
+ if (AR_TRACKABLE_PLANE == ar_trackable_type) {
+ ArPose* ar_pose = nullptr;
+ ArPose_create(ar_session_, nullptr, &ar_pose);
+ ArHitResult_getHitPose(ar_session_, ar_hit, ar_pose);
+ int32_t in_polygon = 0;
+ ArPlane* ar_plane = ArAsPlane(ar_trackable);
+ ArPlane_isPoseInPolygon(ar_session_, ar_plane, ar_pose, &in_polygon);
+ ArPose_destroy(ar_pose);
+ if (!in_polygon) {
+ continue;
+ }
- ArPose* ar_pose = nullptr;
- ArPose_create(ar_session_, nullptr, &ar_pose);
- ArHitResult_getHitPose(ar_session_, ar_hit_result, ar_pose);
- int32_t in_polygon = 0;
- ArPlane* ar_plane = ArAsPlane(ar_trackable);
- ArPlane_isPoseInPolygon(ar_session_, ar_plane, ar_pose, &in_polygon);
- ArTrackable_release(ar_trackable);
- ArPose_destroy(ar_pose);
- if (!in_polygon) {
- continue;
+ ar_hit_result = ar_hit;
+ break;
+ } else if (AR_TRACKABLE_POINT == ar_trackable_type) {
+ ArPoint* ar_point = ArAsPoint(ar_trackable);
+ ArPointOrientationMode mode;
+ ArPoint_getOrientationMode(ar_session_, ar_point, &mode);
+ if (AR_POINT_ORIENTATION_ESTIMATED_SURFACE_NORMAL == mode) {
+ ar_hit_result = ar_hit;
+ break;
+ }
}
+ }
+ if (ar_hit_result) {
// Note that the application is responsible for releasing the anchor
// pointer after using it. Call ArAnchor_release(anchor) to release.
ArAnchor* anchor = nullptr;
@@ -265,16 +352,21 @@
ArAnchor_getTrackingState(ar_session_, anchor, &tracking_state);
if (tracking_state != AR_TRACKING_STATE_TRACKING) {
ArAnchor_release(anchor);
- continue;
+ return;
}
- tracked_obj_set_.insert(anchor);
+ if (tracked_obj_set_.size() >= kMaxNumberOfAndroidsToRender) {
+ ArAnchor_release(tracked_obj_set_[0]);
+ tracked_obj_set_.erase(tracked_obj_set_.begin());
+ }
+
+ tracked_obj_set_.push_back(anchor);
ArHitResult_destroy(ar_hit_result);
ar_hit_result = nullptr;
- }
- ArHitResultList_destroy(hit_result_list);
- hit_result_list = nullptr;
+ ArHitResultList_destroy(hit_result_list);
+ hit_result_list = nullptr;
+ }
}
}
diff --git a/samples/hello_ar_c/app/src/main/cpp/hello_ar_application.h b/samples/hello_ar_c/app/src/main/cpp/hello_ar_application.h
index aa663d7..7bd5bad 100644
--- a/samples/hello_ar_c/app/src/main/cpp/hello_ar_application.h
+++ b/samples/hello_ar_c/app/src/main/cpp/hello_ar_application.h
@@ -22,9 +22,9 @@
#include <android/asset_manager.h>
#include <jni.h>
#include <memory>
+#include <set>
#include <string>
#include <unordered_map>
-#include <unordered_set>
#include "arcore_c_api.h"
#include "background_renderer.h"
@@ -41,14 +41,14 @@
public:
// Constructor and deconstructor.
HelloArApplication() = default;
- HelloArApplication(AAssetManager* asset_manager, void* env, void* context);
+ HelloArApplication(AAssetManager* asset_manager);
~HelloArApplication();
// OnPause is called on the UI thread from the Activity's onPause method.
void OnPause();
// OnResume is called on the UI thread from the Activity's onResume method.
- void OnResume();
+ void OnResume(void* env, void* context, void* activity);
// OnSurfaceCreated is called on the OpenGL thread when GLSurfaceView
// is created.
@@ -78,14 +78,23 @@
ArSession* ar_session_ = nullptr;
ArFrame* ar_frame_ = nullptr;
+ bool install_requested_ = false;
+ int width_ = 1;
+ int height_ = 1;
+ int display_rotation_ = 0;
+
AAssetManager* const asset_manager_;
// The anchors at which we are drawing android models
- std::unordered_set<ArAnchor*> tracked_obj_set_;
+ std::vector<ArAnchor*> tracked_obj_set_;
// Stores the randomly-selected color each plane is drawn with
std::unordered_map<ArPlane*, glm::vec3> plane_color_map_;
+ // The first plane is always rendered in white, if this is true then a plane
+ // at some point has been found.
+ bool first_plane_has_been_found_ = false;
+
PointCloudRenderer point_cloud_renderer_;
BackgroundRenderer background_renderer_;
PlaneRenderer plane_renderer_;
diff --git a/samples/hello_ar_c/app/src/main/cpp/jni_interface.cc b/samples/hello_ar_c/app/src/main/cpp/jni_interface.cc
index ea6aaf4..6e3d1a0 100644
--- a/samples/hello_ar_c/app/src/main/cpp/jni_interface.cc
+++ b/samples/hello_ar_c/app/src/main/cpp/jni_interface.cc
@@ -46,9 +46,9 @@
}
JNI_METHOD(jlong, createNativeApplication)
-(JNIEnv *env, jclass, jobject j_asset_manager, jobject context) {
+(JNIEnv *env, jclass, jobject j_asset_manager) {
AAssetManager *asset_manager = AAssetManager_fromJava(env, j_asset_manager);
- return jptr(new hello_ar::HelloArApplication(asset_manager, env, context));
+ return jptr(new hello_ar::HelloArApplication(asset_manager));
}
JNI_METHOD(void, destroyNativeApplication)
@@ -62,8 +62,9 @@
}
JNI_METHOD(void, onResume)
-(JNIEnv *, jclass, jlong native_application) {
- native(native_application)->OnResume();
+(JNIEnv *env, jclass, jlong native_application, jobject context,
+ jobject activity) {
+ native(native_application)->OnResume(env, context, activity);
}
JNI_METHOD(void, onGlSurfaceCreated)
diff --git a/samples/hello_ar_c/app/src/main/cpp/util.cc b/samples/hello_ar_c/app/src/main/cpp/util.cc
index 05effba..a02c13c 100644
--- a/samples/hello_ar_c/app/src/main/cpp/util.cc
+++ b/samples/hello_ar_c/app/src/main/cpp/util.cc
@@ -286,8 +286,8 @@
break;
}
[[clang::fallthrough]];
- // Intentionally falling to default error case because vertex
- // normal face only has two values.
+ // Intentionally falling to default error case because vertex
+ // normal face only has two values.
default:
// Error formatting.
LOGE(
diff --git a/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/HelloArActivity.java b/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/HelloArActivity.java
index 7bf1c61..228d280 100644
--- a/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/HelloArActivity.java
+++ b/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/HelloArActivity.java
@@ -119,7 +119,7 @@
mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
JniInterface.assetManager = getAssets();
- mNativeApplication = JniInterface.createNativeApplication(getAssets(), getApplicationContext());
+ mNativeApplication = JniInterface.createNativeApplication(getAssets());
mPlaneStatusCheckingHandler = new Handler();
}
@@ -134,7 +134,7 @@
return;
}
- JniInterface.onResume(mNativeApplication);
+ JniInterface.onResume(mNativeApplication, getApplicationContext(), this);
mSurfaceView.onResume();
mLoadingMessageSnackbar =
diff --git a/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/JniInterface.java b/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/JniInterface.java
index 37933cf..9ad606b 100644
--- a/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/JniInterface.java
+++ b/samples/hello_ar_c/app/src/main/java/com/google/ar/core/examples/c/helloar/JniInterface.java
@@ -1,5 +1,6 @@
package com.google.ar.core.examples.c.helloar;
+import android.app.Activity;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
@@ -17,13 +18,13 @@
private static final String TAG = "JniInterface";
static AssetManager assetManager;
- public static native long createNativeApplication(AssetManager assetManager, Context context);
+ public static native long createNativeApplication(AssetManager assetManager);
public static native void destroyNativeApplication(long nativeApplication);
public static native void onPause(long nativeApplication);
- public static native void onResume(long nativeApplication);
+ public static native void onResume(long nativeApplication, Context context, Activity activity);
/** Allocate OpenGL resources for rendering. */
public static native void onGlSurfaceCreated(long nativeApplication);
diff --git a/samples/hello_ar_c/app/src/main/res/layout/activity_main.xml b/samples/hello_ar_c/app/src/main/res/layout/activity_main.xml
index 9b8502d..0e3be9b 100644
--- a/samples/hello_ar_c/app/src/main/res/layout/activity_main.xml
+++ b/samples/hello_ar_c/app/src/main/res/layout/activity_main.xml
@@ -19,10 +19,10 @@
android:layout_height="match_parent"
tools:context="com.google.ar.core.examples.c.helloar.HelloArActivity">
- <android.opengl.GLSurfaceView
- android:id="@+id/surfaceview"
- android:layout_width="match_parent"
- android:layout_height="match_parent"
- android:layout_gravity="top" />
+ <android.opengl.GLSurfaceView
+ android:id="@+id/surfaceview"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:layout_gravity="top"/>
</RelativeLayout>
diff --git a/samples/hello_ar_c/app/src/main/res/values/strings.xml b/samples/hello_ar_c/app/src/main/res/values/strings.xml
index f149762..8b7a8ad 100644
--- a/samples/hello_ar_c/app/src/main/res/values/strings.xml
+++ b/samples/hello_ar_c/app/src/main/res/values/strings.xml
@@ -15,5 +15,5 @@
limitations under the License.
-->
<resources>
- <string name="app_name">HelloAR C</string>
+ <string name="app_name">HelloAR C</string>
</resources>
diff --git a/samples/hello_ar_c/app/src/main/res/values/styles.xml b/samples/hello_ar_c/app/src/main/res/values/styles.xml
index 0ecfbc5..3a71bd3 100644
--- a/samples/hello_ar_c/app/src/main/res/values/styles.xml
+++ b/samples/hello_ar_c/app/src/main/res/values/styles.xml
@@ -15,21 +15,21 @@
-->
<resources>
+ <!--
+ Base application theme, dependent on API level. This theme is replaced
+ by AppBaseTheme from res/values-vXX/styles.xml on newer devices.
+ -->
+ <style name="AppBaseTheme" parent="android:Theme.Light">
<!--
- Base application theme, dependent on API level. This theme is replaced
- by AppBaseTheme from res/values-vXX/styles.xml on newer devices.
+ Theme customizations available in newer API levels can go in
+ res/values-vXX/styles.xml, while customizations related to
+ backward-compatibility can go here.
-->
- <style name="AppBaseTheme" parent="android:Theme.Light">
- <!--
- Theme customizations available in newer API levels can go in
- res/values-vXX/styles.xml, while customizations related to
- backward-compatibility can go here.
- -->
- </style>
+ </style>
- <!-- Application theme. -->
- <style name="AppTheme" parent="AppBaseTheme">
- <!-- All customizations that are NOT specific to a particular API-level can go here. -->
- </style>
+ <!-- Application theme. -->
+ <style name="AppTheme" parent="AppBaseTheme">
+ <!-- All customizations that are NOT specific to a particular API-level can go here. -->
+ </style>
</resources>
diff --git a/samples/hello_ar_c/build.gradle b/samples/hello_ar_c/build.gradle
index 85691a6..be505f0 100644
--- a/samples/hello_ar_c/build.gradle
+++ b/samples/hello_ar_c/build.gradle
@@ -17,9 +17,6 @@
google()
jcenter()
mavenLocal()
- maven {
- url "${project.rootDir}/../../libraries/m2repository"
- }
}
}
diff --git a/samples/hello_ar_c/gradle/wrapper/gradle-wrapper.jar b/samples/hello_ar_c/gradle/wrapper/gradle-wrapper.jar
index 12a0871..7a3265e 100644
--- a/samples/hello_ar_c/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/hello_ar_c/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/samples/hello_ar_java/app/build.gradle b/samples/hello_ar_java/app/build.gradle
index 59fecf9..5fbde75 100644
--- a/samples/hello_ar_java/app/build.gradle
+++ b/samples/hello_ar_java/app/build.gradle
@@ -22,7 +22,7 @@
dependencies {
// ARCore library
- implementation 'com.google.ar:core:0.91.0'
+ implementation 'com.google.ar:core:1.0.0'
// Obj - a simple Wavefront OBJ file loader
// https://github.com/javagl/Obj
diff --git a/samples/hello_ar_java/app/src/main/AndroidManifest.xml b/samples/hello_ar_java/app/src/main/AndroidManifest.xml
index 3159225..1446d1d 100644
--- a/samples/hello_ar_java/app/src/main/AndroidManifest.xml
+++ b/samples/hello_ar_java/app/src/main/AndroidManifest.xml
@@ -15,30 +15,36 @@
limitations under the License.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
- xmlns:tools="http://schemas.android.com/tools"
+ xmlns:tools="http://schemas.android.com/tools"
package="com.google.ar.core.examples.java.helloar">
- <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.CAMERA"/>
+ <!-- This tag indicates that this application requires ARCore. This results in the application
+ only being visible in the Google Play Store on devices that support ARCore. -->
+ <uses-feature android:name="android.hardware.camera.ar" android:required="true"/>
- <application
- android:allowBackup="false"
- android:icon="@drawable/ic_launcher"
+ <application
+ android:allowBackup="false"
+ android:icon="@drawable/ic_launcher"
+ android:label="@string/app_name"
+ android:theme="@style/AppTheme"
+ android:usesCleartextTraffic="false"
+ tools:ignore="GoogleAppIndexingWarning">
+
+ <activity
+ android:name=".HelloArActivity"
android:label="@string/app_name"
- android:theme="@style/AppTheme"
- android:usesCleartextTraffic="false"
- tools:ignore="GoogleAppIndexingWarning">
-
- <activity
- android:name=".HelloArActivity"
- android:label="@string/app_name"
- android:configChanges="orientation|screenSize"
- android:exported="true"
- android:theme="@style/Theme.AppCompat.NoActionBar"
- android:screenOrientation="locked">
- <intent-filter>
- <action android:name="android.intent.action.MAIN" />
- <category android:name="android.intent.category.LAUNCHER" />
- </intent-filter>
- </activity>
- </application>
+ android:configChanges="orientation|screenSize"
+ android:exported="true"
+ android:theme="@style/Theme.AppCompat.NoActionBar"
+ android:screenOrientation="locked">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN"/>
+ <category android:name="android.intent.category.LAUNCHER"/>
+ </intent-filter>
+ </activity>
+ <!-- This tag indicates that this application requires ARCore. This results in the Google Play
+ Store downloading and installing ARCore along with the application. -->
+ <meta-data android:name="com.google.ar.core" android:value="required" />
+ </application>
</manifest>
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/CameraPermissionHelper.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/CameraPermissionHelper.java
index 397045d..3403f7c 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/CameraPermissionHelper.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/CameraPermissionHelper.java
@@ -28,20 +28,16 @@
private static final int CAMERA_PERMISSION_CODE = 0;
private static final String CAMERA_PERMISSION = Manifest.permission.CAMERA;
- /**
- * Check to see we have the necessary permissions for this app.
- */
+ /** Check to see we have the necessary permissions for this app. */
public static boolean hasCameraPermission(Activity activity) {
return ContextCompat.checkSelfPermission(activity, CAMERA_PERMISSION)
- == PackageManager.PERMISSION_GRANTED;
+ == PackageManager.PERMISSION_GRANTED;
}
- /**
- * Check to see we have the necessary permissions for this app, and ask for them if we don't.
- */
+ /** Check to see we have the necessary permissions for this app, and ask for them if we don't. */
public static void requestCameraPermission(Activity activity) {
- ActivityCompat.requestPermissions(activity, new String[]{CAMERA_PERMISSION},
- CAMERA_PERMISSION_CODE);
+ ActivityCompat.requestPermissions(
+ activity, new String[] {CAMERA_PERMISSION}, CAMERA_PERMISSION_CODE);
}
/** Check to see if we need to show the rationale for this permission. */
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/DisplayRotationHelper.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/DisplayRotationHelper.java
index c0fdb16..3437d8c 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/DisplayRotationHelper.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/DisplayRotationHelper.java
@@ -28,80 +28,78 @@
* events.
*/
public class DisplayRotationHelper implements DisplayListener {
- private boolean mViewportChanged;
- private int mViewportWidth;
- private int mViewportHeight;
- private final Context mContext;
- private final Display mDisplay;
+ private boolean viewportChanged;
+ private int viewportWidth;
+ private int viewportHeight;
+ private final Context context;
+ private final Display display;
- /**
- * Constructs the DisplayRotationHelper but does not register the listener yet.
- *
- * @param context the Android {@link Context}.
- */
- public DisplayRotationHelper(Context context) {
- mContext = context;
- mDisplay = context.getSystemService(WindowManager.class).getDefaultDisplay();
+ /**
+ * Constructs the DisplayRotationHelper but does not register the listener yet.
+ *
+ * @param context the Android {@link Context}.
+ */
+ public DisplayRotationHelper(Context context) {
+ this.context = context;
+ display = context.getSystemService(WindowManager.class).getDefaultDisplay();
+ }
+
+ /** Registers the display listener. Should be called from {@link Activity#onResume()}. */
+ public void onResume() {
+ context.getSystemService(DisplayManager.class).registerDisplayListener(this, null);
+ }
+
+ /** Unregisters the display listener. Should be called from {@link Activity#onPause()}. */
+ public void onPause() {
+ context.getSystemService(DisplayManager.class).unregisterDisplayListener(this);
+ }
+
+ /**
+ * Records a change in surface dimensions. This will be later used by {@link
+ * #updateSessionIfNeeded(Session)}. Should be called from {@link
+ * android.opengl.GLSurfaceView.Renderer
+ * #onSurfaceChanged(javax.microedition.khronos.opengles.GL10, int, int)}.
+ *
+ * @param width the updated width of the surface.
+ * @param height the updated height of the surface.
+ */
+ public void onSurfaceChanged(int width, int height) {
+ viewportWidth = width;
+ viewportHeight = height;
+ viewportChanged = true;
+ }
+
+ /**
+ * Updates the session display geometry if a change was posted either by {@link
+ * #onSurfaceChanged(int, int)} call or by {@link #onDisplayChanged(int)} system callback. This
+ * function should be called explicitly before each call to {@link Session#update()}. This
+ * function will also clear the 'pending update' (viewportChanged) flag.
+ *
+ * @param session the {@link Session} object to update if display geometry changed.
+ */
+ public void updateSessionIfNeeded(Session session) {
+ if (viewportChanged) {
+ int displayRotation = display.getRotation();
+ session.setDisplayGeometry(displayRotation, viewportWidth, viewportHeight);
+ viewportChanged = false;
}
+ }
- /** Registers the display listener. Should be called from {@link Activity#onResume()}. */
- public void onResume() {
- mContext.getSystemService(DisplayManager.class).registerDisplayListener(this, null);
- }
+ /**
+ * Returns the current rotation state of android display. Same as {@link Display#getRotation()}.
+ */
+ public int getRotation() {
+ return display.getRotation();
+ }
- /** Unregisters the display listener. Should be called from {@link Activity#onPause()}. */
- public void onPause() {
- mContext.getSystemService(DisplayManager.class).unregisterDisplayListener(this);
- }
+ @Override
+ public void onDisplayAdded(int displayId) {}
- /**
- * Records a change in surface dimensions. This will be later used by
- * {@link #updateSessionIfNeeded(Session)}. Should be called from
- * {@link android.opengl.GLSurfaceView.Renderer
- * #onSurfaceChanged(javax.microedition.khronos.opengles.GL10, int, int)}.
- *
- * @param width the updated width of the surface.
- * @param height the updated height of the surface.
- */
- public void onSurfaceChanged(int width, int height) {
- mViewportWidth = width;
- mViewportHeight = height;
- mViewportChanged = true;
- }
+ @Override
+ public void onDisplayRemoved(int displayId) {}
- /**
- * Updates the session display geometry if a change was posted either by
- * {@link #onSurfaceChanged(int, int)} call or by {@link #onDisplayChanged(int)} system
- * callback. This function should be called explicitly before each call to
- * {@link Session#update()}. This function will also clear the 'pending update'
- * (viewportChanged) flag.
- *
- * @param session the {@link Session} object to update if display geometry changed.
- */
- public void updateSessionIfNeeded(Session session) {
- if (mViewportChanged) {
- int displayRotation = mDisplay.getRotation();
- session.setDisplayGeometry(displayRotation, mViewportWidth, mViewportHeight);
- mViewportChanged = false;
- }
- }
-
- /**
- * Returns the current rotation state of android display.
- * Same as {@link Display#getRotation()}.
- */
- public int getRotation() {
- return mDisplay.getRotation();
- }
-
- @Override
- public void onDisplayAdded(int displayId) {}
-
- @Override
- public void onDisplayRemoved(int displayId) {}
-
- @Override
- public void onDisplayChanged(int displayId) {
- mViewportChanged = true;
- }
+ @Override
+ public void onDisplayChanged(int displayId) {
+ viewportChanged = true;
+ }
}
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/HelloArActivity.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/HelloArActivity.java
index 9203b11..edeffd8 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/HelloArActivity.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/HelloArActivity.java
@@ -29,15 +29,18 @@
import android.view.WindowManager;
import android.widget.Toast;
import com.google.ar.core.Anchor;
+import com.google.ar.core.ArCoreApk;
import com.google.ar.core.Camera;
import com.google.ar.core.Config;
import com.google.ar.core.Frame;
import com.google.ar.core.HitResult;
import com.google.ar.core.Plane;
+import com.google.ar.core.Point;
+import com.google.ar.core.Point.OrientationMode;
import com.google.ar.core.PointCloud;
import com.google.ar.core.Session;
import com.google.ar.core.Trackable;
-import com.google.ar.core.Trackable.TrackingState;
+import com.google.ar.core.TrackingState;
import com.google.ar.core.examples.java.helloar.rendering.BackgroundRenderer;
import com.google.ar.core.examples.java.helloar.rendering.ObjectRenderer;
import com.google.ar.core.examples.java.helloar.rendering.ObjectRenderer.BlendMode;
@@ -46,6 +49,7 @@
import com.google.ar.core.exceptions.UnavailableApkTooOldException;
import com.google.ar.core.exceptions.UnavailableArcoreNotInstalledException;
import com.google.ar.core.exceptions.UnavailableSdkTooOldException;
+import com.google.ar.core.exceptions.UnavailableUserDeclinedInstallationException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.ArrayBlockingQueue;
@@ -58,353 +62,378 @@
* plane to place a 3d model of the Android robot.
*/
public class HelloArActivity extends AppCompatActivity implements GLSurfaceView.Renderer {
- private static final String TAG = HelloArActivity.class.getSimpleName();
+ private static final String TAG = HelloArActivity.class.getSimpleName();
- // Rendering. The Renderers are created here, and initialized when the GL surface is created.
- private GLSurfaceView mSurfaceView;
+ // Rendering. The Renderers are created here, and initialized when the GL surface is created.
+ private GLSurfaceView surfaceView;
- private Session mSession;
- private GestureDetector mGestureDetector;
- private Snackbar mMessageSnackbar;
- private DisplayRotationHelper mDisplayRotationHelper;
+ private boolean installRequested;
- private final BackgroundRenderer mBackgroundRenderer = new BackgroundRenderer();
- private final ObjectRenderer mVirtualObject = new ObjectRenderer();
- private final ObjectRenderer mVirtualObjectShadow = new ObjectRenderer();
- private final PlaneRenderer mPlaneRenderer = new PlaneRenderer();
- private final PointCloudRenderer mPointCloud = new PointCloudRenderer();
+ private Session session;
+ private GestureDetector gestureDetector;
+ private Snackbar messageSnackbar;
+ private DisplayRotationHelper displayRotationHelper;
- // Temporary matrix allocated here to reduce number of allocations for each frame.
- private final float[] mAnchorMatrix = new float[16];
+ private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
+ private final ObjectRenderer virtualObject = new ObjectRenderer();
+ private final ObjectRenderer virtualObjectShadow = new ObjectRenderer();
+ private final PlaneRenderer planeRenderer = new PlaneRenderer();
+ private final PointCloudRenderer pointCloud = new PointCloudRenderer();
- // Tap handling and UI.
- private final ArrayBlockingQueue<MotionEvent> mQueuedSingleTaps = new ArrayBlockingQueue<>(16);
- private final ArrayList<Anchor> mAnchors = new ArrayList<>();
+ // Temporary matrix allocated here to reduce number of allocations for each frame.
+ private final float[] anchorMatrix = new float[16];
- @Override
- protected void onCreate(Bundle savedInstanceState) {
- super.onCreate(savedInstanceState);
- setContentView(R.layout.activity_main);
- mSurfaceView = findViewById(R.id.surfaceview);
- mDisplayRotationHelper = new DisplayRotationHelper(/*context=*/ this);
+ // Tap handling and UI.
+ private final ArrayBlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
+ private final ArrayList<Anchor> anchors = new ArrayList<>();
- // Set up tap listener.
- mGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
- @Override
- public boolean onSingleTapUp(MotionEvent e) {
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_main);
+ surfaceView = findViewById(R.id.surfaceview);
+ displayRotationHelper = new DisplayRotationHelper(/*context=*/ this);
+
+ // Set up tap listener.
+ gestureDetector =
+ new GestureDetector(
+ this,
+ new GestureDetector.SimpleOnGestureListener() {
+ @Override
+ public boolean onSingleTapUp(MotionEvent e) {
onSingleTap(e);
return true;
- }
+ }
- @Override
- public boolean onDown(MotionEvent e) {
+ @Override
+ public boolean onDown(MotionEvent e) {
return true;
- }
+ }
+ });
+
+ surfaceView.setOnTouchListener(
+ new View.OnTouchListener() {
+ @Override
+ public boolean onTouch(View v, MotionEvent event) {
+ return gestureDetector.onTouchEvent(event);
+ }
});
- mSurfaceView.setOnTouchListener(new View.OnTouchListener() {
- @Override
- public boolean onTouch(View v, MotionEvent event) {
- return mGestureDetector.onTouchEvent(event);
- }
- });
+ // Set up renderer.
+ surfaceView.setPreserveEGLContextOnPause(true);
+ surfaceView.setEGLContextClientVersion(2);
+ surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
+ surfaceView.setRenderer(this);
+ surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
- // Set up renderer.
- mSurfaceView.setPreserveEGLContextOnPause(true);
- mSurfaceView.setEGLContextClientVersion(2);
- mSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
- mSurfaceView.setRenderer(this);
- mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ installRequested = false;
+ }
- Exception exception = null;
- String message = null;
- try {
- mSession = new Session(/* context= */ this);
- } catch (UnavailableArcoreNotInstalledException e) {
- message = "Please install ARCore";
- exception = e;
- } catch (UnavailableApkTooOldException e) {
- message = "Please update ARCore";
- exception = e;
- } catch (UnavailableSdkTooOldException e) {
- message = "Please update this app";
- exception = e;
- } catch (Exception e) {
- message = "This device does not support AR";
- exception = e;
- }
+ @Override
+ protected void onResume() {
+ super.onResume();
- if (message != null) {
- showSnackbarMessage(message, true);
- Log.e(TAG, "Exception creating session", exception);
+ if (session == null) {
+ Exception exception = null;
+ String message = null;
+ try {
+ switch (ArCoreApk.getInstance().requestInstall(this, !installRequested)) {
+ case INSTALL_REQUESTED:
+ installRequested = true;
return;
+ case INSTALLED:
+ break;
}
- // Create default config and check if supported.
- Config config = new Config(mSession);
- if (!mSession.isSupported(config)) {
- showSnackbarMessage("This device does not support AR", true);
- }
- mSession.configure(config);
- }
-
- @Override
- protected void onResume() {
- super.onResume();
-
// ARCore requires camera permissions to operate. If we did not yet obtain runtime
// permission on Android M and above, now is a good time to ask the user for it.
- if (CameraPermissionHelper.hasCameraPermission(this)) {
- if (mSession != null) {
- showLoadingMessage();
- // Note that order matters - see the note in onPause(), the reverse applies here.
- mSession.resume();
- }
- mSurfaceView.onResume();
- mDisplayRotationHelper.onResume();
- } else {
- CameraPermissionHelper.requestCameraPermission(this);
- }
- }
-
- @Override
- public void onPause() {
- super.onPause();
- // Note that the order matters - GLSurfaceView is paused first so that it does not try
- // to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
- // still call mSession.update() and get a SessionPausedException.
- mDisplayRotationHelper.onPause();
- mSurfaceView.onPause();
- if (mSession != null) {
- mSession.pause();
- }
- }
-
- @Override
- public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
if (!CameraPermissionHelper.hasCameraPermission(this)) {
- Toast.makeText(this,
- "Camera permission is needed to run this application", Toast.LENGTH_LONG).show();
- if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
- // Permission denied with checking "Do not ask again".
- CameraPermissionHelper.launchPermissionSettings(this);
+ CameraPermissionHelper.requestCameraPermission(this);
+ return;
+ }
+
+ session = new Session(/* context= */ this);
+ } catch (UnavailableArcoreNotInstalledException
+ | UnavailableUserDeclinedInstallationException e) {
+ message = "Please install ARCore";
+ exception = e;
+ } catch (UnavailableApkTooOldException e) {
+ message = "Please update ARCore";
+ exception = e;
+ } catch (UnavailableSdkTooOldException e) {
+ message = "Please update this app";
+ exception = e;
+ } catch (Exception e) {
+ message = "This device does not support AR";
+ exception = e;
+ }
+
+ if (message != null) {
+ showSnackbarMessage(message, true);
+ Log.e(TAG, "Exception creating session", exception);
+ return;
+ }
+
+ // Create default config and check if supported.
+ Config config = new Config(session);
+ if (!session.isSupported(config)) {
+ showSnackbarMessage("This device does not support AR", true);
+ }
+ session.configure(config);
+ }
+
+ showLoadingMessage();
+ // Note that order matters - see the note in onPause(), the reverse applies here.
+ session.resume();
+ surfaceView.onResume();
+ displayRotationHelper.onResume();
+ }
+
+ @Override
+ public void onPause() {
+ super.onPause();
+ if (session != null) {
+ // Note that the order matters - GLSurfaceView is paused first so that it does not try
+ // to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
+ // still call session.update() and get a SessionPausedException.
+ displayRotationHelper.onPause();
+ surfaceView.onPause();
+ session.pause();
+ }
+ }
+
+ @Override
+ public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
+ if (!CameraPermissionHelper.hasCameraPermission(this)) {
+ Toast.makeText(this, "Camera permission is needed to run this application", Toast.LENGTH_LONG)
+ .show();
+ if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
+ // Permission denied with checking "Do not ask again".
+ CameraPermissionHelper.launchPermissionSettings(this);
+ }
+ finish();
+ }
+ }
+
+ @Override
+ public void onWindowFocusChanged(boolean hasFocus) {
+ super.onWindowFocusChanged(hasFocus);
+ if (hasFocus) {
+ // Standard Android full-screen functionality.
+ getWindow()
+ .getDecorView()
+ .setSystemUiVisibility(
+ View.SYSTEM_UI_FLAG_LAYOUT_STABLE
+ | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
+ | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
+ | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
+ | View.SYSTEM_UI_FLAG_FULLSCREEN
+ | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+ }
+ }
+
+ private void onSingleTap(MotionEvent e) {
+ // Queue tap if there is space. Tap is lost if queue is full.
+ queuedSingleTaps.offer(e);
+ }
+
+ @Override
+ public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+ GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
+
+ // Create the texture and pass it to ARCore session to be filled during update().
+ backgroundRenderer.createOnGlThread(/*context=*/ this);
+
+ // Prepare the other rendering objects.
+ try {
+ virtualObject.createOnGlThread(/*context=*/ this, "andy.obj", "andy.png");
+ virtualObject.setMaterialProperties(0.0f, 3.5f, 1.0f, 6.0f);
+
+ virtualObjectShadow.createOnGlThread(/*context=*/ this, "andy_shadow.obj", "andy_shadow.png");
+ virtualObjectShadow.setBlendMode(BlendMode.Shadow);
+ virtualObjectShadow.setMaterialProperties(1.0f, 0.0f, 0.0f, 1.0f);
+ } catch (IOException e) {
+ Log.e(TAG, "Failed to read obj file");
+ }
+ try {
+ planeRenderer.createOnGlThread(/*context=*/ this, "trigrid.png");
+ } catch (IOException e) {
+ Log.e(TAG, "Failed to read plane texture");
+ }
+ pointCloud.createOnGlThread(/*context=*/ this);
+ }
+
+ @Override
+ public void onSurfaceChanged(GL10 gl, int width, int height) {
+ displayRotationHelper.onSurfaceChanged(width, height);
+ GLES20.glViewport(0, 0, width, height);
+ }
+
+ @Override
+ public void onDrawFrame(GL10 gl) {
+ // Clear screen to notify driver it should not load any pixels from previous frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
+
+ if (session == null) {
+ return;
+ }
+ // Notify ARCore session that the view size changed so that the perspective matrix and
+ // the video background can be properly adjusted.
+ displayRotationHelper.updateSessionIfNeeded(session);
+
+ try {
+ session.setCameraTextureName(backgroundRenderer.getTextureId());
+
+ // Obtain the current frame from ARSession. When the configuration is set to
+ // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
+ // camera framerate.
+ Frame frame = session.update();
+ Camera camera = frame.getCamera();
+
+ // Handle taps. Handling only one tap per frame, as taps are usually low frequency
+ // compared to frame rate.
+
+ MotionEvent tap = queuedSingleTaps.poll();
+ if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
+ for (HitResult hit : frame.hitTest(tap)) {
+ // Check if any plane was hit, and if it was hit inside the plane polygon
+ Trackable trackable = hit.getTrackable();
+ // Creates an anchor if a plane or an oriented point was hit.
+ if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
+ || (trackable instanceof Point
+ && ((Point) trackable).getOrientationMode()
+ == OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
+ // Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
+ // Cap the number of objects created. This avoids overloading both the
+ // rendering system and ARCore.
+ if (anchors.size() >= 20) {
+ anchors.get(0).detach();
+ anchors.remove(0);
}
- finish();
+ // Adding an Anchor tells ARCore that it should track this position in
+ // space. This anchor is created on the Plane to place the 3D model
+ // in the correct position relative both to the world and to the plane.
+ anchors.add(hit.createAnchor());
+ break;
+ }
}
+ }
+
+ // Draw background.
+ backgroundRenderer.draw(frame);
+
+ // If not tracking, don't draw 3d objects.
+ if (camera.getTrackingState() == TrackingState.PAUSED) {
+ return;
+ }
+
+ // Get projection matrix.
+ float[] projmtx = new float[16];
+ camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
+
+ // Get camera matrix and draw.
+ float[] viewmtx = new float[16];
+ camera.getViewMatrix(viewmtx, 0);
+
+ // Compute lighting from average intensity of the image.
+ final float lightIntensity = frame.getLightEstimate().getPixelIntensity();
+
+ // Visualize tracked points.
+ PointCloud pointCloud = frame.acquirePointCloud();
+ this.pointCloud.update(pointCloud);
+ this.pointCloud.draw(viewmtx, projmtx);
+
+ // Application is responsible for releasing the point cloud resources after
+ // using it.
+ pointCloud.release();
+
+ // Check if we detected at least one plane. If so, hide the loading message.
+ if (messageSnackbar != null) {
+ for (Plane plane : session.getAllTrackables(Plane.class)) {
+ if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
+ && plane.getTrackingState() == TrackingState.TRACKING) {
+ hideLoadingMessage();
+ break;
+ }
+ }
+ }
+
+ // Visualize planes.
+ planeRenderer.drawPlanes(
+ session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
+
+ // Visualize anchors created by touch.
+ float scaleFactor = 1.0f;
+ for (Anchor anchor : anchors) {
+ if (anchor.getTrackingState() != TrackingState.TRACKING) {
+ continue;
+ }
+ // Get the current pose of an Anchor in world space. The Anchor pose is updated
+ // during calls to session.update() as ARCore refines its estimate of the world.
+ anchor.getPose().toMatrix(anchorMatrix, 0);
+
+ // Update and draw the model and its shadow.
+ virtualObject.updateModelMatrix(anchorMatrix, scaleFactor);
+ virtualObjectShadow.updateModelMatrix(anchorMatrix, scaleFactor);
+ virtualObject.draw(viewmtx, projmtx, lightIntensity);
+ virtualObjectShadow.draw(viewmtx, projmtx, lightIntensity);
+ }
+
+ } catch (Throwable t) {
+ // Avoid crashing the application due to unhandled exceptions.
+ Log.e(TAG, "Exception on the OpenGL thread", t);
}
+ }
- @Override
- public void onWindowFocusChanged(boolean hasFocus) {
- super.onWindowFocusChanged(hasFocus);
- if (hasFocus) {
- // Standard Android full-screen functionality.
- getWindow().getDecorView().setSystemUiVisibility(
- View.SYSTEM_UI_FLAG_LAYOUT_STABLE
- | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
- | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
- | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
- | View.SYSTEM_UI_FLAG_FULLSCREEN
- | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
- getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- }
- }
-
- private void onSingleTap(MotionEvent e) {
- // Queue tap if there is space. Tap is lost if queue is full.
- mQueuedSingleTaps.offer(e);
- }
-
- @Override
- public void onSurfaceCreated(GL10 gl, EGLConfig config) {
- GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
-
- // Create the texture and pass it to ARCore session to be filled during update().
- mBackgroundRenderer.createOnGlThread(/*context=*/ this);
- if (mSession != null) {
- mSession.setCameraTextureName(mBackgroundRenderer.getTextureId());
- }
-
- // Prepare the other rendering objects.
- try {
- mVirtualObject.createOnGlThread(/*context=*/this, "andy.obj", "andy.png");
- mVirtualObject.setMaterialProperties(0.0f, 3.5f, 1.0f, 6.0f);
-
- mVirtualObjectShadow.createOnGlThread(/*context=*/this,
- "andy_shadow.obj", "andy_shadow.png");
- mVirtualObjectShadow.setBlendMode(BlendMode.Shadow);
- mVirtualObjectShadow.setMaterialProperties(1.0f, 0.0f, 0.0f, 1.0f);
- } catch (IOException e) {
- Log.e(TAG, "Failed to read obj file");
- }
- try {
- mPlaneRenderer.createOnGlThread(/*context=*/this, "trigrid.png");
- } catch (IOException e) {
- Log.e(TAG, "Failed to read plane texture");
- }
- mPointCloud.createOnGlThread(/*context=*/this);
- }
-
- @Override
- public void onSurfaceChanged(GL10 gl, int width, int height) {
- mDisplayRotationHelper.onSurfaceChanged(width, height);
- GLES20.glViewport(0, 0, width, height);
- }
-
- @Override
- public void onDrawFrame(GL10 gl) {
- // Clear screen to notify driver it should not load any pixels from previous frame.
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
-
- if (mSession == null) {
- return;
- }
- // Notify ARCore session that the view size changed so that the perspective matrix and
- // the video background can be properly adjusted.
- mDisplayRotationHelper.updateSessionIfNeeded(mSession);
-
- try {
- // Obtain the current frame from ARSession. When the configuration is set to
- // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
- // camera framerate.
- Frame frame = mSession.update();
- Camera camera = frame.getCamera();
-
- // Handle taps. Handling only one tap per frame, as taps are usually low frequency
- // compared to frame rate.
- MotionEvent tap = mQueuedSingleTaps.poll();
- if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
- for (HitResult hit : frame.hitTest(tap)) {
- // Check if any plane was hit, and if it was hit inside the plane polygon
- Trackable trackable = hit.getTrackable();
- if (trackable instanceof Plane
- && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())) {
- // Cap the number of objects created. This avoids overloading both the
- // rendering system and ARCore.
- if (mAnchors.size() >= 20) {
- mAnchors.get(0).detach();
- mAnchors.remove(0);
- }
- // Adding an Anchor tells ARCore that it should track this position in
- // space. This anchor is created on the Plane to place the 3d model
- // in the correct position relative both to the world and to the plane.
- mAnchors.add(hit.createAnchor());
-
- // Hits are sorted by depth. Consider only closest hit on a plane.
- break;
- }
- }
- }
-
- // Draw background.
- mBackgroundRenderer.draw(frame);
-
- // If not tracking, don't draw 3d objects.
- if (camera.getTrackingState() == TrackingState.PAUSED) {
- return;
- }
-
- // Get projection matrix.
- float[] projmtx = new float[16];
- camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
-
- // Get camera matrix and draw.
- float[] viewmtx = new float[16];
- camera.getViewMatrix(viewmtx, 0);
-
- // Compute lighting from average intensity of the image.
- final float lightIntensity = frame.getLightEstimate().getPixelIntensity();
-
- // Visualize tracked points.
- PointCloud pointCloud = frame.acquirePointCloud();
- mPointCloud.update(pointCloud);
- mPointCloud.draw(viewmtx, projmtx);
-
- // Application is responsible for releasing the point cloud resources after
- // using it.
- pointCloud.release();
-
- // Check if we detected at least one plane. If so, hide the loading message.
- if (mMessageSnackbar != null) {
- for (Plane plane : mSession.getAllTrackables(Plane.class)) {
- if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
- && plane.getTrackingState() == TrackingState.TRACKING) {
- hideLoadingMessage();
- break;
- }
- }
- }
-
- // Visualize planes.
- mPlaneRenderer.drawPlanes(
- mSession.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
-
- // Visualize anchors created by touch.
- float scaleFactor = 1.0f;
- for (Anchor anchor : mAnchors) {
- if (anchor.getTrackingState() != TrackingState.TRACKING) {
- continue;
- }
- // Get the current pose of an Anchor in world space. The Anchor pose is updated
- // during calls to session.update() as ARCore refines its estimate of the world.
- anchor.getPose().toMatrix(mAnchorMatrix, 0);
-
- // Update and draw the model and its shadow.
- mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor);
- mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor);
- mVirtualObject.draw(viewmtx, projmtx, lightIntensity);
- mVirtualObjectShadow.draw(viewmtx, projmtx, lightIntensity);
- }
-
- } catch (Throwable t) {
- // Avoid crashing the application due to unhandled exceptions.
- Log.e(TAG, "Exception on the OpenGL thread", t);
- }
- }
-
- private void showSnackbarMessage(String message, boolean finishOnDismiss) {
- mMessageSnackbar = Snackbar.make(
+ private void showSnackbarMessage(String message, boolean finishOnDismiss) {
+ messageSnackbar =
+ Snackbar.make(
HelloArActivity.this.findViewById(android.R.id.content),
- message, Snackbar.LENGTH_INDEFINITE);
- mMessageSnackbar.getView().setBackgroundColor(0xbf323232);
- if (finishOnDismiss) {
- mMessageSnackbar.setAction(
- "Dismiss",
- new View.OnClickListener() {
- @Override
- public void onClick(View v) {
- mMessageSnackbar.dismiss();
- }
- });
- mMessageSnackbar.addCallback(
- new BaseTransientBottomBar.BaseCallback<Snackbar>() {
- @Override
- public void onDismissed(Snackbar transientBottomBar, int event) {
- super.onDismissed(transientBottomBar, event);
- finish();
- }
- });
- }
- mMessageSnackbar.show();
- }
-
- private void showLoadingMessage() {
- runOnUiThread(new Runnable() {
+ message,
+ Snackbar.LENGTH_INDEFINITE);
+ messageSnackbar.getView().setBackgroundColor(0xbf323232);
+ if (finishOnDismiss) {
+ messageSnackbar.setAction(
+ "Dismiss",
+ new View.OnClickListener() {
@Override
- public void run() {
- showSnackbarMessage("Searching for surfaces...", false);
+ public void onClick(View v) {
+ messageSnackbar.dismiss();
}
- });
- }
-
- private void hideLoadingMessage() {
- runOnUiThread(new Runnable() {
+ });
+ messageSnackbar.addCallback(
+ new BaseTransientBottomBar.BaseCallback<Snackbar>() {
@Override
- public void run() {
- if (mMessageSnackbar != null) {
- mMessageSnackbar.dismiss();
- }
- mMessageSnackbar = null;
+ public void onDismissed(Snackbar transientBottomBar, int event) {
+ super.onDismissed(transientBottomBar, event);
+ finish();
}
- });
+ });
}
+ messageSnackbar.show();
+ }
+
+ private void showLoadingMessage() {
+ runOnUiThread(
+ new Runnable() {
+ @Override
+ public void run() {
+ showSnackbarMessage("Searching for surfaces...", false);
+ }
+ });
+ }
+
+ private void hideLoadingMessage() {
+ runOnUiThread(
+ new Runnable() {
+ @Override
+ public void run() {
+ if (messageSnackbar != null) {
+ messageSnackbar.dismiss();
+ }
+ messageSnackbar = null;
+ }
+ });
+ }
}
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/package-info.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/package-info.java
deleted file mode 100644
index 71ee518..0000000
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/package-info.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 2017 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * A sample showing how to build an Augmented Reality app using ARCore.
- */
-package com.google.ar.core.examples.java.helloar;
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/BackgroundRenderer.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/BackgroundRenderer.java
index dfae0e2..0fab1be 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/BackgroundRenderer.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/BackgroundRenderer.java
@@ -28,155 +28,159 @@
import javax.microedition.khronos.opengles.GL10;
/**
- * This class renders the AR background from camera feed. It creates and hosts the texture
- * given to ARCore to be filled with the camera image.
+ * This class renders the AR background from camera feed. It creates and hosts the texture given to
+ * ARCore to be filled with the camera image.
*/
public class BackgroundRenderer {
- private static final String TAG = BackgroundRenderer.class.getSimpleName();
+ private static final String TAG = BackgroundRenderer.class.getSimpleName();
- private static final int COORDS_PER_VERTEX = 3;
- private static final int TEXCOORDS_PER_VERTEX = 2;
- private static final int FLOAT_SIZE = 4;
+ private static final int COORDS_PER_VERTEX = 3;
+ private static final int TEXCOORDS_PER_VERTEX = 2;
+ private static final int FLOAT_SIZE = 4;
- private FloatBuffer mQuadVertices;
- private FloatBuffer mQuadTexCoord;
- private FloatBuffer mQuadTexCoordTransformed;
+ private FloatBuffer quadVertices;
+ private FloatBuffer quadTexCoord;
+ private FloatBuffer quadTexCoordTransformed;
- private int mQuadProgram;
+ private int quadProgram;
- private int mQuadPositionParam;
- private int mQuadTexCoordParam;
- private int mTextureId = -1;
+ private int quadPositionParam;
+ private int quadTexCoordParam;
+ private int textureId = -1;
- public BackgroundRenderer() {
+ public BackgroundRenderer() {}
+
+ public int getTextureId() {
+ return textureId;
+ }
+
+ /**
+ * Allocates and initializes OpenGL resources needed by the background renderer. Must be called on
+ * the OpenGL thread, typically in {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10,
+ * EGLConfig)}.
+ *
+ * @param context Needed to access shader source.
+ */
+ public void createOnGlThread(Context context) {
+ // Generate the background texture.
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ textureId = textures[0];
+ int textureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+ GLES20.glBindTexture(textureTarget, textureId);
+ GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
+ GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
+
+ int numVertices = 4;
+ if (numVertices != QUAD_COORDS.length / COORDS_PER_VERTEX) {
+ throw new RuntimeException("Unexpected number of vertices in BackgroundRenderer.");
}
- public int getTextureId() {
- return mTextureId;
+ ByteBuffer bbVertices = ByteBuffer.allocateDirect(QUAD_COORDS.length * FLOAT_SIZE);
+ bbVertices.order(ByteOrder.nativeOrder());
+ quadVertices = bbVertices.asFloatBuffer();
+ quadVertices.put(QUAD_COORDS);
+ quadVertices.position(0);
+
+ ByteBuffer bbTexCoords =
+ ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
+ bbTexCoords.order(ByteOrder.nativeOrder());
+ quadTexCoord = bbTexCoords.asFloatBuffer();
+ quadTexCoord.put(QUAD_TEXCOORDS);
+ quadTexCoord.position(0);
+
+ ByteBuffer bbTexCoordsTransformed =
+ ByteBuffer.allocateDirect(numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
+ bbTexCoordsTransformed.order(ByteOrder.nativeOrder());
+ quadTexCoordTransformed = bbTexCoordsTransformed.asFloatBuffer();
+
+ int vertexShader =
+ ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, R.raw.screenquad_vertex);
+ int fragmentShader =
+ ShaderUtil.loadGLShader(
+ TAG, context, GLES20.GL_FRAGMENT_SHADER, R.raw.screenquad_fragment_oes);
+
+ quadProgram = GLES20.glCreateProgram();
+ GLES20.glAttachShader(quadProgram, vertexShader);
+ GLES20.glAttachShader(quadProgram, fragmentShader);
+ GLES20.glLinkProgram(quadProgram);
+ GLES20.glUseProgram(quadProgram);
+
+ ShaderUtil.checkGLError(TAG, "Program creation");
+
+ quadPositionParam = GLES20.glGetAttribLocation(quadProgram, "a_Position");
+ quadTexCoordParam = GLES20.glGetAttribLocation(quadProgram, "a_TexCoord");
+
+ ShaderUtil.checkGLError(TAG, "Program parameters");
+ }
+
+ /**
+ * Draws the AR background image. The image will be drawn such that virtual content rendered with
+ * the matrices provided by {@link com.google.ar.core.Camera#getViewMatrix(float[], int)} and
+ * {@link com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)} will
+ * accurately follow static physical objects. This must be called <b>before</b> drawing virtual
+ * content.
+ *
+ * @param frame The last {@code Frame} returned by {@link Session#update()}.
+ */
+ public void draw(Frame frame) {
+ // If display rotation changed (also includes view size change), we need to re-query the uv
+ // coordinates for the screen rect, as they may have changed as well.
+ if (frame.hasDisplayGeometryChanged()) {
+ frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
}
- /**
- * Allocates and initializes OpenGL resources needed by the background renderer. Must be
- * called on the OpenGL thread, typically in
- * {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10, EGLConfig)}.
- *
- * @param context Needed to access shader source.
- */
- public void createOnGlThread(Context context) {
- // Generate the background texture.
- int[] textures = new int[1];
- GLES20.glGenTextures(1, textures, 0);
- mTextureId = textures[0];
- int textureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
- GLES20.glBindTexture(textureTarget, mTextureId);
- GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
- GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
- GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
+ // No need to test or write depth, the screen quad has arbitrary depth, and is expected
+ // to be drawn first.
+ GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+ GLES20.glDepthMask(false);
- int numVertices = 4;
- if (numVertices != QUAD_COORDS.length / COORDS_PER_VERTEX) {
- throw new RuntimeException("Unexpected number of vertices in BackgroundRenderer.");
- }
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
- ByteBuffer bbVertices = ByteBuffer.allocateDirect(QUAD_COORDS.length * FLOAT_SIZE);
- bbVertices.order(ByteOrder.nativeOrder());
- mQuadVertices = bbVertices.asFloatBuffer();
- mQuadVertices.put(QUAD_COORDS);
- mQuadVertices.position(0);
+ GLES20.glUseProgram(quadProgram);
- ByteBuffer bbTexCoords = ByteBuffer.allocateDirect(
- numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
- bbTexCoords.order(ByteOrder.nativeOrder());
- mQuadTexCoord = bbTexCoords.asFloatBuffer();
- mQuadTexCoord.put(QUAD_TEXCOORDS);
- mQuadTexCoord.position(0);
+ // Set the vertex positions.
+ GLES20.glVertexAttribPointer(
+ quadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadVertices);
- ByteBuffer bbTexCoordsTransformed = ByteBuffer.allocateDirect(
- numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
- bbTexCoordsTransformed.order(ByteOrder.nativeOrder());
- mQuadTexCoordTransformed = bbTexCoordsTransformed.asFloatBuffer();
+ // Set the texture coordinates.
+ GLES20.glVertexAttribPointer(
+ quadTexCoordParam,
+ TEXCOORDS_PER_VERTEX,
+ GLES20.GL_FLOAT,
+ false,
+ 0,
+ quadTexCoordTransformed);
- int vertexShader = ShaderUtil.loadGLShader(TAG, context,
- GLES20.GL_VERTEX_SHADER, R.raw.screenquad_vertex);
- int fragmentShader = ShaderUtil.loadGLShader(TAG, context,
- GLES20.GL_FRAGMENT_SHADER, R.raw.screenquad_fragment_oes);
+ // Enable vertex arrays
+ GLES20.glEnableVertexAttribArray(quadPositionParam);
+ GLES20.glEnableVertexAttribArray(quadTexCoordParam);
- mQuadProgram = GLES20.glCreateProgram();
- GLES20.glAttachShader(mQuadProgram, vertexShader);
- GLES20.glAttachShader(mQuadProgram, fragmentShader);
- GLES20.glLinkProgram(mQuadProgram);
- GLES20.glUseProgram(mQuadProgram);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
- ShaderUtil.checkGLError(TAG, "Program creation");
+ // Disable vertex arrays
+ GLES20.glDisableVertexAttribArray(quadPositionParam);
+ GLES20.glDisableVertexAttribArray(quadTexCoordParam);
- mQuadPositionParam = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
- mQuadTexCoordParam = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
+ // Restore the depth state for further drawing.
+ GLES20.glDepthMask(true);
+ GLES20.glEnable(GLES20.GL_DEPTH_TEST);
- ShaderUtil.checkGLError(TAG, "Program parameters");
- }
+ ShaderUtil.checkGLError(TAG, "Draw");
+ }
- /**
- * Draws the AR background image. The image will be drawn such that virtual content rendered
- * with the matrices provided by {@link com.google.ar.core.Camera#getViewMatrix(float[], int)}
- * and {@link com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)} will
- * accurately follow static physical objects.
- * This must be called <b>before</b> drawing virtual content.
- *
- * @param frame The last {@code Frame} returned by {@link Session#update()}.
- */
- public void draw(Frame frame) {
- // If display rotation changed (also includes view size change), we need to re-query the uv
- // coordinates for the screen rect, as they may have changed as well.
- if (frame.hasDisplayGeometryChanged()) {
- frame.transformDisplayUvCoords(mQuadTexCoord, mQuadTexCoordTransformed);
- }
+ private static final float[] QUAD_COORDS =
+ new float[] {
+ -1.0f, -1.0f, 0.0f, -1.0f, +1.0f, 0.0f, +1.0f, -1.0f, 0.0f, +1.0f, +1.0f, 0.0f,
+ };
- // No need to test or write depth, the screen quad has arbitrary depth, and is expected
- // to be drawn first.
- GLES20.glDisable(GLES20.GL_DEPTH_TEST);
- GLES20.glDepthMask(false);
-
- GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
-
- GLES20.glUseProgram(mQuadProgram);
-
- // Set the vertex positions.
- GLES20.glVertexAttribPointer(
- mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mQuadVertices);
-
- // Set the texture coordinates.
- GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
- GLES20.GL_FLOAT, false, 0, mQuadTexCoordTransformed);
-
- // Enable vertex arrays
- GLES20.glEnableVertexAttribArray(mQuadPositionParam);
- GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);
-
- GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
-
- // Disable vertex arrays
- GLES20.glDisableVertexAttribArray(mQuadPositionParam);
- GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);
-
- // Restore the depth state for further drawing.
- GLES20.glDepthMask(true);
- GLES20.glEnable(GLES20.GL_DEPTH_TEST);
-
- ShaderUtil.checkGLError(TAG, "Draw");
- }
-
- private static final float[] QUAD_COORDS = new float[]{
- -1.0f, -1.0f, 0.0f,
- -1.0f, +1.0f, 0.0f,
- +1.0f, -1.0f, 0.0f,
- +1.0f, +1.0f, 0.0f,
- };
-
- private static final float[] QUAD_TEXCOORDS = new float[]{
- 0.0f, 1.0f,
- 0.0f, 0.0f,
- 1.0f, 1.0f,
- 1.0f, 0.0f,
- };
+ private static final float[] QUAD_TEXCOORDS =
+ new float[] {
+ 0.0f, 1.0f,
+ 0.0f, 0.0f,
+ 1.0f, 1.0f,
+ 1.0f, 0.0f,
+ };
}
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ObjectRenderer.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ObjectRenderer.java
index 3117588..a4b1288 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ObjectRenderer.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ObjectRenderer.java
@@ -33,335 +33,332 @@
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
-/**
- * Renders an object loaded from an OBJ file in OpenGL.
- */
+/** Renders an object loaded from an OBJ file in OpenGL. */
public class ObjectRenderer {
- private static final String TAG = ObjectRenderer.class.getSimpleName();
+ private static final String TAG = ObjectRenderer.class.getSimpleName();
- /**
- * Blend mode.
- *
- * @see #setBlendMode(BlendMode)
- */
- public enum BlendMode {
- /** Multiplies the destination color by the source alpha. */
- Shadow,
- /** Normal alpha blending. */
- Grid
+ /**
+ * Blend mode.
+ *
+ * @see #setBlendMode(BlendMode)
+ */
+ public enum BlendMode {
+ /** Multiplies the destination color by the source alpha. */
+ Shadow,
+ /** Normal alpha blending. */
+ Grid
+ }
+
+ private static final int COORDS_PER_VERTEX = 3;
+
+ // Note: the last component must be zero to avoid applying the translational part of the matrix.
+ private static final float[] LIGHT_DIRECTION = new float[] {0.250f, 0.866f, 0.433f, 0.0f};
+ private final float[] viewLightDirection = new float[4];
+
+ // Object vertex buffer variables.
+ private int vertexBufferId;
+ private int verticesBaseAddress;
+ private int texCoordsBaseAddress;
+ private int normalsBaseAddress;
+ private int indexBufferId;
+ private int indexCount;
+
+ private int program;
+ private final int[] textures = new int[1];
+
+ // Shader location: model view projection matrix.
+ private int modelViewUniform;
+ private int modelViewProjectionUniform;
+
+ // Shader location: object attributes.
+ private int positionAttribute;
+ private int normalAttribute;
+ private int texCoordAttribute;
+
+ // Shader location: texture sampler.
+ private int textureUniform;
+
+ // Shader location: environment properties.
+ private int lightingParametersUniform;
+
+ // Shader location: material properties.
+ private int materialParametersUniform;
+
+ private BlendMode blendMode = null;
+
+ // Temporary matrices allocated here to reduce number of allocations for each frame.
+ private final float[] modelMatrix = new float[16];
+ private final float[] modelViewMatrix = new float[16];
+ private final float[] modelViewProjectionMatrix = new float[16];
+
+ // Set some default material properties to use for lighting.
+ private float ambient = 0.3f;
+ private float diffuse = 1.0f;
+ private float specular = 1.0f;
+ private float specularPower = 6.0f;
+
+ public ObjectRenderer() {}
+
+ /**
+ * Creates and initializes OpenGL resources needed for rendering the model.
+ *
+ * @param context Context for loading the shader and below-named model and texture assets.
+ * @param objAssetName Name of the OBJ file containing the model geometry.
+ * @param diffuseTextureAssetName Name of the PNG file containing the diffuse texture map.
+ */
+ public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName)
+ throws IOException {
+ // Read the texture.
+ Bitmap textureBitmap =
+ BitmapFactory.decodeStream(context.getAssets().open(diffuseTextureAssetName));
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glGenTextures(textures.length, textures, 0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+
+ GLES20.glTexParameteri(
+ GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0);
+ GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+
+ textureBitmap.recycle();
+
+ ShaderUtil.checkGLError(TAG, "Texture loading");
+
+ // Read the obj file.
+ InputStream objInputStream = context.getAssets().open(objAssetName);
+ Obj obj = ObjReader.read(objInputStream);
+
+ // Prepare the Obj so that its structure is suitable for
+ // rendering with OpenGL:
+ // 1. Triangulate it
+ // 2. Make sure that texture coordinates are not ambiguous
+ // 3. Make sure that normals are not ambiguous
+ // 4. Convert it to single-indexed data
+ obj = ObjUtils.convertToRenderable(obj);
+
+ // OpenGL does not use Java arrays. ByteBuffers are used instead to provide data in a format
+ // that OpenGL understands.
+
+ // Obtain the data from the OBJ, as direct buffers:
+ IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3);
+ FloatBuffer vertices = ObjData.getVertices(obj);
+ FloatBuffer texCoords = ObjData.getTexCoords(obj, 2);
+ FloatBuffer normals = ObjData.getNormals(obj);
+
+ // Convert int indices to shorts for GL ES 2.0 compatibility
+ ShortBuffer indices =
+ ByteBuffer.allocateDirect(2 * wideIndices.limit())
+ .order(ByteOrder.nativeOrder())
+ .asShortBuffer();
+ while (wideIndices.hasRemaining()) {
+ indices.put((short) wideIndices.get());
+ }
+ indices.rewind();
+
+ int[] buffers = new int[2];
+ GLES20.glGenBuffers(2, buffers, 0);
+ vertexBufferId = buffers[0];
+ indexBufferId = buffers[1];
+
+ // Load vertex buffer
+ verticesBaseAddress = 0;
+ texCoordsBaseAddress = verticesBaseAddress + 4 * vertices.limit();
+ normalsBaseAddress = texCoordsBaseAddress + 4 * texCoords.limit();
+ final int totalBytes = normalsBaseAddress + 4 * normals.limit();
+
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId);
+ GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW);
+ GLES20.glBufferSubData(
+ GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices);
+ GLES20.glBufferSubData(
+ GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords);
+ GLES20.glBufferSubData(
+ GLES20.GL_ARRAY_BUFFER, normalsBaseAddress, 4 * normals.limit(), normals);
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
+
+ // Load index buffer
+ GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId);
+ indexCount = indices.limit();
+ GLES20.glBufferData(
+ GLES20.GL_ELEMENT_ARRAY_BUFFER, 2 * indexCount, indices, GLES20.GL_STATIC_DRAW);
+ GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
+
+ ShaderUtil.checkGLError(TAG, "OBJ buffer load");
+
+ final int vertexShader =
+ ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, R.raw.object_vertex);
+ final int fragmentShader =
+ ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, R.raw.object_fragment);
+
+ program = GLES20.glCreateProgram();
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, fragmentShader);
+ GLES20.glLinkProgram(program);
+ GLES20.glUseProgram(program);
+
+ ShaderUtil.checkGLError(TAG, "Program creation");
+
+ modelViewUniform = GLES20.glGetUniformLocation(program, "u_ModelView");
+ modelViewProjectionUniform = GLES20.glGetUniformLocation(program, "u_ModelViewProjection");
+
+ positionAttribute = GLES20.glGetAttribLocation(program, "a_Position");
+ normalAttribute = GLES20.glGetAttribLocation(program, "a_Normal");
+ texCoordAttribute = GLES20.glGetAttribLocation(program, "a_TexCoord");
+
+ textureUniform = GLES20.glGetUniformLocation(program, "u_Texture");
+
+ lightingParametersUniform = GLES20.glGetUniformLocation(program, "u_LightingParameters");
+ materialParametersUniform = GLES20.glGetUniformLocation(program, "u_MaterialParameters");
+
+ ShaderUtil.checkGLError(TAG, "Program parameters");
+
+ Matrix.setIdentityM(modelMatrix, 0);
+ }
+
+ /**
+ * Selects the blending mode for rendering.
+ *
+ * @param blendMode The blending mode. Null indicates no blending (opaque rendering).
+ */
+ public void setBlendMode(BlendMode blendMode) {
+ this.blendMode = blendMode;
+ }
+
+ /**
+ * Updates the object model matrix and applies scaling.
+ *
+ * @param modelMatrix A 4x4 model-to-world transformation matrix, stored in column-major order.
+ * @param scaleFactor A separate scaling factor to apply before the {@code modelMatrix}.
+ * @see android.opengl.Matrix
+ */
+ public void updateModelMatrix(float[] modelMatrix, float scaleFactor) {
+ float[] scaleMatrix = new float[16];
+ Matrix.setIdentityM(scaleMatrix, 0);
+ scaleMatrix[0] = scaleFactor;
+ scaleMatrix[5] = scaleFactor;
+ scaleMatrix[10] = scaleFactor;
+ Matrix.multiplyMM(this.modelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
+ }
+
+ /**
+ * Sets the surface characteristics of the rendered model.
+ *
+ * @param ambient Intensity of non-directional surface illumination.
+ * @param diffuse Diffuse (matte) surface reflectivity.
+ * @param specular Specular (shiny) surface reflectivity.
+ * @param specularPower Surface shininess. Larger values result in a smaller, sharper specular
+ * highlight.
+ */
+ public void setMaterialProperties(
+ float ambient, float diffuse, float specular, float specularPower) {
+ this.ambient = ambient;
+ this.diffuse = diffuse;
+ this.specular = specular;
+ this.specularPower = specularPower;
+ }
+
+ /**
+ * Draws the model.
+ *
+ * @param cameraView A 4x4 view matrix, in column-major order.
+ * @param cameraPerspective A 4x4 projection matrix, in column-major order.
+ * @param lightIntensity Illumination intensity. Combined with diffuse and specular material
+ * properties.
+ * @see #setBlendMode(BlendMode)
+ * @see #updateModelMatrix(float[], float)
+ * @see #setMaterialProperties(float, float, float, float)
+ * @see android.opengl.Matrix
+ */
+ public void draw(float[] cameraView, float[] cameraPerspective, float lightIntensity) {
+
+ ShaderUtil.checkGLError(TAG, "Before draw");
+
+ // Build the ModelView and ModelViewProjection matrices
+ // for calculating object position and light.
+ Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0);
+ Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0);
+
+ GLES20.glUseProgram(program);
+
+ // Set the lighting environment properties.
+ Matrix.multiplyMV(viewLightDirection, 0, modelViewMatrix, 0, LIGHT_DIRECTION, 0);
+ normalizeVec3(viewLightDirection);
+ GLES20.glUniform4f(
+ lightingParametersUniform,
+ viewLightDirection[0],
+ viewLightDirection[1],
+ viewLightDirection[2],
+ lightIntensity);
+
+ // Set the object material properties.
+ GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower);
+
+ // Attach the object texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+ GLES20.glUniform1i(textureUniform, 0);
+
+ // Set the vertex attributes.
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId);
+
+ GLES20.glVertexAttribPointer(
+ positionAttribute, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, verticesBaseAddress);
+ GLES20.glVertexAttribPointer(normalAttribute, 3, GLES20.GL_FLOAT, false, 0, normalsBaseAddress);
+ GLES20.glVertexAttribPointer(
+ texCoordAttribute, 2, GLES20.GL_FLOAT, false, 0, texCoordsBaseAddress);
+
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
+
+ // Set the ModelViewProjection matrix in the shader.
+ GLES20.glUniformMatrix4fv(modelViewUniform, 1, false, modelViewMatrix, 0);
+ GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjectionMatrix, 0);
+
+ // Enable vertex arrays
+ GLES20.glEnableVertexAttribArray(positionAttribute);
+ GLES20.glEnableVertexAttribArray(normalAttribute);
+ GLES20.glEnableVertexAttribArray(texCoordAttribute);
+
+ if (blendMode != null) {
+ GLES20.glDepthMask(false);
+ GLES20.glEnable(GLES20.GL_BLEND);
+ switch (blendMode) {
+ case Shadow:
+ // Multiplicative blending function for Shadow.
+ GLES20.glBlendFunc(GLES20.GL_ZERO, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+ break;
+ case Grid:
+ // Grid, additive blending function.
+ GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+ break;
+ }
}
- private static final int COORDS_PER_VERTEX = 3;
+ GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId);
+ GLES20.glDrawElements(GLES20.GL_TRIANGLES, indexCount, GLES20.GL_UNSIGNED_SHORT, 0);
+ GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
- // Note: the last component must be zero to avoid applying the translational part of the matrix.
- private static final float[] LIGHT_DIRECTION = new float[] { 0.250f, 0.866f, 0.433f, 0.0f };
- private float[] mViewLightDirection = new float[4];
-
- // Object vertex buffer variables.
- private int mVertexBufferId;
- private int mVerticesBaseAddress;
- private int mTexCoordsBaseAddress;
- private int mNormalsBaseAddress;
- private int mIndexBufferId;
- private int mIndexCount;
-
- private int mProgram;
- private int[] mTextures = new int[1];
-
- // Shader location: model view projection matrix.
- private int mModelViewUniform;
- private int mModelViewProjectionUniform;
-
- // Shader location: object attributes.
- private int mPositionAttribute;
- private int mNormalAttribute;
- private int mTexCoordAttribute;
-
- // Shader location: texture sampler.
- private int mTextureUniform;
-
- // Shader location: environment properties.
- private int mLightingParametersUniform;
-
- // Shader location: material properties.
- private int mMaterialParametersUniform;
-
- private BlendMode mBlendMode = null;
-
- // Temporary matrices allocated here to reduce number of allocations for each frame.
- private float[] mModelMatrix = new float[16];
- private float[] mModelViewMatrix = new float[16];
- private float[] mModelViewProjectionMatrix = new float[16];
-
- // Set some default material properties to use for lighting.
- private float mAmbient = 0.3f;
- private float mDiffuse = 1.0f;
- private float mSpecular = 1.0f;
- private float mSpecularPower = 6.0f;
-
- public ObjectRenderer() {
+ if (blendMode != null) {
+ GLES20.glDisable(GLES20.GL_BLEND);
+ GLES20.glDepthMask(true);
}
- /**
- * Creates and initializes OpenGL resources needed for rendering the model.
- *
- * @param context Context for loading the shader and below-named model and texture assets.
- * @param objAssetName Name of the OBJ file containing the model geometry.
- * @param diffuseTextureAssetName Name of the PNG file containing the diffuse texture map.
- */
- public void createOnGlThread(Context context, String objAssetName,
- String diffuseTextureAssetName) throws IOException {
- // Read the texture.
- Bitmap textureBitmap = BitmapFactory.decodeStream(
- context.getAssets().open(diffuseTextureAssetName));
+ // Disable vertex arrays
+ GLES20.glDisableVertexAttribArray(positionAttribute);
+ GLES20.glDisableVertexAttribArray(normalAttribute);
+ GLES20.glDisableVertexAttribArray(texCoordAttribute);
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
- GLES20.glGenTextures(mTextures.length, mTextures, 0);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
- GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
- GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
- GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0);
- GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ ShaderUtil.checkGLError(TAG, "After draw");
+ }
- textureBitmap.recycle();
-
- ShaderUtil.checkGLError(TAG, "Texture loading");
-
- // Read the obj file.
- InputStream objInputStream = context.getAssets().open(objAssetName);
- Obj obj = ObjReader.read(objInputStream);
-
- // Prepare the Obj so that its structure is suitable for
- // rendering with OpenGL:
- // 1. Triangulate it
- // 2. Make sure that texture coordinates are not ambiguous
- // 3. Make sure that normals are not ambiguous
- // 4. Convert it to single-indexed data
- obj = ObjUtils.convertToRenderable(obj);
-
- // OpenGL does not use Java arrays. ByteBuffers are used instead to provide data in a format
- // that OpenGL understands.
-
- // Obtain the data from the OBJ, as direct buffers:
- IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3);
- FloatBuffer vertices = ObjData.getVertices(obj);
- FloatBuffer texCoords = ObjData.getTexCoords(obj, 2);
- FloatBuffer normals = ObjData.getNormals(obj);
-
- // Convert int indices to shorts for GL ES 2.0 compatibility
- ShortBuffer indices = ByteBuffer.allocateDirect(2 * wideIndices.limit())
- .order(ByteOrder.nativeOrder()).asShortBuffer();
- while (wideIndices.hasRemaining()) {
- indices.put((short) wideIndices.get());
- }
- indices.rewind();
-
- int[] buffers = new int[2];
- GLES20.glGenBuffers(2, buffers, 0);
- mVertexBufferId = buffers[0];
- mIndexBufferId = buffers[1];
-
- // Load vertex buffer
- mVerticesBaseAddress = 0;
- mTexCoordsBaseAddress = mVerticesBaseAddress + 4 * vertices.limit();
- mNormalsBaseAddress = mTexCoordsBaseAddress + 4 * texCoords.limit();
- final int totalBytes = mNormalsBaseAddress + 4 * normals.limit();
-
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertexBufferId);
- GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW);
- GLES20.glBufferSubData(
- GLES20.GL_ARRAY_BUFFER, mVerticesBaseAddress, 4 * vertices.limit(), vertices);
- GLES20.glBufferSubData(
- GLES20.GL_ARRAY_BUFFER, mTexCoordsBaseAddress, 4 * texCoords.limit(), texCoords);
- GLES20.glBufferSubData(
- GLES20.GL_ARRAY_BUFFER, mNormalsBaseAddress, 4 * normals.limit(), normals);
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
-
- // Load index buffer
- GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, mIndexBufferId);
- mIndexCount = indices.limit();
- GLES20.glBufferData(
- GLES20.GL_ELEMENT_ARRAY_BUFFER, 2 * mIndexCount, indices, GLES20.GL_STATIC_DRAW);
- GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
-
- ShaderUtil.checkGLError(TAG, "OBJ buffer load");
-
- final int vertexShader = ShaderUtil.loadGLShader(TAG, context,
- GLES20.GL_VERTEX_SHADER, R.raw.object_vertex);
- final int fragmentShader = ShaderUtil.loadGLShader(TAG, context,
- GLES20.GL_FRAGMENT_SHADER, R.raw.object_fragment);
-
- mProgram = GLES20.glCreateProgram();
- GLES20.glAttachShader(mProgram, vertexShader);
- GLES20.glAttachShader(mProgram, fragmentShader);
- GLES20.glLinkProgram(mProgram);
- GLES20.glUseProgram(mProgram);
-
- ShaderUtil.checkGLError(TAG, "Program creation");
-
- mModelViewUniform = GLES20.glGetUniformLocation(mProgram, "u_ModelView");
- mModelViewProjectionUniform =
- GLES20.glGetUniformLocation(mProgram, "u_ModelViewProjection");
-
- mPositionAttribute = GLES20.glGetAttribLocation(mProgram, "a_Position");
- mNormalAttribute = GLES20.glGetAttribLocation(mProgram, "a_Normal");
- mTexCoordAttribute = GLES20.glGetAttribLocation(mProgram, "a_TexCoord");
-
- mTextureUniform = GLES20.glGetUniformLocation(mProgram, "u_Texture");
-
- mLightingParametersUniform = GLES20.glGetUniformLocation(mProgram, "u_LightingParameters");
- mMaterialParametersUniform = GLES20.glGetUniformLocation(mProgram, "u_MaterialParameters");
-
- ShaderUtil.checkGLError(TAG, "Program parameters");
-
- Matrix.setIdentityM(mModelMatrix, 0);
- }
-
- /**
- * Selects the blending mode for rendering.
- *
- * @param blendMode The blending mode. Null indicates no blending (opaque rendering).
- */
- public void setBlendMode(BlendMode blendMode) {
- mBlendMode = blendMode;
- }
-
- /**
- * Updates the object model matrix and applies scaling.
- *
- * @param modelMatrix A 4x4 model-to-world transformation matrix, stored in column-major order.
- * @param scaleFactor A separate scaling factor to apply before the {@code modelMatrix}.
- * @see android.opengl.Matrix
- */
- public void updateModelMatrix(float[] modelMatrix, float scaleFactor) {
- float[] scaleMatrix = new float[16];
- Matrix.setIdentityM(scaleMatrix, 0);
- scaleMatrix[0] = scaleFactor;
- scaleMatrix[5] = scaleFactor;
- scaleMatrix[10] = scaleFactor;
- Matrix.multiplyMM(mModelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
- }
-
- /**
- * Sets the surface characteristics of the rendered model.
- *
- * @param ambient Intensity of non-directional surface illumination.
- * @param diffuse Diffuse (matte) surface reflectivity.
- * @param specular Specular (shiny) surface reflectivity.
- * @param specularPower Surface shininess. Larger values result in a smaller, sharper
- * specular highlight.
- */
- public void setMaterialProperties(
- float ambient, float diffuse, float specular, float specularPower) {
- mAmbient = ambient;
- mDiffuse = diffuse;
- mSpecular = specular;
- mSpecularPower = specularPower;
- }
-
- /**
- * Draws the model.
- *
- * @param cameraView A 4x4 view matrix, in column-major order.
- * @param cameraPerspective A 4x4 projection matrix, in column-major order.
- * @param lightIntensity Illumination intensity. Combined with diffuse and specular material
- * properties.
- * @see #setBlendMode(BlendMode)
- * @see #updateModelMatrix(float[], float)
- * @see #setMaterialProperties(float, float, float, float)
- * @see android.opengl.Matrix
- */
- public void draw(float[] cameraView, float[] cameraPerspective, float lightIntensity) {
-
- ShaderUtil.checkGLError(TAG, "Before draw");
-
- // Build the ModelView and ModelViewProjection matrices
- // for calculating object position and light.
- Matrix.multiplyMM(mModelViewMatrix, 0, cameraView, 0, mModelMatrix, 0);
- Matrix.multiplyMM(mModelViewProjectionMatrix, 0, cameraPerspective, 0, mModelViewMatrix, 0);
-
- GLES20.glUseProgram(mProgram);
-
- // Set the lighting environment properties.
- Matrix.multiplyMV(mViewLightDirection, 0, mModelViewMatrix, 0, LIGHT_DIRECTION, 0);
- normalizeVec3(mViewLightDirection);
- GLES20.glUniform4f(mLightingParametersUniform,
- mViewLightDirection[0], mViewLightDirection[1], mViewLightDirection[2], lightIntensity);
-
- // Set the object material properties.
- GLES20.glUniform4f(mMaterialParametersUniform, mAmbient, mDiffuse, mSpecular,
- mSpecularPower);
-
- // Attach the object texture.
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);
- GLES20.glUniform1i(mTextureUniform, 0);
-
- // Set the vertex attributes.
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertexBufferId);
-
- GLES20.glVertexAttribPointer(
- mPositionAttribute, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mVerticesBaseAddress);
- GLES20.glVertexAttribPointer(
- mNormalAttribute, 3, GLES20.GL_FLOAT, false, 0, mNormalsBaseAddress);
- GLES20.glVertexAttribPointer(
- mTexCoordAttribute, 2, GLES20.GL_FLOAT, false, 0, mTexCoordsBaseAddress);
-
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
-
- // Set the ModelViewProjection matrix in the shader.
- GLES20.glUniformMatrix4fv(
- mModelViewUniform, 1, false, mModelViewMatrix, 0);
- GLES20.glUniformMatrix4fv(
- mModelViewProjectionUniform, 1, false, mModelViewProjectionMatrix, 0);
-
- // Enable vertex arrays
- GLES20.glEnableVertexAttribArray(mPositionAttribute);
- GLES20.glEnableVertexAttribArray(mNormalAttribute);
- GLES20.glEnableVertexAttribArray(mTexCoordAttribute);
-
- if (mBlendMode != null) {
- GLES20.glDepthMask(false);
- GLES20.glEnable(GLES20.GL_BLEND);
- switch (mBlendMode) {
- case Shadow:
- // Multiplicative blending function for Shadow.
- GLES20.glBlendFunc(GLES20.GL_ZERO, GLES20.GL_ONE_MINUS_SRC_ALPHA);
- break;
- case Grid:
- // Grid, additive blending function.
- GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
- break;
- }
- }
-
- GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, mIndexBufferId);
- GLES20.glDrawElements(GLES20.GL_TRIANGLES, mIndexCount, GLES20.GL_UNSIGNED_SHORT, 0);
- GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
-
- if (mBlendMode != null) {
- GLES20.glDisable(GLES20.GL_BLEND);
- GLES20.glDepthMask(true);
- }
-
- // Disable vertex arrays
- GLES20.glDisableVertexAttribArray(mPositionAttribute);
- GLES20.glDisableVertexAttribArray(mNormalAttribute);
- GLES20.glDisableVertexAttribArray(mTexCoordAttribute);
-
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
-
- ShaderUtil.checkGLError(TAG, "After draw");
- }
-
- private static void normalizeVec3(float[] v) {
- float reciprocalLength = 1.0f / (float) Math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
- v[0] *= reciprocalLength;
- v[1] *= reciprocalLength;
- v[2] *= reciprocalLength;
- }
+ private static void normalizeVec3(float[] v) {
+ float reciprocalLength = 1.0f / (float) Math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
+ v[0] *= reciprocalLength;
+ v[1] *= reciprocalLength;
+ v[2] *= reciprocalLength;
+ }
}
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/PlaneRenderer.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/PlaneRenderer.java
index 636d971..2935acf 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/PlaneRenderer.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/PlaneRenderer.java
@@ -21,11 +21,10 @@
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.opengl.Matrix;
-
import com.google.ar.core.Camera;
import com.google.ar.core.Plane;
import com.google.ar.core.Pose;
-import com.google.ar.core.Trackable.TrackingState;
+import com.google.ar.core.TrackingState;
import com.google.ar.core.examples.java.helloar.R;
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -42,387 +41,393 @@
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
-/**
- * Renders the detected AR planes.
- */
+/** Renders the detected AR planes. */
public class PlaneRenderer {
- private static final String TAG = PlaneRenderer.class.getSimpleName();
+ private static final String TAG = PlaneRenderer.class.getSimpleName();
- private static final int BYTES_PER_FLOAT = Float.SIZE / 8;
- private static final int BYTES_PER_SHORT = Short.SIZE / 8;
- private static final int COORDS_PER_VERTEX = 3; // x, z, alpha
+ private static final int BYTES_PER_FLOAT = Float.SIZE / 8;
+ private static final int BYTES_PER_SHORT = Short.SIZE / 8;
+ private static final int COORDS_PER_VERTEX = 3; // x, z, alpha
- private static final int VERTS_PER_BOUNDARY_VERT = 2;
- private static final int INDICES_PER_BOUNDARY_VERT = 3;
- private static final int INITIAL_BUFFER_BOUNDARY_VERTS = 64;
+ private static final int VERTS_PER_BOUNDARY_VERT = 2;
+ private static final int INDICES_PER_BOUNDARY_VERT = 3;
+ private static final int INITIAL_BUFFER_BOUNDARY_VERTS = 64;
- private static final int INITIAL_VERTEX_BUFFER_SIZE_BYTES =
- BYTES_PER_FLOAT * COORDS_PER_VERTEX * VERTS_PER_BOUNDARY_VERT
- * INITIAL_BUFFER_BOUNDARY_VERTS;
+ private static final int INITIAL_VERTEX_BUFFER_SIZE_BYTES =
+ BYTES_PER_FLOAT * COORDS_PER_VERTEX * VERTS_PER_BOUNDARY_VERT * INITIAL_BUFFER_BOUNDARY_VERTS;
- private static final int INITIAL_INDEX_BUFFER_SIZE_BYTES =
- BYTES_PER_SHORT * INDICES_PER_BOUNDARY_VERT * INDICES_PER_BOUNDARY_VERT
- * INITIAL_BUFFER_BOUNDARY_VERTS;
+ private static final int INITIAL_INDEX_BUFFER_SIZE_BYTES =
+ BYTES_PER_SHORT
+ * INDICES_PER_BOUNDARY_VERT
+ * INDICES_PER_BOUNDARY_VERT
+ * INITIAL_BUFFER_BOUNDARY_VERTS;
- private static final float FADE_RADIUS_M = 0.25f;
- private static final float DOTS_PER_METER = 10.0f;
- private static final float EQUILATERAL_TRIANGLE_SCALE = (float) (1 / Math.sqrt(3));
+ private static final float FADE_RADIUS_M = 0.25f;
+ private static final float DOTS_PER_METER = 10.0f;
+ private static final float EQUILATERAL_TRIANGLE_SCALE = (float) (1 / Math.sqrt(3));
- // Using the "signed distance field" approach to render sharp lines and circles.
- // {dotThreshold, lineThreshold, lineFadeSpeed, occlusionScale}
- // dotThreshold/lineThreshold: red/green intensity above which dots/lines are present
- // lineFadeShrink: lines will fade in between alpha = 1-(1/lineFadeShrink) and 1.0
- // occlusionShrink: occluded planes will fade out between alpha = 0 and 1/occlusionShrink
- private static final float[] GRID_CONTROL = {0.2f, 0.4f, 2.0f, 1.5f};
+ // Using the "signed distance field" approach to render sharp lines and circles.
+ // {dotThreshold, lineThreshold, lineFadeShrink, occlusionShrink}
+ // dotThreshold/lineThreshold: red/green intensity above which dots/lines are present
+ // lineFadeShrink: lines will fade in between alpha = 1-(1/lineFadeShrink) and 1.0
+ // occlusionShrink: occluded planes will fade out between alpha = 0 and 1/occlusionShrink
+ private static final float[] GRID_CONTROL = {0.2f, 0.4f, 2.0f, 1.5f};
- private int mPlaneProgram;
- private int[] mTextures = new int[1];
+ private int planeProgram;
+ private final int[] textures = new int[1];
- private int mPlaneXZPositionAlphaAttribute;
+ private int planeXZPositionAlphaAttribute;
- private int mPlaneModelUniform;
- private int mPlaneModelViewProjectionUniform;
- private int mTextureUniform;
- private int mLineColorUniform;
- private int mDotColorUniform;
- private int mGridControlUniform;
- private int mPlaneUvMatrixUniform;
+ private int planeModelUniform;
+ private int planeModelViewProjectionUniform;
+ private int textureUniform;
+ private int lineColorUniform;
+ private int dotColorUniform;
+ private int gridControlUniform;
+ private int planeUvMatrixUniform;
- private FloatBuffer mVertexBuffer = ByteBuffer.allocateDirect(INITIAL_VERTEX_BUFFER_SIZE_BYTES)
- .order(ByteOrder.nativeOrder()).asFloatBuffer();
- private ShortBuffer mIndexBuffer = ByteBuffer.allocateDirect(INITIAL_INDEX_BUFFER_SIZE_BYTES)
- .order(ByteOrder.nativeOrder()).asShortBuffer();
+ private FloatBuffer vertexBuffer =
+ ByteBuffer.allocateDirect(INITIAL_VERTEX_BUFFER_SIZE_BYTES)
+ .order(ByteOrder.nativeOrder())
+ .asFloatBuffer();
+ private ShortBuffer indexBuffer =
+ ByteBuffer.allocateDirect(INITIAL_INDEX_BUFFER_SIZE_BYTES)
+ .order(ByteOrder.nativeOrder())
+ .asShortBuffer();
- // Temporary lists/matrices allocated here to reduce number of allocations for each frame.
- private float[] mModelMatrix = new float[16];
- private float[] mModelViewMatrix = new float[16];
- private float[] mModelViewProjectionMatrix = new float[16];
- private float[] mPlaneColor = new float[4];
- private float[] mPlaneAngleUvMatrix = new float[4]; // 2x2 rotation matrix applied to uv coords.
+ // Temporary lists/matrices allocated here to reduce number of allocations for each frame.
+ private final float[] modelMatrix = new float[16];
+ private final float[] modelViewMatrix = new float[16];
+ private final float[] modelViewProjectionMatrix = new float[16];
+ private final float[] planeColor = new float[4];
+ private final float[] planeAngleUvMatrix =
+ new float[4]; // 2x2 rotation matrix applied to uv coords.
- private Map<Plane, Integer> mPlaneIndexMap = new HashMap<>();
+ private final Map<Plane, Integer> planeIndexMap = new HashMap<>();
- public PlaneRenderer() {
+ public PlaneRenderer() {}
+
+ /**
+ * Allocates and initializes OpenGL resources needed by the plane renderer. Must be called on the
+ * OpenGL thread, typically in {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10, EGLConfig)}.
+ *
+ * @param context Needed to access shader source and texture PNG.
+ * @param gridDistanceTextureName Name of the PNG file containing the grid texture.
+ */
+ public void createOnGlThread(Context context, String gridDistanceTextureName) throws IOException {
+ int vertexShader =
+ ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, R.raw.plane_vertex);
+ int passthroughShader =
+ ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, R.raw.plane_fragment);
+
+ planeProgram = GLES20.glCreateProgram();
+ GLES20.glAttachShader(planeProgram, vertexShader);
+ GLES20.glAttachShader(planeProgram, passthroughShader);
+ GLES20.glLinkProgram(planeProgram);
+ GLES20.glUseProgram(planeProgram);
+
+ ShaderUtil.checkGLError(TAG, "Program creation");
+
+ // Read the texture.
+ Bitmap textureBitmap =
+ BitmapFactory.decodeStream(context.getAssets().open(gridDistanceTextureName));
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glGenTextures(textures.length, textures, 0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+
+ GLES20.glTexParameteri(
+ GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0);
+ GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+
+ ShaderUtil.checkGLError(TAG, "Texture loading");
+
+ planeXZPositionAlphaAttribute = GLES20.glGetAttribLocation(planeProgram, "a_XZPositionAlpha");
+
+ planeModelUniform = GLES20.glGetUniformLocation(planeProgram, "u_Model");
+ planeModelViewProjectionUniform =
+ GLES20.glGetUniformLocation(planeProgram, "u_ModelViewProjection");
+ textureUniform = GLES20.glGetUniformLocation(planeProgram, "u_Texture");
+ lineColorUniform = GLES20.glGetUniformLocation(planeProgram, "u_lineColor");
+ dotColorUniform = GLES20.glGetUniformLocation(planeProgram, "u_dotColor");
+ gridControlUniform = GLES20.glGetUniformLocation(planeProgram, "u_gridControl");
+ planeUvMatrixUniform = GLES20.glGetUniformLocation(planeProgram, "u_PlaneUvMatrix");
+
+ ShaderUtil.checkGLError(TAG, "Program parameters");
+ }
+
+ /** Updates the plane model transform matrix and extents. */
+ private void updatePlaneParameters(
+ float[] planeMatrix, float extentX, float extentZ, FloatBuffer boundary) {
+ System.arraycopy(planeMatrix, 0, modelMatrix, 0, 16);
+ if (boundary == null) {
+ vertexBuffer.limit(0);
+ indexBuffer.limit(0);
+ return;
}
- /**
- * Allocates and initializes OpenGL resources needed by the plane renderer. Must be
- * called on the OpenGL thread, typically in
- * {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10, EGLConfig)}.
- *
- * @param context Needed to access shader source and texture PNG.
- * @param gridDistanceTextureName Name of the PNG file containing the grid texture.
- */
- public void createOnGlThread(Context context, String gridDistanceTextureName)
- throws IOException {
- int vertexShader = ShaderUtil.loadGLShader(TAG, context,
- GLES20.GL_VERTEX_SHADER, R.raw.plane_vertex);
- int passthroughShader = ShaderUtil.loadGLShader(TAG, context,
- GLES20.GL_FRAGMENT_SHADER, R.raw.plane_fragment);
+ // Generate a new set of vertices and a corresponding triangle strip index set so that
+ // the plane boundary polygon has a fading edge. This is done by making a copy of the
+ // boundary polygon vertices and scaling it down around center to push it inwards. Then
+ // the index buffer is setup accordingly.
+ boundary.rewind();
+ int boundaryVertices = boundary.limit() / 2;
+ int numVertices;
+ int numIndices;
- mPlaneProgram = GLES20.glCreateProgram();
- GLES20.glAttachShader(mPlaneProgram, vertexShader);
- GLES20.glAttachShader(mPlaneProgram, passthroughShader);
- GLES20.glLinkProgram(mPlaneProgram);
- GLES20.glUseProgram(mPlaneProgram);
+ numVertices = boundaryVertices * VERTS_PER_BOUNDARY_VERT;
+ // drawn as GL_TRIANGLE_STRIP with 3n-2 triangles (n-2 for fill, 2n for perimeter).
+ numIndices = boundaryVertices * INDICES_PER_BOUNDARY_VERT;
- ShaderUtil.checkGLError(TAG, "Program creation");
+ if (vertexBuffer.capacity() < numVertices * COORDS_PER_VERTEX) {
+ int size = vertexBuffer.capacity();
+ while (size < numVertices * COORDS_PER_VERTEX) {
+ size *= 2;
+ }
+ vertexBuffer =
+ ByteBuffer.allocateDirect(BYTES_PER_FLOAT * size)
+ .order(ByteOrder.nativeOrder())
+ .asFloatBuffer();
+ }
+ vertexBuffer.rewind();
+ vertexBuffer.limit(numVertices * COORDS_PER_VERTEX);
- // Read the texture.
- Bitmap textureBitmap = BitmapFactory.decodeStream(
- context.getAssets().open(gridDistanceTextureName));
+ if (indexBuffer.capacity() < numIndices) {
+ int size = indexBuffer.capacity();
+ while (size < numIndices) {
+ size *= 2;
+ }
+ indexBuffer =
+ ByteBuffer.allocateDirect(BYTES_PER_SHORT * size)
+ .order(ByteOrder.nativeOrder())
+ .asShortBuffer();
+ }
+ indexBuffer.rewind();
+ indexBuffer.limit(numIndices);
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
- GLES20.glGenTextures(mTextures.length, mTextures, 0);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);
+ // Note: when either dimension of the bounding box is smaller than 2*FADE_RADIUS_M we
+ // generate a bunch of 0-area triangles. These don't get rendered though so it works
+ // out ok.
+ float xScale = Math.max((extentX - 2 * FADE_RADIUS_M) / extentX, 0.0f);
+ float zScale = Math.max((extentZ - 2 * FADE_RADIUS_M) / extentZ, 0.0f);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
- GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
- GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
- GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
- GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0);
- GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
-
- ShaderUtil.checkGLError(TAG, "Texture loading");
-
- mPlaneXZPositionAlphaAttribute = GLES20.glGetAttribLocation(mPlaneProgram,
- "a_XZPositionAlpha");
-
- mPlaneModelUniform = GLES20.glGetUniformLocation(mPlaneProgram, "u_Model");
- mPlaneModelViewProjectionUniform =
- GLES20.glGetUniformLocation(mPlaneProgram, "u_ModelViewProjection");
- mTextureUniform = GLES20.glGetUniformLocation(mPlaneProgram, "u_Texture");
- mLineColorUniform = GLES20.glGetUniformLocation(mPlaneProgram, "u_lineColor");
- mDotColorUniform = GLES20.glGetUniformLocation(mPlaneProgram, "u_dotColor");
- mGridControlUniform = GLES20.glGetUniformLocation(mPlaneProgram, "u_gridControl");
- mPlaneUvMatrixUniform = GLES20.glGetUniformLocation(mPlaneProgram, "u_PlaneUvMatrix");
-
- ShaderUtil.checkGLError(TAG, "Program parameters");
+ while (boundary.hasRemaining()) {
+ float x = boundary.get();
+ float z = boundary.get();
+ vertexBuffer.put(x);
+ vertexBuffer.put(z);
+ vertexBuffer.put(0.0f);
+ vertexBuffer.put(x * xScale);
+ vertexBuffer.put(z * zScale);
+ vertexBuffer.put(1.0f);
}
- /**
- * Updates the plane model transform matrix and extents.
- */
- private void updatePlaneParameters(float[] planeMatrix, float extentX, float extentZ,
- FloatBuffer boundary) {
- System.arraycopy(planeMatrix, 0, mModelMatrix, 0, 16);
- if (boundary == null) {
- mVertexBuffer.limit(0);
- mIndexBuffer.limit(0);
- return;
- }
-
- // Generate a new set of vertices and a corresponding triangle strip index set so that
- // the plane boundary polygon has a fading edge. This is done by making a copy of the
- // boundary polygon vertices and scaling it down around center to push it inwards. Then
- // the index buffer is setup accordingly.
- boundary.rewind();
- int boundaryVertices = boundary.limit() / 2;
- int numVertices;
- int numIndices;
-
- numVertices = boundaryVertices * VERTS_PER_BOUNDARY_VERT;
- // drawn as GL_TRIANGLE_STRIP with 3n-2 triangles (n-2 for fill, 2n for perimeter).
- numIndices = boundaryVertices * INDICES_PER_BOUNDARY_VERT;
-
- if (mVertexBuffer.capacity() < numVertices * COORDS_PER_VERTEX) {
- int size = mVertexBuffer.capacity();
- while (size < numVertices * COORDS_PER_VERTEX) {
- size *= 2;
- }
- mVertexBuffer = ByteBuffer.allocateDirect(BYTES_PER_FLOAT * size)
- .order(ByteOrder.nativeOrder()).asFloatBuffer();
- }
- mVertexBuffer.rewind();
- mVertexBuffer.limit(numVertices * COORDS_PER_VERTEX);
-
-
- if (mIndexBuffer.capacity() < numIndices) {
- int size = mIndexBuffer.capacity();
- while (size < numIndices) {
- size *= 2;
- }
- mIndexBuffer = ByteBuffer.allocateDirect(BYTES_PER_SHORT * size)
- .order(ByteOrder.nativeOrder()).asShortBuffer();
- }
- mIndexBuffer.rewind();
- mIndexBuffer.limit(numIndices);
-
- // Note: when either dimension of the bounding box is smaller than 2*FADE_RADIUS_M we
- // generate a bunch of 0-area triangles. These don't get rendered though so it works
- // out ok.
- float xScale = Math.max((extentX - 2 * FADE_RADIUS_M) / extentX, 0.0f);
- float zScale = Math.max((extentZ - 2 * FADE_RADIUS_M) / extentZ, 0.0f);
-
- while (boundary.hasRemaining()) {
- float x = boundary.get();
- float z = boundary.get();
- mVertexBuffer.put(x);
- mVertexBuffer.put(z);
- mVertexBuffer.put(0.0f);
- mVertexBuffer.put(x * xScale);
- mVertexBuffer.put(z * zScale);
- mVertexBuffer.put(1.0f);
- }
-
- // step 1, perimeter
- mIndexBuffer.put((short) ((boundaryVertices - 1) * 2));
- for (int i = 0; i < boundaryVertices; ++i) {
- mIndexBuffer.put((short) (i * 2));
- mIndexBuffer.put((short) (i * 2 + 1));
- }
- mIndexBuffer.put((short) 1);
- // This leaves us on the interior edge of the perimeter between the inset vertices
- // for boundary verts n-1 and 0.
-
- // step 2, interior:
- for (int i = 1; i < boundaryVertices / 2; ++i) {
- mIndexBuffer.put((short) ((boundaryVertices - 1 - i) * 2 + 1));
- mIndexBuffer.put((short) (i * 2 + 1));
- }
- if (boundaryVertices % 2 != 0) {
- mIndexBuffer.put((short) ((boundaryVertices / 2) * 2 + 1));
- }
+ // step 1, perimeter
+ indexBuffer.put((short) ((boundaryVertices - 1) * 2));
+ for (int i = 0; i < boundaryVertices; ++i) {
+ indexBuffer.put((short) (i * 2));
+ indexBuffer.put((short) (i * 2 + 1));
}
+ indexBuffer.put((short) 1);
+ // This leaves us on the interior edge of the perimeter between the inset vertices
+ // for boundary verts n-1 and 0.
- private void draw(float[] cameraView, float[] cameraPerspective) {
- // Build the ModelView and ModelViewProjection matrices
- // for calculating cube position and light.
- Matrix.multiplyMM(mModelViewMatrix, 0, cameraView, 0, mModelMatrix, 0);
- Matrix.multiplyMM(mModelViewProjectionMatrix, 0, cameraPerspective, 0, mModelViewMatrix, 0);
-
- // Set the position of the plane
- mVertexBuffer.rewind();
- GLES20.glVertexAttribPointer(
- mPlaneXZPositionAlphaAttribute, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false,
- BYTES_PER_FLOAT * COORDS_PER_VERTEX, mVertexBuffer);
-
- // Set the Model and ModelViewProjection matrices in the shader.
- GLES20.glUniformMatrix4fv(mPlaneModelUniform, 1, false, mModelMatrix, 0);
- GLES20.glUniformMatrix4fv(
- mPlaneModelViewProjectionUniform, 1, false, mModelViewProjectionMatrix, 0);
-
- mIndexBuffer.rewind();
- GLES20.glDrawElements(GLES20.GL_TRIANGLE_STRIP, mIndexBuffer.limit(),
- GLES20.GL_UNSIGNED_SHORT, mIndexBuffer);
- ShaderUtil.checkGLError(TAG, "Drawing plane");
+ // step 2, interior:
+ for (int i = 1; i < boundaryVertices / 2; ++i) {
+ indexBuffer.put((short) ((boundaryVertices - 1 - i) * 2 + 1));
+ indexBuffer.put((short) (i * 2 + 1));
}
-
- static class SortablePlane {
- final float mDistance;
- final Plane mPlane;
- SortablePlane(float distance, Plane plane) {
- this.mDistance = distance;
- this.mPlane = plane;
- }
+ if (boundaryVertices % 2 != 0) {
+ indexBuffer.put((short) ((boundaryVertices / 2) * 2 + 1));
}
+ }
- /**
- * Draws the collection of tracked planes, with closer planes hiding more distant ones.
- *
- * @param allPlanes The collection of planes to draw.
- * @param cameraPose The pose of the camera, as returned by {@link Camera#getPose()}
- * @param cameraPerspective The projection matrix, as returned by
- * {@link Camera#getProjectionMatrix(float[], int, float, float)}
- */
- public void drawPlanes(Collection<Plane> allPlanes, Pose cameraPose,
- float[] cameraPerspective) {
- // Planes must be sorted by distance from camera so that we draw closer planes first, and
- // they occlude the farther planes.
- List<SortablePlane> sortedPlanes = new ArrayList<>();
- float[] normal = new float[3];
- float cameraX = cameraPose.tx();
- float cameraY = cameraPose.ty();
- float cameraZ = cameraPose.tz();
- for (Plane plane : allPlanes) {
- if (plane.getTrackingState() != TrackingState.TRACKING
- || plane.getSubsumedBy() != null) {
- continue;
- }
+ private void draw(float[] cameraView, float[] cameraPerspective) {
+ // Build the ModelView and ModelViewProjection matrices
+ // for calculating cube position and light.
+ Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0);
+ Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0);
- Pose center = plane.getCenterPose();
- // Get transformed Y axis of plane's coordinate system.
- center.getTransformedAxis(1, 1.0f, normal, 0);
- // Compute dot product of plane's normal with vector from camera to plane center.
- float distance = (cameraX - center.tx()) * normal[0]
- + (cameraY - center.ty()) * normal[1] + (cameraZ - center.tz()) * normal[2];
- if (distance < 0) { // Plane is back-facing.
- continue;
- }
- sortedPlanes.add(new SortablePlane(distance, plane));
- }
- Collections.sort(sortedPlanes, new Comparator<SortablePlane>() {
- @Override
- public int compare(SortablePlane a, SortablePlane b) {
- return Float.compare(a.mDistance, b.mDistance);
- }
+ // Set the position of the plane
+ vertexBuffer.rewind();
+ GLES20.glVertexAttribPointer(
+ planeXZPositionAlphaAttribute,
+ COORDS_PER_VERTEX,
+ GLES20.GL_FLOAT,
+ false,
+ BYTES_PER_FLOAT * COORDS_PER_VERTEX,
+ vertexBuffer);
+
+ // Set the Model and ModelViewProjection matrices in the shader.
+ GLES20.glUniformMatrix4fv(planeModelUniform, 1, false, modelMatrix, 0);
+ GLES20.glUniformMatrix4fv(
+ planeModelViewProjectionUniform, 1, false, modelViewProjectionMatrix, 0);
+
+ indexBuffer.rewind();
+ GLES20.glDrawElements(
+ GLES20.GL_TRIANGLE_STRIP, indexBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, indexBuffer);
+ ShaderUtil.checkGLError(TAG, "Drawing plane");
+ }
+
+ static class SortablePlane {
+ final float distance;
+ final Plane plane;
+
+ SortablePlane(float distance, Plane plane) {
+ this.distance = distance;
+ this.plane = plane;
+ }
+ }
+
+ /**
+ * Draws the collection of tracked planes, with closer planes hiding more distant ones.
+ *
+ * @param allPlanes The collection of planes to draw.
+ * @param cameraPose The pose of the camera, as returned by {@link Camera#getPose()}
+ * @param cameraPerspective The projection matrix, as returned by {@link
+ * Camera#getProjectionMatrix(float[], int, float, float)}
+ */
+ public void drawPlanes(Collection<Plane> allPlanes, Pose cameraPose, float[] cameraPerspective) {
+ // Planes must be sorted by distance from camera so that we draw closer planes first, and
+ // they occlude the farther planes.
+ List<SortablePlane> sortedPlanes = new ArrayList<>();
+ float[] normal = new float[3];
+ float cameraX = cameraPose.tx();
+ float cameraY = cameraPose.ty();
+ float cameraZ = cameraPose.tz();
+ for (Plane plane : allPlanes) {
+ if (plane.getTrackingState() != TrackingState.TRACKING || plane.getSubsumedBy() != null) {
+ continue;
+ }
+
+ Pose center = plane.getCenterPose();
+ // Get transformed Y axis of plane's coordinate system.
+ center.getTransformedAxis(1, 1.0f, normal, 0);
+ // Compute dot product of plane's normal with vector from camera to plane center.
+ float distance =
+ (cameraX - center.tx()) * normal[0]
+ + (cameraY - center.ty()) * normal[1]
+ + (cameraZ - center.tz()) * normal[2];
+ if (distance < 0) { // Plane is back-facing.
+ continue;
+ }
+ sortedPlanes.add(new SortablePlane(distance, plane));
+ }
+ Collections.sort(
+ sortedPlanes,
+ new Comparator<SortablePlane>() {
+ @Override
+ public int compare(SortablePlane a, SortablePlane b) {
+ return Float.compare(a.distance, b.distance);
+ }
});
+ float[] cameraView = new float[16];
+ cameraPose.inverse().toMatrix(cameraView, 0);
- float[] cameraView = new float[16];
- cameraPose.inverse().toMatrix(cameraView, 0);
+ // Planes are drawn with additive blending, masked by the alpha channel for occlusion.
- // Planes are drawn with additive blending, masked by the alpha channel for occlusion.
+ // Start by clearing the alpha channel of the color buffer to 1.0.
+ GLES20.glClearColor(1, 1, 1, 1);
+ GLES20.glColorMask(false, false, false, true);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ GLES20.glColorMask(true, true, true, true);
- // Start by clearing the alpha channel of the color buffer to 1.0.
- GLES20.glClearColor(1, 1, 1, 1);
- GLES20.glColorMask(false, false, false, true);
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
- GLES20.glColorMask(true, true, true, true);
+ // Disable depth write.
+ GLES20.glDepthMask(false);
- // Disable depth write.
- GLES20.glDepthMask(false);
+ // Additive blending, masked by alpha channel, clearing alpha channel.
+ GLES20.glEnable(GLES20.GL_BLEND);
+ GLES20.glBlendFuncSeparate(
+ GLES20.GL_DST_ALPHA, GLES20.GL_ONE, // RGB (src, dest)
+ GLES20.GL_ZERO, GLES20.GL_ONE_MINUS_SRC_ALPHA); // ALPHA (src, dest)
- // Additive blending, masked by alpha channel, clearing alpha channel.
- GLES20.glEnable(GLES20.GL_BLEND);
- GLES20.glBlendFuncSeparate(
- GLES20.GL_DST_ALPHA, GLES20.GL_ONE, // RGB (src, dest)
- GLES20.GL_ZERO, GLES20.GL_ONE_MINUS_SRC_ALPHA); // ALPHA (src, dest)
+ // Set up the shader.
+ GLES20.glUseProgram(planeProgram);
- // Set up the shader.
- GLES20.glUseProgram(mPlaneProgram);
+ // Attach the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+ GLES20.glUniform1i(textureUniform, 0);
- // Attach the texture.
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);
- GLES20.glUniform1i(mTextureUniform, 0);
+ // Shared fragment uniforms.
+ GLES20.glUniform4fv(gridControlUniform, 1, GRID_CONTROL, 0);
- // Shared fragment uniforms.
- GLES20.glUniform4fv(mGridControlUniform, 1, GRID_CONTROL, 0);
+ // Enable vertex arrays
+ GLES20.glEnableVertexAttribArray(planeXZPositionAlphaAttribute);
- // Enable vertex arrays
- GLES20.glEnableVertexAttribArray(mPlaneXZPositionAlphaAttribute);
+ ShaderUtil.checkGLError(TAG, "Setting up to draw planes");
- ShaderUtil.checkGLError(TAG, "Setting up to draw planes");
+ for (SortablePlane sortedPlane : sortedPlanes) {
+ Plane plane = sortedPlane.plane;
+ float[] planeMatrix = new float[16];
+ plane.getCenterPose().toMatrix(planeMatrix, 0);
- for (SortablePlane sortedPlane : sortedPlanes) {
- Plane plane = sortedPlane.mPlane;
- float[] planeMatrix = new float[16];
- plane.getCenterPose().toMatrix(planeMatrix, 0);
+ updatePlaneParameters(
+ planeMatrix, plane.getExtentX(), plane.getExtentZ(), plane.getPolygon());
- updatePlaneParameters(
- planeMatrix, plane.getExtentX(), plane.getExtentZ(), plane.getPolygon());
+ // Get plane index. Keep a map to assign same indices to same planes.
+ Integer planeIndex = planeIndexMap.get(plane);
+ if (planeIndex == null) {
+ planeIndex = planeIndexMap.size();
+ planeIndexMap.put(plane, planeIndex);
+ }
- // Get plane index. Keep a map to assign same indices to same planes.
- Integer planeIndex = mPlaneIndexMap.get(plane);
- if (planeIndex == null) {
- planeIndex = mPlaneIndexMap.size();
- mPlaneIndexMap.put(plane, planeIndex);
- }
+ // Set plane color. Computed deterministically from the Plane index.
+ int colorIndex = planeIndex % PLANE_COLORS_RGBA.length;
+ colorRgbaToFloat(planeColor, PLANE_COLORS_RGBA[colorIndex]);
+ GLES20.glUniform4fv(lineColorUniform, 1, planeColor, 0);
+ GLES20.glUniform4fv(dotColorUniform, 1, planeColor, 0);
- // Set plane color. Computed deterministically from the Plane index.
- int colorIndex = planeIndex % PLANE_COLORS_RGBA.length;
- colorRgbaToFloat(mPlaneColor, PLANE_COLORS_RGBA[colorIndex]);
- GLES20.glUniform4fv(mLineColorUniform, 1, mPlaneColor, 0);
- GLES20.glUniform4fv(mDotColorUniform, 1, mPlaneColor, 0);
+ // Each plane will have its own angle offset from others, to make them easier to
+ // distinguish. Compute a 2x2 rotation matrix from the angle.
+ float angleRadians = planeIndex * 0.144f;
+ float uScale = DOTS_PER_METER;
+ float vScale = DOTS_PER_METER * EQUILATERAL_TRIANGLE_SCALE;
+ planeAngleUvMatrix[0] = +(float) Math.cos(angleRadians) * uScale;
+ planeAngleUvMatrix[1] = -(float) Math.sin(angleRadians) * vScale;
+ planeAngleUvMatrix[2] = +(float) Math.sin(angleRadians) * uScale;
+ planeAngleUvMatrix[3] = +(float) Math.cos(angleRadians) * vScale;
+ GLES20.glUniformMatrix2fv(planeUvMatrixUniform, 1, false, planeAngleUvMatrix, 0);
- // Each plane will have its own angle offset from others, to make them easier to
- // distinguish. Compute a 2x2 rotation matrix from the angle.
- float angleRadians = planeIndex * 0.144f;
- float uScale = DOTS_PER_METER;
- float vScale = DOTS_PER_METER * EQUILATERAL_TRIANGLE_SCALE;
- mPlaneAngleUvMatrix[0] = +(float) Math.cos(angleRadians) * uScale;
- mPlaneAngleUvMatrix[1] = -(float) Math.sin(angleRadians) * vScale;
- mPlaneAngleUvMatrix[2] = +(float) Math.sin(angleRadians) * uScale;
- mPlaneAngleUvMatrix[3] = +(float) Math.cos(angleRadians) * vScale;
- GLES20.glUniformMatrix2fv(mPlaneUvMatrixUniform, 1, false, mPlaneAngleUvMatrix, 0);
-
- draw(cameraView, cameraPerspective);
- }
-
- // Clean up the state we set
- GLES20.glDisableVertexAttribArray(mPlaneXZPositionAlphaAttribute);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
- GLES20.glDisable(GLES20.GL_BLEND);
- GLES20.glDepthMask(true);
-
- ShaderUtil.checkGLError(TAG, "Cleaning up after drawing planes");
+ draw(cameraView, cameraPerspective);
}
- private static void colorRgbaToFloat(float[] planeColor, int colorRgba) {
- planeColor[0] = ((float) ((colorRgba >> 24) & 0xff)) / 255.0f;
- planeColor[1] = ((float) ((colorRgba >> 16) & 0xff)) / 255.0f;
- planeColor[2] = ((float) ((colorRgba >> 8) & 0xff)) / 255.0f;
- planeColor[3] = ((float) ((colorRgba >> 0) & 0xff)) / 255.0f;
- }
+ // Clean up the state we set
+ GLES20.glDisableVertexAttribArray(planeXZPositionAlphaAttribute);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ GLES20.glDisable(GLES20.GL_BLEND);
+ GLES20.glDepthMask(true);
- private static final int[] PLANE_COLORS_RGBA = {
- 0xFFFFFFFF,
- 0xF44336FF,
- 0xE91E63FF,
- 0x9C27B0FF,
- 0x673AB7FF,
- 0x3F51B5FF,
- 0x2196F3FF,
- 0x03A9F4FF,
- 0x00BCD4FF,
- 0x009688FF,
- 0x4CAF50FF,
- 0x8BC34AFF,
- 0xCDDC39FF,
- 0xFFEB3BFF,
- 0xFFC107FF,
- 0xFF9800FF,
- };
+ ShaderUtil.checkGLError(TAG, "Cleaning up after drawing planes");
+ }
+
+ private static void colorRgbaToFloat(float[] planeColor, int colorRgba) {
+ planeColor[0] = ((float) ((colorRgba >> 24) & 0xff)) / 255.0f;
+ planeColor[1] = ((float) ((colorRgba >> 16) & 0xff)) / 255.0f;
+ planeColor[2] = ((float) ((colorRgba >> 8) & 0xff)) / 255.0f;
+ planeColor[3] = ((float) ((colorRgba >> 0) & 0xff)) / 255.0f;
+ }
+
+ private static final int[] PLANE_COLORS_RGBA = {
+ 0xFFFFFFFF,
+ 0xF44336FF,
+ 0xE91E63FF,
+ 0x9C27B0FF,
+ 0x673AB7FF,
+ 0x3F51B5FF,
+ 0x2196F3FF,
+ 0x03A9F4FF,
+ 0x00BCD4FF,
+ 0x009688FF,
+ 0x4CAF50FF,
+ 0x8BC34AFF,
+ 0xCDDC39FF,
+ 0xFFEB3BFF,
+ 0xFFC107FF,
+ 0xFF9800FF,
+ };
}
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/PointCloudRenderer.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/PointCloudRenderer.java
index 9862a74..35bbeba 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/PointCloudRenderer.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/PointCloudRenderer.java
@@ -23,107 +23,103 @@
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
-/**
- * Renders a point cloud.
- */
+/** Renders a point cloud. */
public class PointCloudRenderer {
- private static final String TAG = PointCloud.class.getSimpleName();
+ private static final String TAG = PointCloud.class.getSimpleName();
- private static final int BYTES_PER_FLOAT = Float.SIZE / 8;
- private static final int FLOATS_PER_POINT = 4; // X,Y,Z,confidence.
- private static final int BYTES_PER_POINT = BYTES_PER_FLOAT * FLOATS_PER_POINT;
- private static final int INITIAL_BUFFER_POINTS = 1000;
+ private static final int BYTES_PER_FLOAT = Float.SIZE / 8;
+ private static final int FLOATS_PER_POINT = 4; // X,Y,Z,confidence.
+ private static final int BYTES_PER_POINT = BYTES_PER_FLOAT * FLOATS_PER_POINT;
+ private static final int INITIAL_BUFFER_POINTS = 1000;
- private int mVbo;
- private int mVboSize;
+ private int vbo;
+ private int vboSize;
- private int mProgramName;
- private int mPositionAttribute;
- private int mModelViewProjectionUniform;
- private int mColorUniform;
- private int mPointSizeUniform;
+ private int programName;
+ private int positionAttribute;
+ private int modelViewProjectionUniform;
+ private int colorUniform;
+ private int pointSizeUniform;
- private int mNumPoints = 0;
+ private int numPoints = 0;
- // Keep track of the last point cloud rendered to avoid updating the VBO if point cloud
- // was not changed.
- private PointCloud mLastPointCloud = null;
+ // Keep track of the last point cloud rendered to avoid updating the VBO if point cloud
+ // was not changed.
+ private PointCloud lastPointCloud = null;
- public PointCloudRenderer() {
+ public PointCloudRenderer() {}
+
+ /**
+ * Allocates and initializes OpenGL resources needed by the point cloud renderer. Must be called on the
+ * OpenGL thread, typically in {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10, EGLConfig)}.
+ *
+ * @param context Needed to access shader source.
+ */
+ public void createOnGlThread(Context context) {
+ ShaderUtil.checkGLError(TAG, "before create");
+
+ int[] buffers = new int[1];
+ GLES20.glGenBuffers(1, buffers, 0);
+ vbo = buffers[0];
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo);
+
+ vboSize = INITIAL_BUFFER_POINTS * BYTES_PER_POINT;
+ GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vboSize, null, GLES20.GL_DYNAMIC_DRAW);
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
+
+ ShaderUtil.checkGLError(TAG, "buffer alloc");
+
+ int vertexShader =
+ ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, R.raw.point_cloud_vertex);
+ int passthroughShader =
+ ShaderUtil.loadGLShader(
+ TAG, context, GLES20.GL_FRAGMENT_SHADER, R.raw.passthrough_fragment);
+
+ programName = GLES20.glCreateProgram();
+ GLES20.glAttachShader(programName, vertexShader);
+ GLES20.glAttachShader(programName, passthroughShader);
+ GLES20.glLinkProgram(programName);
+ GLES20.glUseProgram(programName);
+
+ ShaderUtil.checkGLError(TAG, "program");
+
+ positionAttribute = GLES20.glGetAttribLocation(programName, "a_Position");
+ colorUniform = GLES20.glGetUniformLocation(programName, "u_Color");
+ modelViewProjectionUniform = GLES20.glGetUniformLocation(programName, "u_ModelViewProjection");
+ pointSizeUniform = GLES20.glGetUniformLocation(programName, "u_PointSize");
+
+ ShaderUtil.checkGLError(TAG, "program params");
+ }
+
+ /**
+ * Updates the OpenGL buffer contents to the provided point cloud. Repeated calls with the same point
+ * cloud will be ignored.
+ */
+ public void update(PointCloud cloud) {
+ if (lastPointCloud == cloud) {
+ // Redundant call.
+ return;
}
- /**
- * Allocates and initializes OpenGL resources needed by the plane renderer. Must be
- * called on the OpenGL thread, typically in
- * {@link GLSurfaceView.Renderer#onSurfaceCreated(GL10, EGLConfig)}.
- *
- * @param context Needed to access shader source.
- */
- public void createOnGlThread(Context context) {
- ShaderUtil.checkGLError(TAG, "before create");
+ ShaderUtil.checkGLError(TAG, "before update");
- int[] buffers = new int[1];
- GLES20.glGenBuffers(1, buffers, 0);
- mVbo = buffers[0];
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVbo);
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo);
+ lastPointCloud = cloud;
- mVboSize = INITIAL_BUFFER_POINTS * BYTES_PER_POINT;
- GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, mVboSize, null, GLES20.GL_DYNAMIC_DRAW);
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
-
- ShaderUtil.checkGLError(TAG, "buffer alloc");
-
- int vertexShader = ShaderUtil.loadGLShader(TAG, context,
- GLES20.GL_VERTEX_SHADER, R.raw.point_cloud_vertex);
- int passthroughShader = ShaderUtil.loadGLShader(TAG, context,
- GLES20.GL_FRAGMENT_SHADER, R.raw.passthrough_fragment);
-
- mProgramName = GLES20.glCreateProgram();
- GLES20.glAttachShader(mProgramName, vertexShader);
- GLES20.glAttachShader(mProgramName, passthroughShader);
- GLES20.glLinkProgram(mProgramName);
- GLES20.glUseProgram(mProgramName);
-
- ShaderUtil.checkGLError(TAG, "program");
-
- mPositionAttribute = GLES20.glGetAttribLocation(mProgramName, "a_Position");
- mColorUniform = GLES20.glGetUniformLocation(mProgramName, "u_Color");
- mModelViewProjectionUniform = GLES20.glGetUniformLocation(
- mProgramName, "u_ModelViewProjection");
- mPointSizeUniform = GLES20.glGetUniformLocation(mProgramName, "u_PointSize");
-
- ShaderUtil.checkGLError(TAG, "program params");
+ // If the VBO is not large enough to fit the new point cloud, resize it.
+ numPoints = lastPointCloud.getPoints().remaining() / FLOATS_PER_POINT;
+ if (numPoints * BYTES_PER_POINT > vboSize) {
+ while (numPoints * BYTES_PER_POINT > vboSize) {
+ vboSize *= 2;
+ }
+ GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vboSize, null, GLES20.GL_DYNAMIC_DRAW);
}
+ GLES20.glBufferSubData(
+ GLES20.GL_ARRAY_BUFFER, 0, numPoints * BYTES_PER_POINT, lastPointCloud.getPoints());
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
- /**
- * Updates the OpenGL buffer contents to the provided point. Repeated calls with the same
- * point cloud will be ignored.
- */
- public void update(PointCloud cloud) {
- if (mLastPointCloud == cloud) {
- // Redundant call.
- return;
- }
-
- ShaderUtil.checkGLError(TAG, "before update");
-
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVbo);
- mLastPointCloud = cloud;
-
- // If the VBO is not large enough to fit the new point cloud, resize it.
- mNumPoints = mLastPointCloud.getPoints().remaining() / FLOATS_PER_POINT;
- if (mNumPoints * BYTES_PER_POINT > mVboSize) {
- while (mNumPoints * BYTES_PER_POINT > mVboSize) {
- mVboSize *= 2;
- }
- GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, mVboSize, null, GLES20.GL_DYNAMIC_DRAW);
- }
- GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, mNumPoints * BYTES_PER_POINT,
- mLastPointCloud.getPoints());
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
-
- ShaderUtil.checkGLError(TAG, "after update");
- }
+ ShaderUtil.checkGLError(TAG, "after update");
+ }
/**
* Renders the point cloud. ArCore point cloud is given in world space.
@@ -134,24 +130,23 @@
* com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)}.
*/
public void draw(float[] cameraView, float[] cameraPerspective) {
- float[] modelViewProjection = new float[16];
- Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, cameraView, 0);
+ float[] modelViewProjection = new float[16];
+ Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, cameraView, 0);
- ShaderUtil.checkGLError(TAG, "Before draw");
+ ShaderUtil.checkGLError(TAG, "Before draw");
- GLES20.glUseProgram(mProgramName);
- GLES20.glEnableVertexAttribArray(mPositionAttribute);
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVbo);
- GLES20.glVertexAttribPointer(
- mPositionAttribute, 4, GLES20.GL_FLOAT, false, BYTES_PER_POINT, 0);
- GLES20.glUniform4f(mColorUniform, 31.0f / 255.0f, 188.0f / 255.0f, 210.0f / 255.0f, 1.0f);
- GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false, modelViewProjection, 0);
- GLES20.glUniform1f(mPointSizeUniform, 5.0f);
+ GLES20.glUseProgram(programName);
+ GLES20.glEnableVertexAttribArray(positionAttribute);
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo);
+ GLES20.glVertexAttribPointer(positionAttribute, 4, GLES20.GL_FLOAT, false, BYTES_PER_POINT, 0);
+ GLES20.glUniform4f(colorUniform, 31.0f / 255.0f, 188.0f / 255.0f, 210.0f / 255.0f, 1.0f);
+ GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjection, 0);
+ GLES20.glUniform1f(pointSizeUniform, 5.0f);
- GLES20.glDrawArrays(GLES20.GL_POINTS, 0, mNumPoints);
- GLES20.glDisableVertexAttribArray(mPositionAttribute);
- GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
+ GLES20.glDrawArrays(GLES20.GL_POINTS, 0, numPoints);
+ GLES20.glDisableVertexAttribArray(positionAttribute);
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
- ShaderUtil.checkGLError(TAG, "Draw");
- }
+ ShaderUtil.checkGLError(TAG, "Draw");
+ }
}
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ShaderUtil.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ShaderUtil.java
index 148ed81..fce0f43 100644
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ShaderUtil.java
+++ b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ShaderUtil.java
@@ -22,80 +22,78 @@
import java.io.InputStream;
import java.io.InputStreamReader;
-/**
- * Shader helper functions.
- */
+/** Shader helper functions. */
public class ShaderUtil {
- /**
- * Converts a raw text file, saved as a resource, into an OpenGL ES shader.
- *
- * @param type The type of shader we will be creating.
- * @param resId The resource ID of the raw text file about to be turned into a shader.
- * @return The shader object handler.
- */
- public static int loadGLShader(String tag, Context context, int type, int resId) {
- String code = readRawTextFile(context, resId);
- int shader = GLES20.glCreateShader(type);
- GLES20.glShaderSource(shader, code);
- GLES20.glCompileShader(shader);
+ /**
+ * Converts a raw text file, saved as a resource, into an OpenGL ES shader.
+ *
+ * @param type The type of shader we will be creating.
+ * @param resId The resource ID of the raw text file about to be turned into a shader.
+ * @return The shader object handler.
+ */
+ public static int loadGLShader(String tag, Context context, int type, int resId) {
+ String code = readRawTextFile(context, resId);
+ int shader = GLES20.glCreateShader(type);
+ GLES20.glShaderSource(shader, code);
+ GLES20.glCompileShader(shader);
- // Get the compilation status.
- final int[] compileStatus = new int[1];
- GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
+ // Get the compilation status.
+ final int[] compileStatus = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
- // If the compilation failed, delete the shader.
- if (compileStatus[0] == 0) {
- Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
- GLES20.glDeleteShader(shader);
- shader = 0;
- }
-
- if (shader == 0) {
- throw new RuntimeException("Error creating shader.");
- }
-
- return shader;
+ // If the compilation failed, delete the shader.
+ if (compileStatus[0] == 0) {
+ Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
}
- /**
- * Checks if we've had an error inside of OpenGL ES, and if so what that error is.
- *
- * @param label Label to report in case of error.
- * @throws RuntimeException If an OpenGL error is detected.
- */
- public static void checkGLError(String tag, String label) {
- int lastError = GLES20.GL_NO_ERROR;
- // Drain the queue of all errors.
- int error;
- while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
- Log.e(tag, label + ": glError " + error);
- lastError = error;
- }
- if (lastError != GLES20.GL_NO_ERROR) {
- throw new RuntimeException(label + ": glError " + lastError);
- }
+ if (shader == 0) {
+ throw new RuntimeException("Error creating shader.");
}
- /**
- * Converts a raw text file into a string.
- *
- * @param resId The resource ID of the raw text file about to be turned into a shader.
- * @return The context of the text file, or null in case of error.
- */
- private static String readRawTextFile(Context context, int resId) {
- InputStream inputStream = context.getResources().openRawResource(resId);
- try {
- BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
- StringBuilder sb = new StringBuilder();
- String line;
- while ((line = reader.readLine()) != null) {
- sb.append(line).append("\n");
- }
- reader.close();
- return sb.toString();
- } catch (IOException e) {
- e.printStackTrace();
- }
- return null;
+ return shader;
+ }
+
+ /**
+ * Checks if we've had an error inside of OpenGL ES, and if so what that error is.
+ *
+ * @param label Label to report in case of error.
+ * @throws RuntimeException If an OpenGL error is detected.
+ */
+ public static void checkGLError(String tag, String label) {
+ int lastError = GLES20.GL_NO_ERROR;
+ // Drain the queue of all errors.
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ Log.e(tag, label + ": glError " + error);
+ lastError = error;
}
+ if (lastError != GLES20.GL_NO_ERROR) {
+ throw new RuntimeException(label + ": glError " + lastError);
+ }
+ }
+
+ /**
+ * Converts a raw text file into a string.
+ *
+ * @param resId The resource ID of the raw text file about to be turned into a shader.
+ * @return The contents of the text file, or null in case of error.
+ */
+ private static String readRawTextFile(Context context, int resId) {
+ InputStream inputStream = context.getResources().openRawResource(resId);
+ try {
+ BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
+ StringBuilder sb = new StringBuilder();
+ String line;
+ while ((line = reader.readLine()) != null) {
+ sb.append(line).append("\n");
+ }
+ reader.close();
+ return sb.toString();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return null;
+ }
}
diff --git a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/package-info.java b/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/package-info.java
deleted file mode 100644
index 435906b..0000000
--- a/samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/package-info.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 2017 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * This package contains classes that do the rendering for this example.
- */
-package com.google.ar.core.examples.java.helloar.rendering;
diff --git a/samples/hello_ar_java/app/src/main/res/layout/activity_main.xml b/samples/hello_ar_java/app/src/main/res/layout/activity_main.xml
index 1875fea..2723fc1 100644
--- a/samples/hello_ar_java/app/src/main/res/layout/activity_main.xml
+++ b/samples/hello_ar_java/app/src/main/res/layout/activity_main.xml
@@ -19,10 +19,10 @@
android:layout_height="match_parent"
tools:context="com.google.ar.core.examples.java.helloar.HelloArActivity">
- <android.opengl.GLSurfaceView
- android:id="@+id/surfaceview"
- android:layout_width="fill_parent"
- android:layout_height="fill_parent"
- android:layout_gravity="top" />
+ <android.opengl.GLSurfaceView
+ android:id="@+id/surfaceview"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:layout_gravity="top"/>
</RelativeLayout>
diff --git a/samples/hello_ar_java/app/src/main/res/values/strings.xml b/samples/hello_ar_java/app/src/main/res/values/strings.xml
index 5f8bc36..4555298 100644
--- a/samples/hello_ar_java/app/src/main/res/values/strings.xml
+++ b/samples/hello_ar_java/app/src/main/res/values/strings.xml
@@ -15,5 +15,5 @@
limitations under the License.
-->
<resources>
- <string name="app_name">HelloAR Java</string>
+ <string name="app_name">HelloAR Java</string>
</resources>
diff --git a/samples/hello_ar_java/app/src/main/res/values/styles.xml b/samples/hello_ar_java/app/src/main/res/values/styles.xml
index 59cf7e9..68b12b6 100644
--- a/samples/hello_ar_java/app/src/main/res/values/styles.xml
+++ b/samples/hello_ar_java/app/src/main/res/values/styles.xml
@@ -15,21 +15,21 @@
-->
<resources>
+ <!--
+ Base application theme, dependent on API level. This theme is replaced
+ by AppBaseTheme from res/values-vXX/styles.xml on newer devices.
+ -->
+ <style name="AppBaseTheme" parent="android:Theme.Light">
<!--
- Base application theme, dependent on API level. This theme is replaced
- by AppBaseTheme from res/values-vXX/styles.xml on newer devices.
+ Theme customizations available in newer API levels can go in
+ res/values-vXX/styles.xml, while customizations related to
+ backward-compatibility can go here.
-->
- <style name="AppBaseTheme" parent="android:Theme.Light">
- <!--
- Theme customizations available in newer API levels can go in
- res/values-vXX/styles.xml, while customizations related to
- backward-compatibility can go here.
- -->
- </style>
+ </style>
- <!-- Application theme. -->
- <style name="AppTheme" parent="AppBaseTheme">
- <!-- All customizations that are NOT specific to a particular API-level can go here. -->
- </style>
+ <!-- Application theme. -->
+ <style name="AppTheme" parent="AppBaseTheme">
+ <!-- All customizations that are NOT specific to a particular API-level can go here. -->
+ </style>
</resources>
diff --git a/samples/hello_ar_java/build.gradle b/samples/hello_ar_java/build.gradle
index 85691a6..be505f0 100644
--- a/samples/hello_ar_java/build.gradle
+++ b/samples/hello_ar_java/build.gradle
@@ -17,9 +17,6 @@
google()
jcenter()
mavenLocal()
- maven {
- url "${project.rootDir}/../../libraries/m2repository"
- }
}
}
diff --git a/samples/hello_ar_java/gradle/wrapper/gradle-wrapper.jar b/samples/hello_ar_java/gradle/wrapper/gradle-wrapper.jar
index 12a0871..7a3265e 100644
--- a/samples/hello_ar_java/gradle/wrapper/gradle-wrapper.jar
+++ b/samples/hello_ar_java/gradle/wrapper/gradle-wrapper.jar
Binary files differ