GVR Android SDK v1.40.0
diff --git a/apks/controller_emulator.apk b/apks/controller_emulator.apk
index 407f1e0..70cf9c4 100644
--- a/apks/controller_emulator.apk
+++ b/apks/controller_emulator.apk
Binary files differ
diff --git a/build.gradle b/build.gradle
index 7fc1ad9..a3f222d 100644
--- a/build.gradle
+++ b/build.gradle
@@ -6,7 +6,7 @@
jcenter()
}
dependencies {
- classpath 'com.android.tools.build:gradle-experimental:0.8.1'
+ classpath 'com.android.tools.build:gradle-experimental:0.9.0-beta4'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
@@ -29,13 +29,13 @@
// The dependencies for NDK builds live inside the .aar files so they need to
// be extracted before NDK targets can build.
task extractAudioSo(type: Copy) {
- from zipTree("${project.rootDir}/libraries/sdk-audio-1.30.0.aar")
+ from zipTree("${project.rootDir}/libraries/sdk-audio-1.40.0.aar")
into "${project.rootDir}/libraries/"
include "jni/**/libgvr_audio.so"
}
task extractGvrSo(type: Copy) {
- from zipTree("${project.rootDir}/libraries/sdk-base-1.30.0.aar")
+ from zipTree("${project.rootDir}/libraries/sdk-base-1.40.0.aar")
into "${project.rootDir}/libraries/"
include "jni/**/libgvr.so"
}
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index 3baa851..7372f72 100644
--- a/gradle/wrapper/gradle-wrapper.jar
+++ b/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 1853f99..354f763 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
-#Tue Oct 18 14:52:21 PDT 2016
+#Mon Feb 13 16:09:10 GMT 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-2.14.1-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-bin.zip
diff --git a/gradlew b/gradlew
index 27309d9..4453cce 100755
--- a/gradlew
+++ b/gradlew
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/usr/bin/env sh
##############################################################################
##
@@ -154,11 +154,19 @@
esac
fi
-# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
-function splitJvmOpts() {
- JVM_OPTS=("$@")
+# Escape application args
+save ( ) {
+ for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+ echo " "
}
-eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
-JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+APP_ARGS=$(save "$@")
-exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+ cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/gradlew.bat b/gradlew.bat
index 832fdb6..f955316 100644
--- a/gradlew.bat
+++ b/gradlew.bat
@@ -49,7 +49,6 @@
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
-if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
@@ -60,11 +59,6 @@
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
-goto execute
-
-:4NT_args
-@rem Get arguments from the 4NT Shell from JP Software
-set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
diff --git a/libraries/headers/vr/gvr/capi/include/gvr.h b/libraries/headers/vr/gvr/capi/include/gvr.h
index bdc8c07..7d3fc86 100644
--- a/libraries/headers/vr/gvr/capi/include/gvr.h
+++ b/libraries/headers/vr/gvr/capi/include/gvr.h
@@ -253,6 +253,9 @@
/// will ensure that the populated viewports reflect the currently paired
/// viewer.
///
+/// This function assumes that the client is *not* using multiview to render to
+/// multiple layers simultaneously.
+///
/// @param gvr Pointer to the gvr instance from which to get the viewports.
/// @param viewport_list Pointer to a previously allocated viewport list. This
/// will be populated with the recommended buffer viewports and resized if
@@ -340,7 +343,7 @@
gvr_clock_time_point target_presentation_time);
/// Queries whether a particular GVR feature is supported by the underlying
-/// platform.
+/// platform. This should be called after gvr_initialize_gl().
///
/// @param gvr The context to query against.
/// @param feature The gvr_feature type being queried.
@@ -491,6 +494,13 @@
void gvr_buffer_viewport_set_reprojection(gvr_buffer_viewport* viewport,
int32_t reprojection);
+/// Sets the layer in a multiview buffer from which the viewport should sample.
+///
+/// @param layer_index The layer in the array texture that distortion samples
+/// from. Must be non-negative. Defaults to 0.
+void gvr_buffer_viewport_set_source_layer(gvr_buffer_viewport* viewport,
+ int32_t layer_index);
+
/// Compares two gvr_buffer_viewport instances and returns true if they specify
/// the same view mapping.
///
@@ -617,6 +627,17 @@
void gvr_buffer_spec_set_depth_stencil_format(gvr_buffer_spec* spec,
int32_t depth_stencil_format);
+/// Sets the number of layers in a framebuffer backed by an array texture.
+///
+/// Default is 1, which means a non-layered texture will be created.
+/// Not all platforms support multiple layers, so clients can call
+/// gvr_is_feature_supported(GVR_FEATURE_MULTIVIEW) to check.
+///
+/// @param spec Buffer specification.
+/// @param num_layers The number of layers in the array texture.
+void gvr_buffer_spec_set_multiview_layers(gvr_buffer_spec* spec,
+ int32_t num_layers);
+
/// Creates a swap chain from the given buffer specifications.
/// This is a potentially time-consuming operation. All frames within the
/// swapchain will be allocated. Once rendering is stopped, call
@@ -971,6 +992,10 @@
if (viewport_) gvr_buffer_viewport_destroy(&viewport_);
}
+ explicit operator bool() const {
+ return viewport_ != nullptr;
+ }
+
/// For more information, see gvr_buffer_viewport_get_source_fov().
Rectf GetSourceFov() const {
return gvr_buffer_viewport_get_source_fov(viewport_);
@@ -1041,6 +1066,11 @@
gvr_buffer_viewport_set_reprojection(viewport_, reprojection);
}
+ /// For more information, see gvr_buffer_viewport_set_source_layer().
+ void SetSourceLayer(int32_t layer_index) {
+ gvr_buffer_viewport_set_source_layer(viewport_, layer_index);
+ }
+
/// For more information, see gvr_buffer_viewport_equal().
bool operator==(const BufferViewport& other) const {
return gvr_buffer_viewport_equal(viewport_, other.viewport_) ? true : false;
@@ -1052,7 +1082,7 @@
/// @name Wrapper manipulation
/// @{
/// Creates a C++ wrapper for a C object and takes ownership.
- explicit BufferViewport(gvr_buffer_viewport* viewport)
+ explicit BufferViewport(gvr_buffer_viewport* viewport = nullptr)
: viewport_(viewport) {}
/// Returns the wrapped C object. Does not affect ownership.
@@ -1084,6 +1114,11 @@
/// validity is tied to the lifetime of that instance.
class BufferViewportList {
public:
+ BufferViewportList()
+ : context_(nullptr),
+ viewport_list_(nullptr)
+ {}
+
BufferViewportList(BufferViewportList&& other)
: context_(nullptr), viewport_list_(nullptr) {
std::swap(context_, other.context_);
@@ -1102,6 +1137,10 @@
}
}
+ explicit operator bool() const {
+ return viewport_list_ != nullptr;
+ }
+
/// For more information, see gvr_get_recommended_buffer_viewports().
void SetToRecommendedBufferViewports() {
gvr_get_recommended_buffer_viewports(context_, viewport_list_);
@@ -1185,6 +1224,10 @@
if (spec_) gvr_buffer_spec_destroy(&spec_);
}
+ explicit operator bool() const {
+ return spec_ != nullptr;
+ }
+
/// Gets the buffer's size. The default value is the recommended render
/// target size. For more information, see gvr_buffer_spec_get_size().
Sizei GetSize() const {
@@ -1229,10 +1272,15 @@
gvr_buffer_spec_set_depth_stencil_format(spec_, depth_stencil_format);
}
+ /// For more information, see gvr_buffer_spec_set_multiview_layers().
+ void SetMultiviewLayers(int32_t num_layers) {
+ gvr_buffer_spec_set_multiview_layers(spec_, num_layers);
+ }
+
/// @name Wrapper manipulation
/// @{
/// Creates a C++ wrapper for a C object and takes ownership.
- explicit BufferSpec(gvr_buffer_spec* spec) : spec_(spec) {}
+ explicit BufferSpec(gvr_buffer_spec* spec = nullptr) : spec_(spec) {}
/// Returns the wrapped C object. Does not affect ownership.
gvr_buffer_spec* cobj() { return spec_; }
@@ -1275,6 +1323,10 @@
// The swap chain owns the frame, so no clean-up is required.
}
+ explicit operator bool() const {
+ return frame_ != nullptr;
+ }
+
/// For more information, see gvr_frame_get_buffer_size().
Sizei GetBufferSize(int32_t index) const {
return gvr_frame_get_buffer_size(frame_, index);
@@ -1305,7 +1357,7 @@
/// @name Wrapper manipulation
/// @{
/// Creates a C++ wrapper for a C object and takes ownership.
- explicit Frame(gvr_frame* frame) : frame_(frame) {}
+ explicit Frame(gvr_frame* frame = nullptr) : frame_(frame) {}
/// Returns the wrapped C object. Does not affect ownership.
gvr_frame* cobj() { return frame_; }
@@ -1349,6 +1401,10 @@
if (swap_chain_) gvr_swap_chain_destroy(&swap_chain_);
}
+ explicit operator bool() const {
+ return swap_chain_ != nullptr;
+ }
+
/// For more information, see gvr_swap_chain_get_buffer_count().
int32_t GetBufferCount() const {
return gvr_swap_chain_get_buffer_count(swap_chain_);
@@ -1376,7 +1432,8 @@
/// @name Wrapper manipulation
/// @{
/// Creates a C++ wrapper for a C object and takes ownership.
- explicit SwapChain(gvr_swap_chain* swap_chain) : swap_chain_(swap_chain) {}
+ explicit SwapChain(gvr_swap_chain* swap_chain = nullptr)
+ : swap_chain_(swap_chain) {}
/// Returns the wrapped C object. Does not affect ownership.
gvr_swap_chain* cobj() { return swap_chain_; }
diff --git a/libraries/headers/vr/gvr/capi/include/gvr_gesture.h b/libraries/headers/vr/gvr/capi/include/gvr_gesture.h
index 2df27fa..0860e00 100644
--- a/libraries/headers/vr/gvr/capi/include/gvr_gesture.h
+++ b/libraries/headers/vr/gvr/capi/include/gvr_gesture.h
@@ -111,7 +111,7 @@
/// Opaque handle to gesture context.
typedef struct gvr_gesture_context_ gvr_gesture_context;
-/// Opaque handle to gesture detector.
+/// Opaque handle to gesture.
typedef struct gvr_gesture_ gvr_gesture;
/// Creates and initializes a gesture context instance which can be used to
diff --git a/libraries/headers/vr/gvr/capi/include/gvr_types.h b/libraries/headers/vr/gvr/capi/include/gvr_types.h
index cf81b51..39329d9 100644
--- a/libraries/headers/vr/gvr/capi/include/gvr_types.h
+++ b/libraries/headers/vr/gvr/capi/include/gvr_types.h
@@ -56,6 +56,9 @@
// Asynchronous reprojection warps the app's rendered frame using the most
// recent head pose just before pushing the frame to the display.
GVR_FEATURE_ASYNC_REPROJECTION = 0,
+ // Support for framebuffers suitable for rendering with the GL_OVR_multiview2
+ // and GL_OVR_multiview_multisampled_render_to_texture extensions.
+ GVR_FEATURE_MULTIVIEW = 1,
} gvr_feature;
/// @}
diff --git a/libraries/sdk-audio-1.30.0.aar b/libraries/sdk-audio-1.30.0.aar
deleted file mode 100644
index 33f9991..0000000
--- a/libraries/sdk-audio-1.30.0.aar
+++ /dev/null
Binary files differ
diff --git a/libraries/sdk-audio-1.40.0.aar b/libraries/sdk-audio-1.40.0.aar
new file mode 100644
index 0000000..31c52fe
--- /dev/null
+++ b/libraries/sdk-audio-1.40.0.aar
Binary files differ
diff --git a/libraries/sdk-audio-1.30.0.pom b/libraries/sdk-audio-1.40.0.pom
similarity index 92%
rename from libraries/sdk-audio-1.30.0.pom
rename to libraries/sdk-audio-1.40.0.pom
index b3ebe60..fe3b943 100644
--- a/libraries/sdk-audio-1.30.0.pom
+++ b/libraries/sdk-audio-1.40.0.pom
@@ -4,7 +4,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-audio</artifactId>
- <version>1.30.0</version>
+ <version>1.40.0</version>
<packaging>aar</packaging>
<name>Google VR SDK-Audio</name>
@@ -19,7 +19,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-base</artifactId>
<type>aar</type>
- <version>1.30.0</version>
+ <version>1.40.0</version>
</dependency>
</dependencies>
</project>
diff --git a/libraries/sdk-base-1.30.0.aar b/libraries/sdk-base-1.30.0.aar
deleted file mode 100644
index 89270eb..0000000
--- a/libraries/sdk-base-1.30.0.aar
+++ /dev/null
Binary files differ
diff --git a/libraries/sdk-base-1.40.0.aar b/libraries/sdk-base-1.40.0.aar
new file mode 100644
index 0000000..414f1a0
--- /dev/null
+++ b/libraries/sdk-base-1.40.0.aar
Binary files differ
diff --git a/libraries/sdk-base-1.30.0.pom b/libraries/sdk-base-1.40.0.pom
similarity index 92%
rename from libraries/sdk-base-1.30.0.pom
rename to libraries/sdk-base-1.40.0.pom
index 38ee95c..5d22225 100644
--- a/libraries/sdk-base-1.30.0.pom
+++ b/libraries/sdk-base-1.40.0.pom
@@ -4,7 +4,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-base</artifactId>
- <version>1.30.0</version>
+ <version>1.40.0</version>
<packaging>aar</packaging>
<name>Google VR SDK-Base</name>
@@ -19,7 +19,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-common</artifactId>
<type>aar</type>
- <version>1.30.0</version>
+ <version>1.40.0</version>
</dependency>
</dependencies>
</project>
diff --git a/libraries/sdk-common-1.30.0.aar b/libraries/sdk-common-1.30.0.aar
deleted file mode 100644
index 91dc061..0000000
--- a/libraries/sdk-common-1.30.0.aar
+++ /dev/null
Binary files differ
diff --git a/libraries/sdk-common-1.40.0.aar b/libraries/sdk-common-1.40.0.aar
new file mode 100644
index 0000000..f3facdc
--- /dev/null
+++ b/libraries/sdk-common-1.40.0.aar
Binary files differ
diff --git a/libraries/sdk-common-1.30.0.pom b/libraries/sdk-common-1.40.0.pom
similarity index 96%
rename from libraries/sdk-common-1.30.0.pom
rename to libraries/sdk-common-1.40.0.pom
index dbe0d07..880bb05 100644
--- a/libraries/sdk-common-1.30.0.pom
+++ b/libraries/sdk-common-1.40.0.pom
@@ -4,7 +4,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-common</artifactId>
- <version>1.30.0</version>
+ <version>1.40.0</version>
<packaging>aar</packaging>
<name>Google VR SDK-Common</name>
diff --git a/libraries/sdk-commonwidget-1.30.0.aar b/libraries/sdk-commonwidget-1.40.0.aar
similarity index 99%
rename from libraries/sdk-commonwidget-1.30.0.aar
rename to libraries/sdk-commonwidget-1.40.0.aar
index d3c7c6d..fd3e9c4 100644
--- a/libraries/sdk-commonwidget-1.30.0.aar
+++ b/libraries/sdk-commonwidget-1.40.0.aar
Binary files differ
diff --git a/libraries/sdk-commonwidget-1.30.0.pom b/libraries/sdk-commonwidget-1.40.0.pom
similarity index 92%
rename from libraries/sdk-commonwidget-1.30.0.pom
rename to libraries/sdk-commonwidget-1.40.0.pom
index 2873671..15b5ba5 100644
--- a/libraries/sdk-commonwidget-1.30.0.pom
+++ b/libraries/sdk-commonwidget-1.40.0.pom
@@ -4,7 +4,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-commonwidget</artifactId>
- <version>1.30.0</version>
+ <version>1.40.0</version>
<packaging>aar</packaging>
<name>Google VR SDK-CommonWidget</name>
@@ -19,7 +19,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-common</artifactId>
<type>aar</type>
- <version>1.30.0</version>
+ <version>1.40.0</version>
</dependency>
</dependencies>
</project>
diff --git a/libraries/sdk-controller-1.30.0.aar b/libraries/sdk-controller-1.30.0.aar
deleted file mode 100644
index 3a8dcd6..0000000
--- a/libraries/sdk-controller-1.30.0.aar
+++ /dev/null
Binary files differ
diff --git a/libraries/sdk-controller-1.40.0.aar b/libraries/sdk-controller-1.40.0.aar
new file mode 100644
index 0000000..61202e5
--- /dev/null
+++ b/libraries/sdk-controller-1.40.0.aar
Binary files differ
diff --git a/libraries/sdk-controller-1.30.0.pom b/libraries/sdk-controller-1.40.0.pom
similarity index 92%
rename from libraries/sdk-controller-1.30.0.pom
rename to libraries/sdk-controller-1.40.0.pom
index 7b00865..00a51e8 100644
--- a/libraries/sdk-controller-1.30.0.pom
+++ b/libraries/sdk-controller-1.40.0.pom
@@ -4,7 +4,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-controller</artifactId>
- <version>1.30.0</version>
+ <version>1.40.0</version>
<packaging>aar</packaging>
<name>Google VR SDK-Controller</name>
@@ -19,7 +19,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-base</artifactId>
<type>aar</type>
- <version>1.30.0</version>
+ <version>1.40.0</version>
</dependency>
</dependencies>
</project>
diff --git a/libraries/sdk-panowidget-1.30.0.aar b/libraries/sdk-panowidget-1.30.0.aar
deleted file mode 100644
index 0a27158..0000000
--- a/libraries/sdk-panowidget-1.30.0.aar
+++ /dev/null
Binary files differ
diff --git a/libraries/sdk-panowidget-1.40.0.aar b/libraries/sdk-panowidget-1.40.0.aar
new file mode 100644
index 0000000..9ae2b42
--- /dev/null
+++ b/libraries/sdk-panowidget-1.40.0.aar
Binary files differ
diff --git a/libraries/sdk-panowidget-1.30.0.pom b/libraries/sdk-panowidget-1.40.0.pom
similarity index 92%
rename from libraries/sdk-panowidget-1.30.0.pom
rename to libraries/sdk-panowidget-1.40.0.pom
index 8ee2181..4498fdc 100644
--- a/libraries/sdk-panowidget-1.30.0.pom
+++ b/libraries/sdk-panowidget-1.40.0.pom
@@ -4,7 +4,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-panowidget</artifactId>
- <version>1.30.0</version>
+ <version>1.40.0</version>
<packaging>aar</packaging>
<name>Google VR SDK-CommonWidget</name>
@@ -19,7 +19,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-commonwidget</artifactId>
<type>aar</type>
- <version>1.30.0</version>
+ <version>1.40.0</version>
</dependency>
</dependencies>
</project>
diff --git a/libraries/sdk-videowidget-1.30.0.aar b/libraries/sdk-videowidget-1.30.0.aar
deleted file mode 100644
index e291979..0000000
--- a/libraries/sdk-videowidget-1.30.0.aar
+++ /dev/null
Binary files differ
diff --git a/libraries/sdk-videowidget-1.40.0.aar b/libraries/sdk-videowidget-1.40.0.aar
new file mode 100644
index 0000000..3141ea0
--- /dev/null
+++ b/libraries/sdk-videowidget-1.40.0.aar
Binary files differ
diff --git a/libraries/sdk-videowidget-1.30.0.pom b/libraries/sdk-videowidget-1.40.0.pom
similarity index 77%
rename from libraries/sdk-videowidget-1.30.0.pom
rename to libraries/sdk-videowidget-1.40.0.pom
index a80d929..df8f3a1 100644
--- a/libraries/sdk-videowidget-1.30.0.pom
+++ b/libraries/sdk-videowidget-1.40.0.pom
@@ -4,7 +4,7 @@
<groupId>com.google.vr</groupId>
<artifactId>sdk-videowidget</artifactId>
- <version>1.30.0</version>
+ <version>1.40.0</version>
<packaging>aar</packaging>
<name>Google VR SDK-CommonWidget</name>
@@ -16,17 +16,17 @@
<dependencies>
<dependency>
- <groupId>com.google.android.exoplayer</groupId>
- <artifactId>exoplayer</artifactId>
- <type>aar</type>
- <version>r2.1.0</version>
+ <groupId>com.android.support</groupId>
+ <artifactId>support-annotations</artifactId>
+ <version>25.0.1</version>
+ <scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.google.vr</groupId>
<artifactId>sdk-commonwidget</artifactId>
<type>aar</type>
- <version>1.30.0</version>
+ <version>1.40.0</version>
</dependency>
</dependencies>
</project>
diff --git a/samples/ndk-controllerpaint/build.gradle b/samples/ndk-controllerpaint/build.gradle
index fda58ce..5cddd2f 100644
--- a/samples/ndk-controllerpaint/build.gradle
+++ b/samples/ndk-controllerpaint/build.gradle
@@ -53,7 +53,7 @@
}
dependencies {
- compile 'com.google.vr:sdk-base:1.30.0'
+ compile 'com.google.vr:sdk-base:1.40.0'
}
build.dependsOn(':extractNdk')
\ No newline at end of file
diff --git a/samples/ndk-controllerpaint/src/main/AndroidManifest.xml b/samples/ndk-controllerpaint/src/main/AndroidManifest.xml
index e80a125..59ea891 100644
--- a/samples/ndk-controllerpaint/src/main/AndroidManifest.xml
+++ b/samples/ndk-controllerpaint/src/main/AndroidManifest.xml
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.vr.ndk.samples.controllerpaint"
- android:versionCode="170215033"
- android:versionName="1.30.0">
+ android:versionCode="170314023"
+ android:versionName="1.40.0">
<!-- The Daydream SDK requires API 24+ and OpenGL ES 2+. -->
<uses-sdk android:minSdkVersion="24" android:targetSdkVersion="24" />
diff --git a/samples/ndk-controllerpaint/src/main/java/com/google/vr/ndk/samples/controllerpaint/MainActivity.java b/samples/ndk-controllerpaint/src/main/java/com/google/vr/ndk/samples/controllerpaint/MainActivity.java
index 1d5e981..a041976 100644
--- a/samples/ndk-controllerpaint/src/main/java/com/google/vr/ndk/samples/controllerpaint/MainActivity.java
+++ b/samples/ndk-controllerpaint/src/main/java/com/google/vr/ndk/samples/controllerpaint/MainActivity.java
@@ -30,35 +30,52 @@
import javax.microedition.khronos.opengles.GL10;
/**
- * Main Activity.
+ * A Google VR NDK sample application.
*
- * <p>This is the main Activity for this demo app. It consists of a GLSurfaceView that is
- * responsible for doing the rendering. We forward all of the interesting events to native code.
+ * <p>This app is a "paint program" that allows the user to paint in virtual space using the
+ * controller. A cursor shows where the controller is pointing at. Touching or clicking the touchpad
+ * begins drawing. Then, as the user moves their hand, lines are drawn. The user can switch the
+ * drawing color by swiping to the right or left on the touchpad. The user can also change the
+ * drawing stroke width by moving their finger up and down on the touchpad.
+ *
+ * <p>This is the main Activity for the sample application. It initializes a GLSurfaceView to allow
+ * rendering, a GvrLayout for GVR API access, and forwards relevant events to the native demo app
+ * instance where rendering and interaction are handled.
*/
public class MainActivity extends Activity {
private static final String TAG = "MainActivity";
- // Opaque native pointer to the DemoApp C++ object.
- // This object is owned by the MainActivity instance and passed to the native methods.
- private long nativeControllerPaint;
-
- // This is done on the GL thread because refreshViewerProfile isn't thread-safe.
- private final Runnable refreshViewerProfileRunnable =
- new Runnable() {
- @Override
- public void run() {
- gvrLayout.getGvrApi().refreshViewerProfile();
- }
- };
static {
// Load our JNI code.
System.loadLibrary("controllerpaint_jni");
}
+ // Opaque native pointer to the DemoApp C++ object.
+ // This object is owned by the MainActivity instance and passed to the native methods.
+ private long nativeControllerPaint;
+
private GvrLayout gvrLayout;
private GLSurfaceView surfaceView;
private AssetManager assetManager;
+ // Note that pause and resume signals to the native app are performed on the GL thread, ensuring
+ // thread-safety.
+ private final Runnable pauseNativeRunnable =
+ new Runnable() {
+ @Override
+ public void run() {
+ nativeOnPause(nativeControllerPaint);
+ }
+ };
+
+ private final Runnable resumeNativeRunnable =
+ new Runnable() {
+ @Override
+ public void run() {
+ nativeOnResume(nativeControllerPaint);
+ }
+ };
+
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
@@ -131,9 +148,9 @@
@Override
protected void onPause() {
+ surfaceView.queueEvent(pauseNativeRunnable);
surfaceView.onPause();
gvrLayout.onPause();
- nativeOnPause(nativeControllerPaint);
super.onPause();
}
@@ -142,8 +159,7 @@
super.onResume();
gvrLayout.onResume();
surfaceView.onResume();
- nativeOnResume(nativeControllerPaint);
- surfaceView.queueEvent(refreshViewerProfileRunnable);
+ surfaceView.queueEvent(resumeNativeRunnable);
}
@Override
@@ -195,10 +211,10 @@
};
private native long nativeOnCreate(AssetManager assetManager, long gvrContextPtr);
+ private native void nativeOnDestroy(long controllerPaintJptr);
private native void nativeOnResume(long controllerPaintJptr);
private native void nativeOnPause(long controllerPaintJptr);
private native void nativeOnSurfaceCreated(long controllerPaintJptr);
private native void nativeOnSurfaceChanged(int width, int height, long controllerPaintJptr);
private native void nativeOnDrawFrame(long controllerPaintJptr);
- private native void nativeOnDestroy(long controllerPaintJptr);
}
diff --git a/samples/ndk-controllerpaint/src/main/jni/demoapp.cc b/samples/ndk-controllerpaint/src/main/jni/demoapp.cc
index a23033f..db6c365 100644
--- a/samples/ndk-controllerpaint/src/main/jni/demoapp.cc
+++ b/samples/ndk-controllerpaint/src/main/jni/demoapp.cc
@@ -205,6 +205,7 @@
void DemoApp::OnResume() {
LOGD("DemoApp::OnResume");
if (gvr_api_initialized_) {
+ gvr_api_->RefreshViewerProfile();
gvr_api_->ResumeTracking();
}
if (controller_api_) controller_api_->Resume();
diff --git a/samples/ndk-treasurehunt/build.gradle b/samples/ndk-treasurehunt/build.gradle
index e12fac8..e2dadeb 100644
--- a/samples/ndk-treasurehunt/build.gradle
+++ b/samples/ndk-treasurehunt/build.gradle
@@ -54,8 +54,8 @@
}
dependencies {
- compile 'com.google.vr:sdk-audio:1.30.0'
- compile 'com.google.vr:sdk-base:1.30.0'
+ compile 'com.google.vr:sdk-audio:1.40.0'
+ compile 'com.google.vr:sdk-base:1.40.0'
}
build.dependsOn(':extractNdk')
diff --git a/samples/ndk-treasurehunt/src/main/AndroidManifest.xml b/samples/ndk-treasurehunt/src/main/AndroidManifest.xml
index 764d938..b2af321 100644
--- a/samples/ndk-treasurehunt/src/main/AndroidManifest.xml
+++ b/samples/ndk-treasurehunt/src/main/AndroidManifest.xml
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.vr.ndk.samples.treasurehunt"
- android:versionCode="170215033"
- android:versionName="1.30.0">
+ android:versionCode="170314023"
+ android:versionName="1.40.0">
<!-- The GVR SDK requires API 19+ and OpenGL ES 2+. -->
<uses-sdk android:minSdkVersion="19" android:targetSdkVersion="24" />
diff --git a/samples/ndk-treasurehunt/src/main/java/com/google/vr/ndk/samples/treasurehunt/MainActivity.java b/samples/ndk-treasurehunt/src/main/java/com/google/vr/ndk/samples/treasurehunt/MainActivity.java
index 9b16e93..2bcd256 100644
--- a/samples/ndk-treasurehunt/src/main/java/com/google/vr/ndk/samples/treasurehunt/MainActivity.java
+++ b/samples/ndk-treasurehunt/src/main/java/com/google/vr/ndk/samples/treasurehunt/MainActivity.java
@@ -23,33 +23,55 @@
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
-import android.view.WindowManager;
import com.google.vr.ndk.base.AndroidCompat;
import com.google.vr.ndk.base.GvrLayout;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
-/** A Gvr API sample application. */
+/**
+ * A Google VR NDK sample application.
+ *
+ * <p>This app presents a scene consisting of a planar ground grid and a floating "treasure" cube.
+ * When the user finds the "treasure", they can invoke the trigger action, and the cube will be
+ * randomly repositioned. When in Cardboard mode, the user must gaze at the cube and use the
+ * Cardboard trigger button. When in Daydream mode, the user can use the controller to position the
+ * cursor, and use the controller buttons to invoke the trigger action.
+ *
+ * <p>This is the main Activity for the sample application. It initializes a GLSurfaceView to allow
+ * rendering, a GvrLayout for GVR API access, and forwards relevant events to the native renderer
+ * where rendering and interaction are handled.
+ */
public class MainActivity extends Activity {
- private GvrLayout gvrLayout;
- private long nativeTreasureHuntRenderer;
- private GLSurfaceView surfaceView;
-
- // This is done on the GL thread because refreshViewerProfile isn't thread-safe.
- private final Runnable refreshViewerProfileRunnable =
- new Runnable() {
- @Override
- public void run() {
- gvrLayout.getGvrApi().refreshViewerProfile();
- }
- };
-
static {
System.loadLibrary("gvr");
System.loadLibrary("gvr_audio");
System.loadLibrary("treasurehunt_jni");
}
+ // Opaque native pointer to the native TreasureHuntRenderer instance.
+ private long nativeTreasureHuntRenderer;
+
+ private GvrLayout gvrLayout;
+ private GLSurfaceView surfaceView;
+
+ // Note that pause and resume signals to the native renderer are performed on the GL thread,
+ // ensuring thread-safety.
+ private final Runnable pauseNativeRunnable =
+ new Runnable() {
+ @Override
+ public void run() {
+ nativeOnPause(nativeTreasureHuntRenderer);
+ }
+ };
+
+ private final Runnable resumeNativeRunnable =
+ new Runnable() {
+ @Override
+ public void run() {
+ nativeOnResume(nativeTreasureHuntRenderer);
+ }
+ };
+
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
@@ -103,7 +125,13 @@
if (event.getAction() == MotionEvent.ACTION_DOWN) {
// Give user feedback and signal a trigger event.
((Vibrator) getSystemService(Context.VIBRATOR_SERVICE)).vibrate(50);
- nativeOnTriggerEvent(nativeTreasureHuntRenderer);
+ surfaceView.queueEvent(
+ new Runnable() {
+ @Override
+ public void run() {
+ nativeOnTriggerEvent(nativeTreasureHuntRenderer);
+ }
+ });
return true;
}
return false;
@@ -124,26 +152,22 @@
// Enable VR Mode.
AndroidCompat.setVrModeEnabled(this, true);
-
- // Prevent screen from dimming/locking.
- getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override
protected void onPause() {
- super.onPause();
- nativeOnPause(nativeTreasureHuntRenderer);
- gvrLayout.onPause();
+ surfaceView.queueEvent(pauseNativeRunnable);
surfaceView.onPause();
+ gvrLayout.onPause();
+ super.onPause();
}
@Override
protected void onResume() {
super.onResume();
- nativeOnResume(nativeTreasureHuntRenderer);
gvrLayout.onResume();
surfaceView.onResume();
- surfaceView.queueEvent(refreshViewerProfileRunnable);
+ surfaceView.queueEvent(resumeNativeRunnable);
}
@Override
@@ -154,6 +178,7 @@
// native resources from the UI thread.
gvrLayout.shutdown();
nativeDestroyRenderer(nativeTreasureHuntRenderer);
+ nativeTreasureHuntRenderer = 0;
}
@Override
@@ -188,16 +213,10 @@
private native long nativeCreateRenderer(
ClassLoader appClassLoader, Context context, long nativeGvrContext);
-
private native void nativeDestroyRenderer(long nativeTreasureHuntRenderer);
-
private native void nativeInitializeGl(long nativeTreasureHuntRenderer);
-
private native long nativeDrawFrame(long nativeTreasureHuntRenderer);
-
private native void nativeOnTriggerEvent(long nativeTreasureHuntRenderer);
-
private native void nativeOnPause(long nativeTreasureHuntRenderer);
-
private native void nativeOnResume(long nativeTreasureHuntRenderer);
}
diff --git a/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_renderer.cc b/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_renderer.cc
index 0ec9119..1c21f9e 100644
--- a/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_renderer.cc
+++ b/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_renderer.cc
@@ -14,6 +14,7 @@
*/
#include "treasure_hunt_renderer.h" // NOLINT
+#include "treasure_hunt_shaders.h" // NOLINT
#include <android/log.h>
#include <assert.h>
@@ -54,76 +55,6 @@
static const float kPitchLimit = 0.12f;
static const float kYawLimit = 0.12f;
-static const char* kGridFragmentShader = R"glsl(
- precision mediump float;
- varying vec4 v_Color;
- varying vec3 v_Grid;
-
- void main() {
- float depth = gl_FragCoord.z / gl_FragCoord.w;
- if ((mod(abs(v_Grid.x), 10.0) < 0.1) ||
- (mod(abs(v_Grid.z), 10.0) < 0.1)) {
- gl_FragColor = max(0.0, (90.0-depth) / 90.0) *
- vec4(1.0, 1.0, 1.0, 1.0) +
- min(1.0, depth / 90.0) * v_Color;
- } else {
- gl_FragColor = v_Color;
- }
- })glsl";
-
-static const char* kLightVertexShader = R"glsl(
- uniform mat4 u_Model;
- uniform mat4 u_MVP;
- uniform mat4 u_MVMatrix;
- uniform vec3 u_LightPos;
- attribute vec4 a_Position;
- attribute vec4 a_Color;
- attribute vec3 a_Normal;
- varying vec4 v_Color;
- varying vec3 v_Grid;
-
- void main() {
- v_Grid = vec3(u_Model * a_Position);
- vec3 modelViewVertex = vec3(u_MVMatrix * a_Position);
- vec3 modelViewNormal = vec3(u_MVMatrix * vec4(a_Normal, 0.0));
- float distance = length(u_LightPos - modelViewVertex);
- vec3 lightVector = normalize(u_LightPos - modelViewVertex);
- float diffuse = max(dot(modelViewNormal, lightVector), 0.5);
- diffuse = diffuse * (1.0 / (1.0 + (0.00001 * distance * distance)));
- v_Color = vec4(a_Color.rgb * diffuse, a_Color.a);
- gl_Position = u_MVP * a_Position;
- })glsl";
-
-static const char* kPassthroughFragmentShader = R"glsl(
- precision mediump float;
- varying vec4 v_Color;
-
- void main() {
- gl_FragColor = v_Color;
- })glsl";
-
-static const char* kReticleVertexShader = R"glsl(
- uniform mat4 u_MVP;
- attribute vec4 a_Position;
- varying vec2 v_Coords;
-
- void main() {
- v_Coords = a_Position.xy;
- gl_Position = u_MVP * a_Position;
- })glsl";
-
-static const char* kReticleFragmentShader = R"glsl(
- precision mediump float;
-
- varying vec2 v_Coords;
-
- void main() {
- float r = length(v_Coords);
- float alpha = smoothstep(0.5, 0.6, r) * (1.0 - smoothstep(0.8, 0.9, r));
- if (alpha == 0.0) discard;
- gl_FragColor = vec4(alpha);
- })glsl";
-
// Sound file in APK assets.
static const char* kObjectSoundFile = "cube_sound.wav";
static const char* kSuccessSoundFile = "success.wav";
@@ -140,6 +71,31 @@
return result;
}
+// Flatten a pair of mat4's into an array of 32 floats, useful when feeding
+// uniform values to OpenGL for multiview.
+static std::array<float, 32> MatrixPairToGLArray(const gvr::Mat4f matrix[]) {
+ std::array<float, 32> result;
+ for (int i = 0; i < 4; ++i) {
+ for (int j = 0; j < 4; ++j) {
+ result[j * 4 + i] = matrix[0].m[i][j];
+ result[16 + j * 4 + i] = matrix[1].m[i][j];
+ }
+ }
+ return result;
+}
+
+// Flatten a pair of vec3's into an array of 6 floats, useful when feeding
+// uniform values to OpenGL for multiview.
+static std::array<float, 6> VectorPairToGLArray(
+ const std::array<float, 3> vec[]) {
+ std::array<float, 6> result;
+ for (int k = 0; k < 3; ++k) {
+ result[k] = vec[0][k];
+ result[k + 3] = vec[1][k];
+ }
+ return result;
+}
+
static std::array<float, 4> MatrixVectorMul(const gvr::Mat4f& matrix,
const std::array<float, 4>& vec) {
std::array<float, 4> result;
@@ -166,6 +122,10 @@
return result;
}
+static std::array<float, 3> Vec4ToVec3(const std::array<float, 4>& vec) {
+ return {vec[0], vec[1], vec[2]};
+}
+
static gvr::Mat4f PerspectiveMatrixFromView(const gvr::Rectf& fov, float z_near,
float z_far) {
gvr::Mat4f result;
@@ -293,7 +253,8 @@
gvr_context* gvr_context, std::unique_ptr<gvr::AudioApi> gvr_audio_api)
: gvr_api_(gvr::GvrApi::WrapNonOwned(gvr_context)),
gvr_audio_api_(std::move(gvr_audio_api)),
- scratch_viewport_(gvr_api_->CreateBufferViewport()),
+ viewport_left_(gvr_api_->CreateBufferViewport()),
+ viewport_right_(gvr_api_->CreateBufferViewport()),
floor_vertices_(world_layout_data_.floor_coords.data()),
cube_vertices_(world_layout_data_.cube_coords.data()),
cube_colors_(world_layout_data_.cube_colors.data()),
@@ -326,16 +287,20 @@
void TreasureHuntRenderer::InitializeGl() {
gvr_api_->InitializeGl();
+ multiview_enabled_ = gvr_api_->IsFeatureSupported(GVR_FEATURE_MULTIVIEW);
+ LOGD(multiview_enabled_ ? "Using multiview." : "Not using multiview.");
- const int vertex_shader = LoadGLShader(GL_VERTEX_SHADER, &kLightVertexShader);
+ int index = multiview_enabled_ ? 1 : 0;
+ const int vertex_shader =
+ LoadGLShader(GL_VERTEX_SHADER, &kDiffuseLightingVertexShaders[index]);
const int grid_shader =
- LoadGLShader(GL_FRAGMENT_SHADER, &kGridFragmentShader);
+ LoadGLShader(GL_FRAGMENT_SHADER, &kGridFragmentShaders[index]);
const int pass_through_shader =
- LoadGLShader(GL_FRAGMENT_SHADER, &kPassthroughFragmentShader);
+ LoadGLShader(GL_FRAGMENT_SHADER, &kPassthroughFragmentShaders[index]);
const int reticle_vertex_shader =
- LoadGLShader(GL_VERTEX_SHADER, &kReticleVertexShader);
+ LoadGLShader(GL_VERTEX_SHADER, &kReticleVertexShaders[index]);
const int reticle_fragment_shader =
- LoadGLShader(GL_FRAGMENT_SHADER, &kReticleFragmentShader);
+ LoadGLShader(GL_FRAGMENT_SHADER, &kReticleFragmentShaders[index]);
cube_program_ = glCreateProgram();
glAttachShader(cube_program_, vertex_shader);
@@ -383,7 +348,7 @@
CheckGLError("Reticle program");
- reticle_position_param_ = glGetAttribLocation(floor_program_, "a_Position");
+ reticle_position_param_ = glGetAttribLocation(reticle_program_, "a_Position");
reticle_modelview_projection_param_ =
glGetUniformLocation(reticle_program_, "u_MVP");
@@ -413,9 +378,18 @@
specs.push_back(gvr_api_->CreateBufferSpec());
specs[0].SetColorFormat(GVR_COLOR_FORMAT_RGBA_8888);
specs[0].SetDepthStencilFormat(GVR_DEPTH_STENCIL_FORMAT_DEPTH_16);
- specs[0].SetSize(render_size_);
specs[0].SetSamples(2);
+ // With multiview, the distortion buffer is a texture array with two layers
+ // whose width is half the display width.
+ if (multiview_enabled_) {
+ gvr::Sizei half_size = { render_size_.width / 2, render_size_.height };
+ specs[0].SetMultiviewLayers(2);
+ specs[0].SetSize(half_size);
+ } else {
+ specs[0].SetSize(render_size_);
+ }
+
specs.push_back(gvr_api_->CreateBufferSpec());
specs[1].SetSize(reticle_render_size_);
specs[1].SetColorFormat(GVR_COLOR_FORMAT_RGBA_8888);
@@ -489,26 +463,56 @@
// A client app does its rendering here.
gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
-
+ gvr::BufferViewport* viewport[2] = {
+ &viewport_left_,
+ &viewport_right_,
+ };
head_view_ = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
- gvr::Mat4f left_eye_matrix = gvr_api_->GetEyeFromHeadMatrix(GVR_LEFT_EYE);
- gvr::Mat4f right_eye_matrix = gvr_api_->GetEyeFromHeadMatrix(GVR_RIGHT_EYE);
- gvr::Mat4f left_eye_view = MatrixMul(left_eye_matrix, head_view_);
- gvr::Mat4f right_eye_view = MatrixMul(right_eye_matrix, head_view_);
-
viewport_list_->SetToRecommendedBufferViewports();
gvr::BufferViewport reticle_viewport = gvr_api_->CreateBufferViewport();
reticle_viewport.SetSourceBufferIndex(1);
reticle_viewport.SetReprojection(GVR_REPROJECTION_NONE);
- reticle_viewport.SetSourceUv({0.f, 1.f, 0.f, 1.f});
+ const gvr_rectf fullscreen = { 0, 1, 0, 1 };
+ reticle_viewport.SetSourceUv(fullscreen);
- // Use the viewport transform to put the reticle in the correct place.
- reticle_viewport.SetTransform(MatrixMul(left_eye_matrix, model_reticle_));
- reticle_viewport.SetTargetEye(GVR_LEFT_EYE);
- viewport_list_->SetBufferViewport(2, reticle_viewport);
- reticle_viewport.SetTransform(MatrixMul(right_eye_matrix, model_reticle_));
- reticle_viewport.SetTargetEye(GVR_RIGHT_EYE);
- viewport_list_->SetBufferViewport(3, reticle_viewport);
+ gvr::Mat4f controller_matrix =
+ ControllerQuatToMatrix(gvr_controller_state_.GetOrientation());
+ model_cursor_ = MatrixMul(controller_matrix, model_reticle_);
+
+ gvr::Mat4f eye_views[2];
+ for (int eye = 0; eye < 2; ++eye) {
+ const gvr::Eye gvr_eye = eye == 0 ? GVR_LEFT_EYE : GVR_RIGHT_EYE;
+ const gvr::Mat4f eye_from_head = gvr_api_->GetEyeFromHeadMatrix(gvr_eye);
+ eye_views[eye] = MatrixMul(eye_from_head, head_view_);
+
+ viewport_list_->GetBufferViewport(eye, viewport[eye]);
+
+ if (multiview_enabled_) {
+ viewport[eye]->SetSourceUv(fullscreen);
+ viewport[eye]->SetSourceLayer(eye);
+ viewport_list_->SetBufferViewport(eye, *viewport[eye]);
+ }
+
+ reticle_viewport.SetTransform(MatrixMul(eye_from_head, model_reticle_));
+ reticle_viewport.SetTargetEye(gvr_eye);
+ // The first two viewports are for the 3D scene (one for each eye), the
+ // latter two viewports are for the reticle (one for each eye).
+ viewport_list_->SetBufferViewport(2 + eye, reticle_viewport);
+
+ modelview_cube_[eye] = MatrixMul(eye_views[eye], model_cube_);
+ modelview_floor_[eye] = MatrixMul(eye_views[eye], model_floor_);
+ const gvr_rectf fov = viewport[eye]->GetSourceFov();
+ const gvr::Mat4f perspective =
+ PerspectiveMatrixFromView(fov, kZNear, kZFar);
+ modelview_projection_cube_[eye] =
+ MatrixMul(perspective, modelview_cube_[eye]);
+ modelview_projection_floor_[eye] =
+ MatrixMul(perspective, modelview_floor_[eye]);
+ light_pos_eye_space_[eye] =
+ Vec4ToVec3(MatrixVectorMul(eye_views[eye], light_pos_world_space_));
+ modelview_projection_cursor_[eye] =
+ MatrixMul(perspective, MatrixMul(eye_views[eye], model_cursor_));
+ }
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
@@ -519,10 +523,12 @@
frame.BindBuffer(0);
glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up.
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
- viewport_list_->GetBufferViewport(0, &scratch_viewport_);
- DrawWorld(left_eye_view, scratch_viewport_);
- viewport_list_->GetBufferViewport(1, &scratch_viewport_);
- DrawWorld(right_eye_view, scratch_viewport_);
+ if (multiview_enabled_) {
+ DrawWorld(kMultiview);
+ } else {
+ DrawWorld(kLeftView);
+ DrawWorld(kRightView);
+ }
frame.Unbind();
frame.BindBuffer(1);
@@ -532,9 +538,9 @@
// In Cardboard viewer, draw head-locked reticle on a separate layer since the
// cursor is controlled by head movement. In Daydream viewer, this layer is
// left empty, since the cursor is controlled by controller and drawn with
- // DrawCursor() in the same frame buffer as the virtual scene.
+ // DrawDaydreamCursor() in the same frame buffer as the virtual scene.
if (gvr_viewer_type_ == GVR_VIEWER_TYPE_CARDBOARD) {
- DrawReticle();
+ DrawCardboardReticle();
}
frame.Unbind();
@@ -577,6 +583,7 @@
void TreasureHuntRenderer::OnResume() {
gvr_api_->ResumeTracking();
+ gvr_api_->RefreshViewerProfile();
gvr_audio_api_->Resume();
gvr_viewer_type_ = gvr_api_->GetViewerType();
ResumeControllerApiAsNeeded();
@@ -608,58 +615,51 @@
}
/**
- * Draws a frame for an eye.
+ * Draws a frame for a particular view.
*
- * @param eye The eye to render. Includes all required transformations.
+ * @param view The view to render: left, right, or both (multiview).
*/
-void TreasureHuntRenderer::DrawWorld(const gvr::Mat4f& view_matrix,
- const gvr::BufferViewport& viewport) {
- const gvr::Recti pixel_rect =
- CalculatePixelSpaceRect(render_size_, viewport.GetSourceUv());
-
- glViewport(pixel_rect.left, pixel_rect.bottom,
- pixel_rect.right - pixel_rect.left,
- pixel_rect.top - pixel_rect.bottom);
-
- CheckGLError("World drawing setup");
-
- // Set the position of the light
- light_pos_eye_space_ = MatrixVectorMul(view_matrix, light_pos_world_space_);
- const gvr::Mat4f perspective =
- PerspectiveMatrixFromView(viewport.GetSourceFov(), kZNear, kZFar);
- modelview_ = MatrixMul(view_matrix, model_cube_);
- modelview_projection_cube_ = MatrixMul(perspective, modelview_);
- DrawCube();
-
- // Set modelview_ for the floor, so we draw floor in the correct location
- modelview_ = MatrixMul(view_matrix, model_floor_);
- modelview_projection_floor_ = MatrixMul(perspective, modelview_);
- DrawFloor();
-
+void TreasureHuntRenderer::DrawWorld(ViewType view) {
+ if (view == kMultiview) {
+ glViewport(0, 0, render_size_.width / 2, render_size_.height);
+ } else {
+ const gvr::BufferViewport& viewport =
+ view == kLeftView ? viewport_left_ : viewport_right_;
+ const gvr::Recti pixel_rect =
+ CalculatePixelSpaceRect(render_size_, viewport.GetSourceUv());
+ glViewport(pixel_rect.left, pixel_rect.bottom,
+ pixel_rect.right - pixel_rect.left,
+ pixel_rect.top - pixel_rect.bottom);
+ }
+ DrawCube(view);
+ DrawFloor(view);
if (gvr_viewer_type_ == GVR_VIEWER_TYPE_DAYDREAM) {
- gvr::Mat4f controller_matrix =
- ControllerQuatToMatrix(gvr_controller_state_.GetOrientation());
- model_cursor_ = MatrixMul(controller_matrix, model_reticle_);
- modelview_ = MatrixMul(view_matrix, model_cursor_);
- modelview_projection_cursor_ = MatrixMul(perspective, modelview_);
- DrawCursor();
+ DrawDaydreamCursor(view);
}
}
-void TreasureHuntRenderer::DrawCube() {
+void TreasureHuntRenderer::DrawCube(ViewType view) {
glUseProgram(cube_program_);
- glUniform3fv(cube_light_pos_param_, 1, light_pos_eye_space_.data());
+ if (view == kMultiview) {
+ glUniform3fv(cube_light_pos_param_, 2,
+ VectorPairToGLArray(light_pos_eye_space_).data());
+ glUniformMatrix4fv(cube_modelview_param_, 2, GL_FALSE,
+ MatrixPairToGLArray(modelview_cube_).data());
+ glUniformMatrix4fv(cube_modelview_projection_param_, 2, GL_FALSE,
+ MatrixPairToGLArray(modelview_projection_cube_).data());
+ } else {
+ glUniform3fv(cube_light_pos_param_, 1, light_pos_eye_space_[view].data());
+ glUniformMatrix4fv(cube_modelview_param_, 1, GL_FALSE,
+ MatrixToGLArray(modelview_cube_[view]).data());
+ glUniformMatrix4fv(
+ cube_modelview_projection_param_, 1, GL_FALSE,
+ MatrixToGLArray(modelview_projection_cube_[view]).data());
+ }
// Set the Model in the shader, used to calculate lighting
glUniformMatrix4fv(cube_model_param_, 1, GL_FALSE,
MatrixToGLArray(model_cube_).data());
- // Set the ModelView in the shader, used to calculate lighting
- glUniformMatrix4fv(cube_modelview_param_, 1, GL_FALSE,
- MatrixToGLArray(modelview_).data());
- // Set the ModelViewProjection matrix in the shader.
- glUniformMatrix4fv(cube_modelview_projection_param_, 1, GL_FALSE,
- MatrixToGLArray(modelview_projection_cube_).data());
// Set the position of the cube
glVertexAttribPointer(cube_position_param_, kCoordsPerVertex, GL_FLOAT, false,
@@ -692,17 +692,27 @@
CheckGLError("Drawing cube");
}
-void TreasureHuntRenderer::DrawFloor() {
+void TreasureHuntRenderer::DrawFloor(ViewType view) {
glUseProgram(floor_program_);
- // Set ModelView, MVP, position, normals, and color.
- glUniform3fv(floor_light_pos_param_, 1, light_pos_eye_space_.data());
+ if (view == kMultiview) {
+ glUniform3fv(floor_light_pos_param_, 2,
+ VectorPairToGLArray(light_pos_eye_space_).data());
+ glUniformMatrix4fv(floor_modelview_param_, 2, GL_FALSE,
+ MatrixPairToGLArray(modelview_floor_).data());
+ glUniformMatrix4fv(floor_modelview_projection_param_, 2, GL_FALSE,
+ MatrixPairToGLArray(modelview_projection_floor_).data());
+ } else {
+ glUniform3fv(floor_light_pos_param_, 1, light_pos_eye_space_[view].data());
+ glUniformMatrix4fv(floor_modelview_param_, 1, GL_FALSE,
+ MatrixToGLArray(modelview_floor_[view]).data());
+ glUniformMatrix4fv(
+ floor_modelview_projection_param_, 1, GL_FALSE,
+ MatrixToGLArray(modelview_projection_floor_[view]).data());
+ }
+
glUniformMatrix4fv(floor_model_param_, 1, GL_FALSE,
MatrixToGLArray(model_floor_).data());
- glUniformMatrix4fv(floor_modelview_param_, 1, GL_FALSE,
- MatrixToGLArray(modelview_).data());
- glUniformMatrix4fv(floor_modelview_projection_param_, 1, GL_FALSE,
- MatrixToGLArray(modelview_projection_floor_).data());
glVertexAttribPointer(floor_position_param_, kCoordsPerVertex, GL_FLOAT,
false, 0, floor_vertices_);
glVertexAttrib3f(floor_normal_param_, 0.0f, 1.0f, 0.0f);
@@ -715,10 +725,17 @@
CheckGLError("Drawing floor");
}
-void TreasureHuntRenderer::DrawCursor() {
+void TreasureHuntRenderer::DrawDaydreamCursor(ViewType view) {
glUseProgram(reticle_program_);
- glUniformMatrix4fv(reticle_modelview_projection_param_, 1, GL_FALSE,
- MatrixToGLArray(modelview_projection_cursor_).data());
+ if (view == kMultiview) {
+ glUniformMatrix4fv(
+ reticle_modelview_projection_param_, 2, GL_FALSE,
+ MatrixPairToGLArray(modelview_projection_cursor_).data());
+ } else {
+ glUniformMatrix4fv(
+ reticle_modelview_projection_param_, 1, GL_FALSE,
+ MatrixToGLArray(modelview_projection_cursor_[view]).data());
+ }
glVertexAttribPointer(reticle_position_param_, kCoordsPerVertex, GL_FLOAT,
false, 0, reticle_vertices_);
glEnableVertexAttribArray(reticle_position_param_);
@@ -727,7 +744,7 @@
CheckGLError("Drawing cursor");
}
-void TreasureHuntRenderer::DrawReticle() {
+void TreasureHuntRenderer::DrawCardboardReticle() {
glViewport(0, 0, reticle_render_size_.width, reticle_render_size_.height);
glUseProgram(reticle_program_);
const gvr::Mat4f uniform_matrix = {{{1.f, 0.f, 0.f, 0.f},
diff --git a/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_renderer.h b/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_renderer.h
index 3f93241..b7b8e97 100644
--- a/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_renderer.h
+++ b/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_renderer.h
@@ -90,28 +90,34 @@
*/
int LoadGLShader(int type, const char** shadercode);
+ enum ViewType {
+ kLeftView,
+ kRightView,
+ kMultiview
+ };
+
/**
- * Draws all world-space objects for one eye.
+ * Draws all world-space objects for the given view type.
*
- * @param view_matrix View transformation for the current eye.
- * @param viewport The buffer viewport for which we are rendering.
+ * @param view Specifies which view we are rendering.
*/
- void DrawWorld(const gvr::Mat4f& view_matrix,
- const gvr::BufferViewport& viewport);
+ void DrawWorld(ViewType view);
/**
* Draws the reticle. The reticle is positioned using viewport parameters,
* so no data about its eye-space position is needed here.
*/
- void DrawReticle();
+ void DrawCardboardReticle();
/**
* Draw the cube.
*
* We've set all of our transformation matrices. Now we simply pass them
* into the shader.
+ *
+ * @param view Specifies which eye we are rendering: left, right, or both.
*/
- void DrawCube();
+ void DrawCube(ViewType view);
/**
* Draw the floor.
@@ -119,16 +125,20 @@
* This feeds in data for the floor into the shader. Note that this doesn't
* feed in data about position of the light, so if we rewrite our code to
* draw the floor first, the lighting might look strange.
+ *
+ * @param view Specifies which eye we are rendering: left, right, or both.
*/
- void DrawFloor();
+ void DrawFloor(ViewType view);
/**
* Draws the cursor.
*
* We've set all of our transformation matrices. Now we simply pass them
* into the shader.
+ *
+ * @param view Specifies which eye we are rendering: left, right, or both.
*/
- void DrawCursor();
+ void DrawDaydreamCursor(ViewType view);
/**
* Find a new random position for the object.
@@ -193,7 +203,8 @@
std::unique_ptr<gvr::AudioApi> gvr_audio_api_;
std::unique_ptr<gvr::BufferViewportList> viewport_list_;
std::unique_ptr<gvr::SwapChain> swapchain_;
- gvr::BufferViewport scratch_viewport_;
+ gvr::BufferViewport viewport_left_;
+ gvr::BufferViewport viewport_right_;
std::vector<float> lightpos_;
@@ -232,24 +243,31 @@
const gvr::Sizei reticle_render_size_;
const std::array<float, 4> light_pos_world_space_;
- std::array<float, 4> light_pos_eye_space_;
gvr::Mat4f head_view_;
gvr::Mat4f model_cube_;
gvr::Mat4f camera_;
gvr::Mat4f view_;
- gvr::Mat4f modelview_projection_cube_;
- gvr::Mat4f modelview_projection_floor_;
- gvr::Mat4f modelview_projection_cursor_;
- gvr::Mat4f modelview_;
gvr::Mat4f model_floor_;
gvr::Mat4f model_reticle_;
gvr::Mat4f model_cursor_;
gvr::Sizei render_size_;
+ // View-dependent values. These are stored in length two arrays to allow
+ // syncing with uniforms consumed by the multiview vertex shader. For
+ // simplicity, we stash valid values in both elements (left, right) of these
+ // arrays even when multiview is disabled.
+ std::array<float, 3> light_pos_eye_space_[2];
+ gvr::Mat4f modelview_projection_cube_[2];
+ gvr::Mat4f modelview_projection_floor_[2];
+ gvr::Mat4f modelview_projection_cursor_[2];
+ gvr::Mat4f modelview_cube_[2];
+ gvr::Mat4f modelview_floor_[2];
+
int score_;
float object_distance_;
float reticle_distance_;
+ bool multiview_enabled_;
gvr::AudioSourceId audio_source_id_;
diff --git a/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_shaders.h b/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_shaders.h
new file mode 100644
index 0000000..42d3ed5
--- /dev/null
+++ b/samples/ndk-treasurehunt/src/main/jni/treasure_hunt_shaders.h
@@ -0,0 +1,186 @@
+/* Copyright 2017 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TREASUREHUNT_APP_SRC_MAIN_JNI_TREASUREHUNTSHADERS_H_ // NOLINT
+#define TREASUREHUNT_APP_SRC_MAIN_JNI_TREASUREHUNTSHADERS_H_ // NOLINT
+
+// Each shader has two variants: a single-eye ES 2.0 variant, and a multiview
+// ES 3.0 variant. The multiview vertex shaders use transforms defined by
+// arrays of mat4 uniforms, using gl_ViewID_OVR to determine the array index.
+
+static const char* kDiffuseLightingVertexShaders[] = {
+ R"glsl(
+ uniform mat4 u_Model;
+ uniform mat4 u_MVP;
+ uniform mat4 u_MVMatrix;
+ uniform vec3 u_LightPos;
+ attribute vec4 a_Position;
+ attribute vec4 a_Color;
+ attribute vec3 a_Normal;
+ varying vec4 v_Color;
+ varying vec3 v_Grid;
+
+ void main() {
+ v_Grid = vec3(u_Model * a_Position);
+ vec3 modelViewVertex = vec3(u_MVMatrix * a_Position);
+ vec3 modelViewNormal = vec3(u_MVMatrix * vec4(a_Normal, 0.0));
+ float distance = length(u_LightPos - modelViewVertex);
+ vec3 lightVector = normalize(u_LightPos - modelViewVertex);
+ float diffuse = max(dot(modelViewNormal, lightVector), 0.5);
+ diffuse = diffuse * (1.0 / (1.0 + (0.00001 * distance * distance)));
+ v_Color = vec4(a_Color.rgb * diffuse, a_Color.a);
+ gl_Position = u_MVP * a_Position;
+ })glsl",
+
+ R"glsl(#version 300 es
+ #extension GL_OVR_multiview2 : enable
+
+ layout(num_views=2) in;
+
+ uniform mat4 u_Model;
+ uniform mat4 u_MVP[2];
+ uniform mat4 u_MVMatrix[2];
+ uniform vec3 u_LightPos[2];
+ in vec4 a_Position;
+ in vec4 a_Color;
+ in vec3 a_Normal;
+ out vec4 v_Color;
+ out vec3 v_Grid;
+
+ void main() {
+ mat4 mvp = u_MVP[gl_ViewID_OVR];
+ mat4 modelview = u_MVMatrix[gl_ViewID_OVR];
+ vec3 lightpos = u_LightPos[gl_ViewID_OVR];
+ v_Grid = vec3(u_Model * a_Position);
+ vec3 modelViewVertex = vec3(modelview * a_Position);
+ vec3 modelViewNormal = vec3(modelview * vec4(a_Normal, 0.0));
+ float distance = length(lightpos - modelViewVertex);
+ vec3 lightVector = normalize(lightpos - modelViewVertex);
+ float diffuse = max(dot(modelViewNormal, lightVector), 0.5);
+ diffuse = diffuse * (1.0 / (1.0 + (0.00001 * distance * distance)));
+ v_Color = vec4(a_Color.rgb * diffuse, a_Color.a);
+ gl_Position = mvp * a_Position;
+ })glsl"
+};
+
+static const char* kGridFragmentShaders[] = {
+ R"glsl(
+ precision mediump float;
+ varying vec4 v_Color;
+ varying vec3 v_Grid;
+
+ void main() {
+ float depth = gl_FragCoord.z / gl_FragCoord.w;
+ if ((mod(abs(v_Grid.x), 10.0) < 0.1) ||
+ (mod(abs(v_Grid.z), 10.0) < 0.1)) {
+ gl_FragColor = max(0.0, (90.0-depth) / 90.0) *
+ vec4(1.0, 1.0, 1.0, 1.0) +
+ min(1.0, depth / 90.0) * v_Color;
+ } else {
+ gl_FragColor = v_Color;
+ }
+ })glsl",
+
+ R"glsl(#version 300 es
+
+ precision mediump float;
+ in vec4 v_Color;
+ in vec3 v_Grid;
+ out vec4 FragColor;
+
+ void main() {
+ float depth = gl_FragCoord.z / gl_FragCoord.w;
+ if ((mod(abs(v_Grid.x), 10.0) < 0.1) ||
+ (mod(abs(v_Grid.z), 10.0) < 0.1)) {
+ FragColor = max(0.0, (90.0-depth) / 90.0) *
+ vec4(1.0, 1.0, 1.0, 1.0) +
+ min(1.0, depth / 90.0) * v_Color;
+ } else {
+ FragColor = v_Color;
+ }
+ })glsl"
+};
+
+static const char* kPassthroughFragmentShaders[] = {
+ R"glsl(
+ precision mediump float;
+ varying vec4 v_Color;
+
+ void main() {
+ gl_FragColor = v_Color;
+ })glsl",
+
+ R"glsl(#version 300 es
+
+ precision mediump float;
+ in vec4 v_Color;
+ out vec4 FragColor;
+
+ void main() {
+ FragColor = v_Color;
+ })glsl"
+};
+
+static const char* kReticleVertexShaders[] = { R"glsl(
+ uniform mat4 u_MVP;
+ attribute vec4 a_Position;
+ varying vec2 v_Coords;
+
+ void main() {
+ v_Coords = a_Position.xy;
+ gl_Position = u_MVP * a_Position;
+ })glsl",
+
+ R"glsl(#version 300 es
+ #extension GL_OVR_multiview2 : enable
+
+ layout(num_views=2) in;
+ uniform mat4 u_MVP[2];
+ in vec4 a_Position;
+ out vec2 v_Coords;
+
+ void main() {
+ v_Coords = a_Position.xy;
+ gl_Position = u_MVP[gl_ViewID_OVR] * a_Position;
+ })glsl"
+};
+
+static const char* kReticleFragmentShaders[] = { R"glsl(
+ precision mediump float;
+
+ varying vec2 v_Coords;
+
+ void main() {
+ float r = length(v_Coords);
+ float alpha = smoothstep(0.5, 0.6, r) * (1.0 - smoothstep(0.8, 0.9, r));
+ if (alpha == 0.0) discard;
+ gl_FragColor = vec4(alpha);
+ })glsl",
+
+ R"glsl(#version 300 es
+ precision mediump float;
+
+ in vec2 v_Coords;
+ out vec4 FragColor;
+
+ void main() {
+ float r = length(v_Coords);
+ float alpha = smoothstep(0.5, 0.6, r) * (1.0 - smoothstep(0.8, 0.9, r));
+ if (alpha == 0.0) discard;
+ FragColor = vec4(alpha);
+ })glsl"
+};
+
+#endif // TREASUREHUNT_APP_SRC_MAIN_JNI_TREASUREHUNTSHADERS_H_ // NOLINT
diff --git a/samples/sdk-controllerclient/build.gradle b/samples/sdk-controllerclient/build.gradle
index 6ba2910..880cee9 100644
--- a/samples/sdk-controllerclient/build.gradle
+++ b/samples/sdk-controllerclient/build.gradle
@@ -37,6 +37,6 @@
}
dependencies {
- compile 'com.google.vr:sdk-base:1.30.0'
- compile 'com.google.vr:sdk-controller:1.30.0'
+ compile 'com.google.vr:sdk-base:1.40.0'
+ compile 'com.google.vr:sdk-controller:1.40.0'
}
diff --git a/samples/sdk-controllerclient/src/main/AndroidManifest.xml b/samples/sdk-controllerclient/src/main/AndroidManifest.xml
index 8e49f9b..c6de2f6 100644
--- a/samples/sdk-controllerclient/src/main/AndroidManifest.xml
+++ b/samples/sdk-controllerclient/src/main/AndroidManifest.xml
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.vr.sdk.samples.controllerclient"
- android:versionCode="170215033"
- android:versionName="1.30.0">
+ android:versionCode="170314023"
+ android:versionName="1.40.0">
<uses-sdk android:minSdkVersion="24" android:targetSdkVersion="24" />
<application
diff --git a/samples/sdk-controllerclient/src/main/java/com/google/vr/sdk/samples/controllerclient/ControllerClientActivity.java b/samples/sdk-controllerclient/src/main/java/com/google/vr/sdk/samples/controllerclient/ControllerClientActivity.java
index 52132ed..836b31d 100644
--- a/samples/sdk-controllerclient/src/main/java/com/google/vr/sdk/samples/controllerclient/ControllerClientActivity.java
+++ b/samples/sdk-controllerclient/src/main/java/com/google/vr/sdk/samples/controllerclient/ControllerClientActivity.java
@@ -149,20 +149,29 @@
apiStatusView.setText(apiStatus);
controllerStateView.setText(ConnectionStates.toString(controllerState));
controller.update();
- controllerOrientationText.setText(
- " " + controller.orientation + "\n" + controller.orientation.toAxisAngleString());
+
+ float[] angles = new float[3];
+ controller.orientation.toYawPitchRollDegrees(angles);
+ controllerOrientationText.setText(String.format(
+ "%s\n%s\n[%4.0f\u00b0 y %4.0f\u00b0 p %4.0f\u00b0 r]",
+ controller.orientation,
+ controller.orientation.toAxisAngleString(),
+ angles[0], angles[1], angles[2]));
+
if (controller.isTouching) {
controllerTouchpadView.setText(
String.format("[%4.2f, %4.2f]", controller.touch.x, controller.touch.y));
} else {
controllerTouchpadView.setText("[ NO TOUCH ]");
}
+
controllerButtonView.setText(String.format("[%s][%s][%s][%s][%s]",
controller.appButtonState ? "A" : " ",
controller.homeButtonState ? "H" : " ",
controller.clickButtonState ? "T" : " ",
controller.volumeUpButtonState ? "+" : " ",
controller.volumeDownButtonState ? "-" : " "));
+
controllerBatteryView.setText(String.format("[level: %s][charging: %s]",
Controller.BatteryLevels.toString(controller.batteryLevelBucket),
controller.isCharging));
diff --git a/samples/sdk-simplepanowidget/build.gradle b/samples/sdk-simplepanowidget/build.gradle
index 007c6d5..cdb1d80 100644
--- a/samples/sdk-simplepanowidget/build.gradle
+++ b/samples/sdk-simplepanowidget/build.gradle
@@ -37,5 +37,5 @@
}
dependencies {
- compile 'com.google.vr:sdk-panowidget:1.30.0'
+ compile 'com.google.vr:sdk-panowidget:1.40.0'
}
diff --git a/samples/sdk-simplepanowidget/src/main/AndroidManifest.xml b/samples/sdk-simplepanowidget/src/main/AndroidManifest.xml
index 44ad7b0..717dc61 100644
--- a/samples/sdk-simplepanowidget/src/main/AndroidManifest.xml
+++ b/samples/sdk-simplepanowidget/src/main/AndroidManifest.xml
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.vr.sdk.samples.simplepanowidget"
- android:versionCode="170215033"
- android:versionName="1.30.0">
+ android:versionCode="170314023"
+ android:versionName="1.40.0">
<uses-sdk android:minSdkVersion="19" android:targetSdkVersion="22" />
<!--
diff --git a/samples/sdk-simplevideowidget/build.gradle b/samples/sdk-simplevideowidget/build.gradle
index 19abf3f..a96e27d 100644
--- a/samples/sdk-simplevideowidget/build.gradle
+++ b/samples/sdk-simplevideowidget/build.gradle
@@ -37,5 +37,5 @@
}
dependencies {
- compile 'com.google.vr:sdk-videowidget:1.30.0'
+ compile 'com.google.vr:sdk-videowidget:1.40.0'
}
diff --git a/samples/sdk-simplevideowidget/src/main/AndroidManifest.xml b/samples/sdk-simplevideowidget/src/main/AndroidManifest.xml
index 0f8b088..1c3b1d3 100644
--- a/samples/sdk-simplevideowidget/src/main/AndroidManifest.xml
+++ b/samples/sdk-simplevideowidget/src/main/AndroidManifest.xml
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.vr.sdk.samples.simplevideowidget"
- android:versionCode="170215033"
- android:versionName="1.30.0">
+ android:versionCode="170314023"
+ android:versionName="1.40.0">
<uses-sdk android:minSdkVersion="19" android:targetSdkVersion="22" />
diff --git a/samples/sdk-simplevideowidget/src/main/java/com/google/vr/sdk/samples/simplevideowidget/SimpleVrVideoActivity.java b/samples/sdk-simplevideowidget/src/main/java/com/google/vr/sdk/samples/simplevideowidget/SimpleVrVideoActivity.java
index f26b605..05f7716 100644
--- a/samples/sdk-simplevideowidget/src/main/java/com/google/vr/sdk/samples/simplevideowidget/SimpleVrVideoActivity.java
+++ b/samples/sdk-simplevideowidget/src/main/java/com/google/vr/sdk/samples/simplevideowidget/SimpleVrVideoActivity.java
@@ -173,7 +173,7 @@
// Save the intent. This allows the getIntent() call in onCreate() to use this new Intent during
// future invocations.
setIntent(intent);
- // Load the new image.
+ // Load the new video.
handleIntent(intent);
}
diff --git a/samples/sdk-treasurehunt/build.gradle b/samples/sdk-treasurehunt/build.gradle
index dbdbdf5..aa851a3 100644
--- a/samples/sdk-treasurehunt/build.gradle
+++ b/samples/sdk-treasurehunt/build.gradle
@@ -37,6 +37,6 @@
}
dependencies {
- compile 'com.google.vr:sdk-audio:1.30.0'
- compile 'com.google.vr:sdk-base:1.30.0'
+ compile 'com.google.vr:sdk-audio:1.40.0'
+ compile 'com.google.vr:sdk-base:1.40.0'
}
diff --git a/samples/sdk-treasurehunt/src/main/AndroidManifest.xml b/samples/sdk-treasurehunt/src/main/AndroidManifest.xml
index 2a5beb9..50a43b0 100644
--- a/samples/sdk-treasurehunt/src/main/AndroidManifest.xml
+++ b/samples/sdk-treasurehunt/src/main/AndroidManifest.xml
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.vr.sdk.samples.treasurehunt"
- android:versionCode="170215033"
- android:versionName="1.30.0">
+ android:versionCode="170314023"
+ android:versionName="1.40.0">
<!-- The GVR SDK requires API 19+ and OpenGL ES 2+. -->
<uses-sdk android:minSdkVersion="19" android:targetSdkVersion="24" />
diff --git a/samples/sdk-videoplayer/build.gradle b/samples/sdk-videoplayer/build.gradle
index d5b56b6..83ddd28 100644
--- a/samples/sdk-videoplayer/build.gradle
+++ b/samples/sdk-videoplayer/build.gradle
@@ -37,8 +37,8 @@
}
dependencies {
- compile 'com.google.android.exoplayer:exoplayer:r1.5.10'
+ compile 'com.google.android.exoplayer:exoplayer:r2.2.0'
- compile 'com.google.vr:sdk-base:1.30.0'
- compile 'com.google.vr:sdk-common:1.30.0'
+ compile 'com.google.vr:sdk-base:1.40.0'
+ compile 'com.google.vr:sdk-common:1.40.0'
}
diff --git a/samples/sdk-videoplayer/src/main/AndroidManifest.xml b/samples/sdk-videoplayer/src/main/AndroidManifest.xml
index ed061a5..b0b07e0 100644
--- a/samples/sdk-videoplayer/src/main/AndroidManifest.xml
+++ b/samples/sdk-videoplayer/src/main/AndroidManifest.xml
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.vr.sdk.samples.videoplayer"
- android:versionCode="170215033"
- android:versionName="1.30.0">
+ android:versionCode="170314023"
+ android:versionName="1.40.0">
<!-- The video API works on Daydream-ready devices with Asynchronous
Reprojection on Android N+ and OpenGL ES 2+.. -->
diff --git a/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/Configuration.java b/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/Configuration.java
index 0245013..2de4f2d 100644
--- a/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/Configuration.java
+++ b/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/Configuration.java
@@ -22,10 +22,10 @@
* EGL_EXT_protected_content.
*/
public class Configuration {
-
- // Create a secure EGL context in to test rendering L1 HD DRM.
- public static final boolean SECURE_EGL_CONTEXT = true;
-
// Use video sample that requires secure playback.
- public static final boolean REQUIRE_SECURE_PATH = true;
+ public static final boolean USE_DRM_VIDEO_SAMPLE = true;
+
+ // Create a protected asynchronous reprojection pipeline. This is required if a DRM video sample
+ // is played.
+ public static final boolean USE_PROTECTED_PIPELINE = USE_DRM_VIDEO_SAMPLE || true;
}
diff --git a/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/VideoExoPlayer.java b/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/VideoExoPlayer.java
deleted file mode 100644
index a3bece7..0000000
--- a/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/VideoExoPlayer.java
+++ /dev/null
@@ -1,531 +0,0 @@
-/*
- * Copyright 2017 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.google.vr.sdk.samples.videoplayer;
-
-import android.content.Context;
-import android.media.AudioManager;
-import android.media.MediaCodec;
-import android.os.Handler;
-import android.os.Looper;
-import android.text.TextUtils;
-import android.util.Log;
-import android.view.Surface;
-import com.google.android.exoplayer.DefaultLoadControl;
-import com.google.android.exoplayer.ExoPlaybackException;
-import com.google.android.exoplayer.ExoPlayer;
-import com.google.android.exoplayer.LoadControl;
-import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
-import com.google.android.exoplayer.MediaCodecSelector;
-import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
-import com.google.android.exoplayer.TrackRenderer;
-import com.google.android.exoplayer.chunk.ChunkSampleSource;
-import com.google.android.exoplayer.chunk.ChunkSource;
-import com.google.android.exoplayer.chunk.FormatEvaluator;
-import com.google.android.exoplayer.dash.DashChunkSource;
-import com.google.android.exoplayer.dash.DefaultDashTrackSelector;
-import com.google.android.exoplayer.dash.mpd.AdaptationSet;
-import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
-import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser;
-import com.google.android.exoplayer.dash.mpd.Period;
-import com.google.android.exoplayer.dash.mpd.UtcTimingElement;
-import com.google.android.exoplayer.dash.mpd.UtcTimingElementResolver;
-import com.google.android.exoplayer.drm.ExoMediaDrm.KeyRequest;
-import com.google.android.exoplayer.drm.ExoMediaDrm.ProvisionRequest;
-import com.google.android.exoplayer.drm.MediaDrmCallback;
-import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
-import com.google.android.exoplayer.drm.UnsupportedDrmException;
-import com.google.android.exoplayer.upstream.Allocator;
-import com.google.android.exoplayer.upstream.DataSource;
-import com.google.android.exoplayer.upstream.DefaultAllocator;
-import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
-import com.google.android.exoplayer.upstream.DefaultUriDataSource;
-import com.google.android.exoplayer.upstream.UriDataSource;
-import com.google.android.exoplayer.util.ManifestFetcher;
-import com.google.android.exoplayer.util.Util;
-import java.io.IOException;
-import java.util.UUID;
-import java.util.concurrent.CopyOnWriteArrayList;
-
-/**
- * Simple playback controller for video streams that wraps ExoPlayer.
- */
-/* package */ class VideoExoPlayer {
-
- private static final String TAG = "VideoExoPlayer";
-
- public static final int RENDERER_COUNT = 2;
- public static final int TYPE_VIDEO = 0;
- public static final int TYPE_AUDIO = 1;
-
- private static final int BUFFER_SEGMENT_SIZE = 64 * 1024;
- private static final int VIDEO_BUFFER_SEGMENTS = 200;
- private static final int AUDIO_BUFFER_SEGMENTS = 54;
- private static final int LIVE_EDGE_LATENCY_MS = 30000;
-
- private static final int SECURITY_LEVEL_UNKNOWN = -1;
- private static final int SECURITY_LEVEL_1 = 1;
- private static final int SECURITY_LEVEL_3 = 3;
-
- private final AudioManager audioManager;
-
- private final Handler mainHandler;
- private TrackRenderer audioRenderer;
- private TrackRenderer videoRenderer;
- private ExoPlayer player;
- private Surface surface;
- private AsyncRendererBuilder currentAsyncBuilder;
- private final CopyOnWriteArrayList<Listener> listeners;
-
- private boolean isSurfaceCreated;
- private boolean isPaused = false;
-
- /**
- * A listener for error reporting.
- */
- public interface Listener {
- void onError(Exception e);
- }
-
- /**
- * Creates a VideoExoPlayer.
- *
- * @param context The Application context.
- * @param streamUrl The URL to play.
- * @param context A function to call for DRM-related messages.
- * @param requiresSecurePlayback Whether secure playback is necessary.
- */
- public VideoExoPlayer(
- Context context,
- String streamUrl,
- MediaDrmCallback drmCallback,
- boolean requiresSecurePlayback) {
- audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
-
- Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
- String userAgent = Util.getUserAgent(context, "VRPlayMovies");
-
- player = ExoPlayer.Factory.newInstance(RENDERER_COUNT, 1000, 5000);
- mainHandler = new Handler();
- listeners = new CopyOnWriteArrayList<>();
-
- currentAsyncBuilder =
- new AsyncRendererBuilder(
- context, userAgent, streamUrl, requiresSecurePlayback, drmCallback, this);
- }
-
- /**
- * Adds a listener for crtical playback events.
- *
- * @param listener The Listener to add.
- */
- public void addListener(Listener listener) {
- listeners.add(listener);
- }
-
- /**
- * Removes a listener.
- *
- * @param listener The Listener to remove.
- */
- public void removeListener(Listener listener) {
- listeners.remove(listener);
- }
-
- /**
- * Initializes the player.
- */
- public void init() {
- currentAsyncBuilder.init();
- }
-
- /**
- * Releases the player. Call init() to initialize it again.
- */
- public void release() {
- releasePlayer();
- }
-
- /**
- * Sets the Surface that will contain video frames. This also starts playback of the player.
- *
- * @param surface The Surface that should receive video.
- */
- public void setSurface(Surface surface) {
- isSurfaceCreated = true;
- this.surface = surface;
- if (videoRenderer != null) {
- mainHandler.post(
- new Runnable() {
- @Override
- public void run() {
- beginPlayback();
- }
- });
- }
- }
-
- /**
- * Stops the player and releases it.
- */
- public void stop() {
- // Release the player instead of stopping so that an async prepare gets stopped.
- releasePlayer();
- audioManager.abandonAudioFocus(null);
- }
-
- /**
- * Gets whather playback is paused.
- *
- * @return Whether playback is paused.
- */
- public boolean isPaused() {
- return isPaused;
- }
-
- /**
- * Pauses or restarts the player.
- */
- public void togglePause() {
- Log.d(TAG, "togglePause()");
-
- if (player != null) {
- if (isPaused) {
- player.setPlayWhenReady(true);
- } else {
- player.setPlayWhenReady(false);
- }
- isPaused = !isPaused;
- }
- }
-
- /**
- * Starts playback (unpausing).
- */
- public void play() {
- if (player != null && isPaused) {
- togglePause();
- }
- }
-
- /**
- * Pauses playback.
- */
- public void pause() {
- if (player != null && !isPaused) {
- togglePause();
- }
- }
-
- private void beginPlayback() {
- player.addListener(new VideoLooperListener());
- player.prepare(videoRenderer, audioRenderer);
-
- player.sendMessage(videoRenderer, MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);
- player.seekTo(0);
- player.setPlayWhenReady(true);
- }
-
- private void releasePlayer() {
- if (currentAsyncBuilder != null) {
- currentAsyncBuilder.cancel();
- currentAsyncBuilder = null;
- }
- surface = null;
- if (player != null) {
- isPaused = false;
- player.release();
- player = null;
- }
- }
-
- private void onRenderers(TrackRenderer videoRenderer, TrackRenderer audioRenderer) {
- this.videoRenderer = videoRenderer;
- this.audioRenderer = audioRenderer;
-
- if (isSurfaceCreated) {
- beginPlayback();
- }
- }
-
- private void onRenderersError(Exception e) {
- Log.e(TAG, "Renderer init error: ", e);
- }
-
- private Looper getPlaybackLooper() {
- return player.getPlaybackLooper();
- }
-
- private Handler getMainHandler() {
- return mainHandler;
- }
-
- /**
- * Class to seek to the start of a video once it ends.
- */
- private final class VideoLooperListener implements ExoPlayer.Listener {
- @Override
- public void onPlayerError(ExoPlaybackException error) {
- Log.e(TAG, "ExoPlayer error", error);
- for (Listener listener : listeners) {
- listener.onError(error);
- }
- }
-
- @Override
- public void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
- Log.i(TAG, "ExoPlayer state changed " + playWhenReady + " : " + playbackState);
- if (playWhenReady && playbackState == ExoPlayer.STATE_ENDED) {
- player.seekTo(0); // this line causes playback to loop
- }
- }
-
- @Override
- public void onPlayWhenReadyCommitted() {}
- }
-
- private static final class AsyncRendererBuilder
- implements ManifestFetcher.ManifestCallback<MediaPresentationDescription>,
- UtcTimingElementResolver.UtcTimingCallback {
-
- private final Context context;
- private final String userAgent;
- private final boolean requiresSecurePlayback;
- private final MediaDrmCallback drmCallback;
- private final VideoExoPlayer player;
- private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
- private final UriDataSource manifestDataSource;
-
- private boolean canceled;
- private MediaPresentationDescription manifest;
- private long elapsedRealtimeOffset;
-
- public AsyncRendererBuilder(
- Context context,
- String userAgent,
- String url,
- boolean requiresSecurePlayback,
- MediaDrmCallback drmCallback,
- VideoExoPlayer player) {
- this.context = context;
- this.userAgent = userAgent;
- this.requiresSecurePlayback = requiresSecurePlayback;
- this.drmCallback = drmCallback;
- this.player = player;
- MediaPresentationDescriptionParser parser = new MediaPresentationDescriptionParser();
- manifestDataSource = new DefaultUriDataSource(context, userAgent);
- manifestFetcher = new ManifestFetcher<>(url, manifestDataSource, parser);
- }
-
- public void init() {
- manifestFetcher.singleLoad(player.getMainHandler().getLooper(), this);
- }
-
- public void cancel() {
- canceled = true;
- }
-
- @Override
- public void onSingleManifest(MediaPresentationDescription manifest) {
- if (canceled) {
- return;
- }
-
- this.manifest = manifest;
- if (manifest.dynamic && manifest.utcTiming != null) {
- UtcTimingElementResolver.resolveTimingElement(
- manifestDataSource,
- manifest.utcTiming,
- manifestFetcher.getManifestLoadCompleteTimestamp(),
- this);
- } else {
- buildRenderers();
- }
- }
-
- @Override
- public void onSingleManifestError(IOException e) {
- if (canceled) {
- return;
- }
-
- player.onRenderersError(e);
- }
-
- @Override
- public void onTimestampResolved(UtcTimingElement utcTiming, long elapsedRealtimeOffset) {
- if (canceled) {
- return;
- }
-
- this.elapsedRealtimeOffset = elapsedRealtimeOffset;
- buildRenderers();
- }
-
- @Override
- public void onTimestampError(UtcTimingElement utcTiming, IOException e) {
- if (canceled) {
- return;
- }
-
- Log.e(TAG, "Failed to resolve UtcTiming element [" + utcTiming + "]", e);
- // Be optimistic and continue in the hope that the device clock is correct.
- buildRenderers();
- }
-
- private void buildRenderers() {
- Period period = manifest.getPeriod(0);
- Handler mainHandler = player.getMainHandler();
- LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
- DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, null);
-
- boolean hasContentProtection = false;
- for (int i = 0; i < period.adaptationSets.size(); i++) {
- AdaptationSet adaptationSet = period.adaptationSets.get(i);
- if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
- hasContentProtection |= adaptationSet.hasContentProtection();
- }
- }
-
- // Check drm support if necessary.
- boolean filterHdContent = false;
- StreamingDrmSessionManager drmSessionManager = null;
- if (hasContentProtection) {
- if (Util.SDK_INT < 18) {
- player.onRenderersError(
- new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
- return;
- }
- try {
- drmSessionManager =
- StreamingDrmSessionManager.newWidevineInstance(
- player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), null);
-
- if (!requiresSecurePlayback) {
- // Force to L3 to be able to direct to the Surface.
- drmSessionManager.setPropertyString("securityLevel", "L3");
- }
-
- filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
- } catch (UnsupportedDrmException e) {
- player.onRenderersError(e);
- return;
- }
- }
-
- // Build the video renderer.
- DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
- ChunkSource videoChunkSource =
- new DashChunkSource(
- manifestFetcher,
- DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
- videoDataSource,
- new FormatEvaluator.AdaptiveEvaluator(bandwidthMeter),
- LIVE_EDGE_LATENCY_MS,
- elapsedRealtimeOffset,
- mainHandler,
- null,
- 0);
- ChunkSampleSource videoSampleSource =
- new ChunkSampleSource(
- videoChunkSource,
- loadControl,
- VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE,
- mainHandler,
- null,
- VideoExoPlayer.TYPE_VIDEO);
- TrackRenderer videoRenderer =
- new MediaCodecVideoTrackRenderer(
- context,
- videoSampleSource,
- MediaCodecSelector.DEFAULT,
- MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
- 5000,
- drmSessionManager,
- true,
- mainHandler,
- null,
- 50);
-
- // Build the audio renderer.
- DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
- ChunkSource audioChunkSource =
- new DashChunkSource(
- manifestFetcher,
- DefaultDashTrackSelector.newAudioInstance(),
- audioDataSource,
- null,
- LIVE_EDGE_LATENCY_MS,
- elapsedRealtimeOffset,
- mainHandler,
- null,
- 0);
- ChunkSampleSource audioSampleSource =
- new ChunkSampleSource(
- audioChunkSource,
- loadControl,
- AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE,
- mainHandler,
- null,
- VideoExoPlayer.TYPE_AUDIO);
- TrackRenderer audioRenderer =
- new MediaCodecAudioTrackRenderer(
- audioSampleSource,
- MediaCodecSelector.DEFAULT,
- drmSessionManager,
- true,
- mainHandler,
- null);
-
- // Invoke the callback.
- player.onRenderers(videoRenderer, audioRenderer);
- }
-
- private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) {
- String securityLevelProperty = sessionManager.getPropertyString("securityLevel");
- Log.d(TAG, "WV security: " + securityLevelProperty);
- return securityLevelProperty.equals("L1")
- ? SECURITY_LEVEL_1
- : securityLevelProperty.equals("L3") ? SECURITY_LEVEL_3 : SECURITY_LEVEL_UNKNOWN;
- }
- }
-
- /* package */ static final class WidevineTestMediaDrmCallback implements MediaDrmCallback {
-
- private static final String WIDEVINE_GTS_DEFAULT_BASE_URI =
- "http://wv-staging-proxy.appspot.com/proxy?provider=YouTube&video_id=";
-
- private final String defaultUri;
-
- public WidevineTestMediaDrmCallback(String videoId) {
- defaultUri = WIDEVINE_GTS_DEFAULT_BASE_URI + videoId;
- }
-
- @Override
- public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) throws IOException {
- String url = request.getDefaultUrl() + "&signedRequest=" + new String(request.getData());
- return Util.executePost(url, null, null);
- }
-
- @Override
- public byte[] executeKeyRequest(UUID uuid, KeyRequest request) throws IOException {
- String url = request.getDefaultUrl();
- if (TextUtils.isEmpty(url)) {
- url = defaultUri;
- }
- return Util.executePost(url, request.getData(), null);
- }
- }
-}
diff --git a/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/VideoExoPlayer2.java b/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/VideoExoPlayer2.java
new file mode 100644
index 0000000..1eba761
--- /dev/null
+++ b/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/VideoExoPlayer2.java
@@ -0,0 +1,237 @@
+// Copyright 2016 Google Inc. All Rights Reserved.
+package com.google.vr.sdk.samples.videoplayer;
+
+import android.content.Context;
+import android.net.Uri;
+import android.util.Log;
+import android.view.Surface;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.DefaultLoadControl;
+import com.google.android.exoplayer2.ExoPlayerFactory;
+import com.google.android.exoplayer2.SimpleExoPlayer;
+import com.google.android.exoplayer2.drm.DefaultDrmSessionManager;
+import com.google.android.exoplayer2.drm.DrmSessionManager;
+import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
+import com.google.android.exoplayer2.drm.FrameworkMediaDrm;
+import com.google.android.exoplayer2.drm.HttpMediaDrmCallback;
+import com.google.android.exoplayer2.drm.UnsupportedDrmException;
+import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;
+import com.google.android.exoplayer2.source.ExtractorMediaSource;
+import com.google.android.exoplayer2.source.MediaSource;
+import com.google.android.exoplayer2.source.dash.DashMediaSource;
+import com.google.android.exoplayer2.source.dash.DefaultDashChunkSource;
+import com.google.android.exoplayer2.source.hls.HlsMediaSource;
+import com.google.android.exoplayer2.source.smoothstreaming.DefaultSsChunkSource;
+import com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource;
+import com.google.android.exoplayer2.trackselection.AdaptiveVideoTrackSelection;
+import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
+import com.google.android.exoplayer2.trackselection.TrackSelection;
+import com.google.android.exoplayer2.upstream.DataSource;
+import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
+import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
+import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory;
+import com.google.android.exoplayer2.upstream.HttpDataSource;
+import com.google.android.exoplayer2.util.Util;
+import java.util.Map;
+import java.util.UUID;
+
+/**
+ * Wrapper for ExoPlayer2 functionality. Handles all the necessary setup for video playback and
+ * provides access to simple video playback controls. Plays unprotected videos from a specified Uri,
+ * and plays DRM video under the Widevine test license.
+ */
+public class VideoExoPlayer2 {
+
+ private static final String TAG = VideoExoPlayer2.class.getSimpleName();
+ private static final DefaultBandwidthMeter BANDWIDTH_METER = new DefaultBandwidthMeter();
+
+ private final Context context;
+ private final String userAgent;
+ private final DataSource.Factory mediaDataSourceFactory;
+ private SimpleExoPlayer player;
+
+ public VideoExoPlayer2(Context context) {
+ this.context = context;
+ userAgent = Util.getUserAgent(context, "VideoExoPlayer");
+ mediaDataSourceFactory = buildDataSourceFactory(true);
+ }
+
+ public void play() {
+ if (player != null) {
+ player.setPlayWhenReady(true);
+ }
+ }
+
+ public void pause() {
+ if (player != null) {
+ player.setPlayWhenReady(false);
+ }
+ }
+
+ public void togglePause() {
+ if (isPaused()) {
+ play();
+ } else {
+ pause();
+ }
+ }
+
+ public boolean isPaused() {
+ return !player.getPlayWhenReady();
+ }
+
+ public SimpleExoPlayer getPlayer() {
+ return player;
+ }
+
+ /**
+ * Sets the Surface for the video player to decode frames into. When the Surface is set, the video
+ * player will begin to autoplay.
+ *
+ * @param surface The Surface to consume video frames.
+ */
+ public void setSurface(Surface surface) {
+ if (player != null) {
+ player.setVideoSurface(surface);
+ } else {
+ Log.e(TAG, "Error: video player has not been initialized. Cannot set Surface.");
+ }
+ }
+
+ /**
+ * Initializes the video player. The player can play back a video at the specified Uri. DRM videos
+ * hosted by the Widevine test license can be played.
+ *
+ * <p>Note: this should be called in the host activity's onStart() method.
+ *
+ * @param uri The uri of the source video to play.
+ * @param optionalDrmVideoId The ID of the video under the Widevine test license.
+ */
+ public void initPlayer(Uri uri, String optionalDrmVideoId) throws UnsupportedDrmException {
+ DrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
+ if (optionalDrmVideoId != null) {
+ try {
+ drmSessionManager =
+ buildDrmSessionManager(
+ C.WIDEVINE_UUID,
+ getWidevineTestLicenseUrl(optionalDrmVideoId),
+ null /* keyRequestProperties */);
+ } catch (UnsupportedDrmException e) {
+ String errorString =
+ e.reason == UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME
+ ? "UnsupportedDrmException"
+ : "UnknownDrmException";
+ Log.e(TAG, "Error: " + errorString, e);
+ throw e;
+ }
+ }
+
+ TrackSelection.Factory videoTrackSelectionFactory =
+ new AdaptiveVideoTrackSelection.Factory(BANDWIDTH_METER);
+ DefaultTrackSelector trackSelector = new DefaultTrackSelector(videoTrackSelectionFactory);
+ player =
+ ExoPlayerFactory.newSimpleInstance(
+ context,
+ trackSelector,
+ new DefaultLoadControl(),
+ drmSessionManager,
+ SimpleExoPlayer.EXTENSION_RENDERER_MODE_OFF);
+
+ // Auto play the video.
+ player.setPlayWhenReady(true);
+
+ MediaSource mediaSource = buildMediaSource(uri);
+ // Prepare the player with the source.
+ player.prepare(mediaSource);
+ }
+
+ /**
+ * Releases the video player. Once the player is released, the client must call initPlayer()
+ * before the video player can be used again.
+ *
+ * <p>Note: this should be called in the host activity's onStop().
+ */
+ public void releasePlayer() {
+ if (player != null) {
+ player.release();
+ player = null;
+ }
+ }
+
+ private static String getWidevineTestLicenseUrl(String id) {
+ return "https://proxy.uat.widevine.com/proxy?video_id=" + id + "&provider=widevine_test";
+ }
+
+ /**
+ * Returns a new DataSource factory.
+ *
+ * @param useBandwidthMeter Whether to set {@link #BANDWIDTH_METER} as a listener to the new
+ * DataSource factory.
+ * @return A new DataSource factory.
+ */
+ private DataSource.Factory buildDataSourceFactory(boolean useBandwidthMeter) {
+ return buildDataSourceFactory(useBandwidthMeter ? BANDWIDTH_METER : null);
+ }
+
+ /**
+ * Returns a new HttpDataSource factory.
+ *
+ * @param useBandwidthMeter Whether to set {@link #BANDWIDTH_METER} as a listener to the new
+ * DataSource factory.
+ * @return A new HttpDataSource factory.
+ */
+ private HttpDataSource.Factory buildHttpDataSourceFactory(boolean useBandwidthMeter) {
+ return buildHttpDataSourceFactory(useBandwidthMeter ? BANDWIDTH_METER : null);
+ }
+
+ private DrmSessionManager<FrameworkMediaCrypto> buildDrmSessionManager(
+ UUID uuid, String licenseUrl, Map<String, String> keyRequestProperties)
+ throws UnsupportedDrmException {
+ if (Util.SDK_INT < 18) {
+ return null;
+ }
+ HttpMediaDrmCallback drmCallback =
+ new HttpMediaDrmCallback(
+ licenseUrl, buildHttpDataSourceFactory(false), keyRequestProperties);
+ return new DefaultDrmSessionManager<>(
+ uuid, FrameworkMediaDrm.newInstance(uuid), drmCallback, null, null, null);
+ }
+
+ private MediaSource buildMediaSource(Uri uri) {
+ int type = Util.inferContentType(uri.getLastPathSegment());
+ switch (type) {
+ case C.TYPE_SS:
+ return new SsMediaSource(
+ uri,
+ buildDataSourceFactory(false),
+ new DefaultSsChunkSource.Factory(mediaDataSourceFactory),
+ null,
+ null);
+ case C.TYPE_DASH:
+ return new DashMediaSource(
+ uri,
+ buildDataSourceFactory(false),
+ new DefaultDashChunkSource.Factory(mediaDataSourceFactory),
+ null,
+ null);
+ case C.TYPE_HLS:
+ return new HlsMediaSource(uri, mediaDataSourceFactory, null, null);
+ case C.TYPE_OTHER:
+ return new ExtractorMediaSource(
+ uri, mediaDataSourceFactory, new DefaultExtractorsFactory(), null, null);
+ default:
+ {
+ throw new IllegalStateException("Unsupported type: " + type);
+ }
+ }
+ }
+
+ private DataSource.Factory buildDataSourceFactory(DefaultBandwidthMeter bandwidthMeter) {
+ return new DefaultDataSourceFactory(
+ context, bandwidthMeter, buildHttpDataSourceFactory(bandwidthMeter));
+ }
+
+ public HttpDataSource.Factory buildHttpDataSourceFactory(DefaultBandwidthMeter bandwidthMeter) {
+ return new DefaultHttpDataSourceFactory(userAgent, bandwidthMeter);
+ }
+}
diff --git a/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/WatchVideoActivity.java b/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/WatchVideoActivity.java
index 677f08a..db17194 100644
--- a/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/WatchVideoActivity.java
+++ b/samples/sdk-videoplayer/src/main/java/com/google/vr/sdk/samples/videoplayer/WatchVideoActivity.java
@@ -16,6 +16,7 @@
package com.google.vr.sdk.samples.videoplayer;
import android.app.Activity;
+import android.net.Uri;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Handler;
@@ -25,8 +26,7 @@
import android.view.MotionEvent;
import android.view.Surface;
import android.view.View;
-import com.google.android.exoplayer.drm.UnsupportedDrmException;
-import com.google.android.exoplayer.util.Util;
+import com.google.android.exoplayer2.drm.UnsupportedDrmException;
import com.google.vr.ndk.base.AndroidCompat;
import com.google.vr.ndk.base.GvrLayout;
import com.google.vr.ndk.base.GvrLayout.ExternalSurfaceListener;
@@ -44,13 +44,13 @@
* VideoSceneRenderer} adds a {@link BufferViewport} per eye to describe where video should be
* drawn.
*/
-public class WatchVideoActivity extends Activity implements VideoExoPlayer.Listener {
+public class WatchVideoActivity extends Activity {
private static final String TAG = WatchVideoActivity.class.getSimpleName();
private GvrLayout gvrLayout;
private GLSurfaceView surfaceView;
private VideoSceneRenderer renderer;
- private VideoExoPlayer videoPlayer;
+ private VideoExoPlayer2 videoPlayer;
private boolean hasFirstFrame;
// Transform a quad that fills the clip box at Z=0 to a 16:9 screen at Z=-4. Note that the matrix
@@ -84,7 +84,6 @@
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
-
setImmersiveSticky();
getWindow()
.getDecorView()
@@ -149,7 +148,8 @@
gvrLayout.enableAsyncReprojectionVideoSurface(
videoSurfaceListener,
new Handler(Looper.getMainLooper()),
- Configuration.SECURE_EGL_CONTEXT /* Whether video playback needs a secure context. */);
+ /* Whether video playback should use a protected reprojection pipeline. */
+ Configuration.USE_PROTECTED_PIPELINE);
boolean isAsyncReprojectionEnabled = gvrLayout.setAsyncReprojectionEnabled(true);
if (!isSurfaceEnabled || !isAsyncReprojectionEnabled) {
@@ -191,39 +191,24 @@
}
private void initVideoPlayer() {
- // L3 content - can be directed to a Surface.
- final String streamUrlL3 =
- "http://www.youtube.com/api/manifest/dash/id/d286538032258a1c/source/youtube?"
- + "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0"
- + "&ipbits=0&expire=19000000000&signature=477CF7D478BE26C205045D507E9358F85F84C065."
- + "8971631EB657BC33EC2F48A2FF4211956760C3E9&key=ik0";
- final String contentIdL3 = "d286538032258a1c";
+ videoPlayer = new VideoExoPlayer2(getApplication());
+ Uri streamUri;
+ String drmVideoId = null;
- // L1 content - requires secure path.
- final String streamUrlL1 =
- "http://www.youtube.com/api/manifest/dash/id/0894c7c8719b28a0/source/youtube?"
- + "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0"
- + "&ipbits=0&expire=19000000000&signature=A41D835C7387885A4A820628F57E481E00095931."
- + "9D50DBEEB5E37344647EE11BDA129A7FCDE8B7B9&key=ik0";
- final String contentIdL1 = "0894c7c8719b28a0";
-
- if (Configuration.REQUIRE_SECURE_PATH) {
- videoPlayer =
- new VideoExoPlayer(
- this,
- streamUrlL1,
- new VideoExoPlayer.WidevineTestMediaDrmCallback(contentIdL1),
- true);
+ if (Configuration.USE_DRM_VIDEO_SAMPLE) {
+ // Protected video, requires a secure path for playback.
+ streamUri = Uri.parse("https://storage.googleapis.com/wvmedia/cenc/h264/tears/tears.mpd");
+ drmVideoId = "0894c7c8719b28a0";
} else {
- videoPlayer =
- new VideoExoPlayer(
- this,
- streamUrlL3,
- new VideoExoPlayer.WidevineTestMediaDrmCallback(contentIdL3),
- false);
+ // Unprotected video, does not require a secure path for playback.
+ streamUri = Uri.parse("https://storage.googleapis.com/wvmedia/clear/h264/tears/tears.mpd");
}
- videoPlayer.addListener(this);
- videoPlayer.init();
+
+ try {
+ videoPlayer.initPlayer(streamUri, drmVideoId);
+ } catch (UnsupportedDrmException e) {
+ Log.e(TAG, "Error initializing video player", e);
+ }
}
@Override
@@ -268,9 +253,8 @@
@Override
protected void onStop() {
- videoPlayer.stop();
if (videoPlayer != null) {
- videoPlayer.release();
+ videoPlayer.releasePlayer();
videoPlayer = null;
}
// Pause the gvrLayout and surfaceView here. The video Surface is guaranteed to be detached and
@@ -285,31 +269,10 @@
@Override
protected void onDestroy() {
gvrLayout.shutdown();
- if (videoPlayer != null) {
- videoPlayer.release();
- videoPlayer = null;
- }
super.onDestroy();
}
@Override
- public void onError(Exception e) {
- if (e instanceof UnsupportedDrmException) {
- // Special case DRM failures.
- UnsupportedDrmException unsupportedDrmException = (UnsupportedDrmException) e;
- int stringId =
- Util.SDK_INT < 18
- ? R.string.drm_error_not_supported
- : unsupportedDrmException.reason == UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME
- ? R.string.drm_error_unsupported_scheme
- : R.string.drm_error_unknown;
- Log.e(TAG, "UnsupportedDrmException " + getResources().getString(stringId));
- } else {
- Log.e(TAG, "UnsupportedDrmException " + getResources().getString(R.string.playback_error));
- }
- }
-
- @Override
public boolean dispatchKeyEvent(KeyEvent event) {
// Avoid accidental volume key presses while the phone is in the VR headset.
if (event.getKeyCode() == KeyEvent.KEYCODE_VOLUME_UP
@@ -338,4 +301,13 @@
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
}
+
+ protected VideoExoPlayer2 getVideoExoPlayer() {
+ return videoPlayer;
+ }
+
+ /** @return {@code true} if the first video frame has played **/
+ protected boolean hasFirstFrame() {
+ return hasFirstFrame;
+ }
}