Add DaydreamVR native libraries and services
Upstreaming the main VR system components from master-dreamos-dev
into goog/master.
Bug: None
Test: `m -j32` succeeds. Sailfish boots and basic_vr sample app works
Change-Id: I853015872afc443aecee10411ef2d6b79184d051
diff --git a/libs/vr/libposepredictor/Android.mk b/libs/vr/libposepredictor/Android.mk
new file mode 100644
index 0000000..030f79c
--- /dev/null
+++ b/libs/vr/libposepredictor/Android.mk
@@ -0,0 +1,51 @@
+# Copyright (C) 2008 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+sourceFiles := \
+ linear_pose_predictor.cpp \
+
+includeFiles := \
+ $(LOCAL_PATH)/include
+
+staticLibraries := \
+ libdvrcommon \
+ libsensor \
+ libpdx_default_transport \
+
+sharedLibraries :=
+
+include $(CLEAR_VARS)
+LOCAL_SRC_FILES := $(sourceFiles)
+LOCAL_C_INCLUDES := $(includeFiles)
+LOCAL_CFLAGS := -DLOG_TAG=\"libposepredictor\"
+LOCAL_CFLAGS += -DTRACE=0
+LOCAL_EXPORT_C_INCLUDE_DIRS := $(includeFiles)
+LOCAL_STATIC_LIBRARIES := $(staticLibraries)
+LOCAL_SHARED_LIBRARIES := $(sharedLibraries)
+LOCAL_MODULE := libposepredictor
+include $(BUILD_STATIC_LIBRARY)
+
+
+include $(CLEAR_VARS)
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+ linear_pose_predictor_tests.cpp \
+
+LOCAL_STATIC_LIBRARIES := libposepredictor $(staticLibraries)
+LOCAL_SHARED_LIBRARIES := $(sharedLibraries)
+LOCAL_MODULE := pose_predictor_tests
+include $(BUILD_NATIVE_TEST)
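+
+# Note: BUILD_NATIVE_TEST binaries typically install under
+# /data/nativetest/<module>/ on the device; the exact path can vary by
+# target, so treat this as a sketch of how to invoke the tests:
+#   adb shell /data/nativetest/pose_predictor_tests/pose_predictor_tests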
diff --git a/libs/vr/libposepredictor/include/private/dvr/linear_pose_predictor.h b/libs/vr/libposepredictor/include/private/dvr/linear_pose_predictor.h
new file mode 100644
index 0000000..1efe938
--- /dev/null
+++ b/libs/vr/libposepredictor/include/private/dvr/linear_pose_predictor.h
@@ -0,0 +1,43 @@
+#ifndef ANDROID_DVR_LINEAR_POSE_PREDICTOR_H_
+#define ANDROID_DVR_LINEAR_POSE_PREDICTOR_H_
+
+#include <private/dvr/pose_predictor.h>
+
+namespace android {
+namespace dvr {
+
+// This class makes a linear prediction using the last two samples we received.
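+//
+// A minimal usage sketch (the sample fields are placeholders for real
+// sensor data):
+//
+//   LinearPosePredictor predictor;
+//   DvrPoseAsync pose;
+//   predictor.Add(PosePredictor::Sample{position, orientation, time_ns},
+//                 &pose);                        // Feed each sensor sample.
+//   predictor.Predict(left_time_ns, right_time_ns, &pose);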
+class LinearPosePredictor : public PosePredictor {
+ public:
+ LinearPosePredictor() = default;
+
+ // Add a new sample.
+ void Add(const Sample& sample, DvrPoseAsync* out_pose) override;
+
+ // Predict using the last two samples.
+ void Predict(int64_t left_time_ns, int64_t right_time_ns,
+ DvrPoseAsync* out_pose) const override;
+
+ private:
+ // The index of the last sample we received.
+ size_t current_index_ = 0;
+
+ // The previous two samples.
+ Sample samples_[2];
+
+  // Experimental: if set, forward-predict angular speed using the estimated
+  // angular acceleration.
+ bool forward_predict_angular_speed_ = false;
+
+ // Transient variables updated when a sample is added.
+ vec3d velocity_ = vec3d::Zero();
+ vec3d rotational_velocity_ = vec3d::Zero();
+ vec3d rotational_axis_ = vec3d::Zero();
+ double last_angular_speed_ = 0;
+ double angular_speed_ = 0;
+ double angular_accel_ = 0;
+};
+
+} // namespace dvr
+} // namespace android
+
+#endif  // ANDROID_DVR_LINEAR_POSE_PREDICTOR_H_
diff --git a/libs/vr/libposepredictor/include/private/dvr/pose_predictor.h b/libs/vr/libposepredictor/include/private/dvr/pose_predictor.h
new file mode 100644
index 0000000..719edbe
--- /dev/null
+++ b/libs/vr/libposepredictor/include/private/dvr/pose_predictor.h
@@ -0,0 +1,47 @@
+#ifndef ANDROID_DVR_POSE_PREDICTOR_H_
+#define ANDROID_DVR_POSE_PREDICTOR_H_
+
+#include <private/dvr/pose_client_internal.h>
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+// This is an abstract base class for predicting a 6DoF pose given
+// a set of samples.
+//
+// TODO(okana): Create a framework for testing different subclasses for
+// performance and accuracy.
+class PosePredictor {
+ public:
+ PosePredictor() = default;
+ virtual ~PosePredictor() = default;
+
+ // Encapsulates a pose sample.
+ struct Sample {
+ vec3d position = vec3d::Zero();
+ quatd orientation = quatd::Identity();
+ int64_t time_ns = 0;
+ };
+
+  // Add a pose sample coming from the sensors and write it to out_pose as a
+  // DVR pose.
+  //
+  // The written pose is used directly when prediction is not enabled.
+ virtual void Add(const Sample& sample, DvrPoseAsync* out_pose) = 0;
+
+ // Make a pose prediction for the left and right eyes at specific times.
+ virtual void Predict(int64_t left_time_ns, int64_t right_time_ns,
+ DvrPoseAsync* out_pose) const = 0;
+
+ // Helpers
+ static double NsToSeconds(int64_t time_ns) { return time_ns / 1e9; }
+ static int64_t SecondsToNs(double seconds) {
+ return static_cast<int64_t>(seconds * 1e9);
+ }
+};
+
+} // namespace dvr
+} // namespace android
+
+#endif  // ANDROID_DVR_POSE_PREDICTOR_H_
diff --git a/libs/vr/libposepredictor/linear_pose_predictor.cpp b/libs/vr/libposepredictor/linear_pose_predictor.cpp
new file mode 100644
index 0000000..a2ce2ca
--- /dev/null
+++ b/libs/vr/libposepredictor/linear_pose_predictor.cpp
@@ -0,0 +1,147 @@
+#include <cutils/log.h>
+
+#include <private/dvr/linear_pose_predictor.h>
+
+namespace android {
+namespace dvr {
+
+using AngleAxisd = Eigen::AngleAxis<double>;
+
+void LinearPosePredictor::Add(const Sample& sample, DvrPoseAsync* out_pose) {
+  // If we are receiving a new sample, move the index to the next slot.
+  // If the timestamp matches the last sample's, we simply overwrite that
+  // sample with the new data.
+ if (sample.time_ns != samples_[current_index_].time_ns) {
+ current_index_ ^= 1;
+ }
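+  // samples_ acts as a two-slot ring buffer: XOR-ing the index with 1 flips
+  // between slots, so current_index_ ^ 1 is always the other (previous) slot.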
+
+ // Save the sample.
+ samples_[current_index_] = sample;
+
+ // The previous sample we received.
+ const auto& previous_sample = samples_[current_index_ ^ 1];
+
+ // Ready to compute velocities.
+ const auto pose_delta_time =
+ NsToSeconds(sample.time_ns - previous_sample.time_ns);
+
+  // Guard against a zero or negative delta: a duplicate timestamp would
+  // otherwise make inverse_dt infinite and poison the velocities below.
+  const double inverse_dt = pose_delta_time > 0.0 ? 1. / pose_delta_time : 0.0;
+  velocity_ = (sample.position - previous_sample.position) * inverse_dt;
+
+ quatd delta_q = sample.orientation.inverse() * previous_sample.orientation;
+  // Make sure delta_q.w() is non-negative: Eigen does not enforce this sign
+  // convention, and if delta_q.w() < 0 we would compute the opposite angular
+  // velocity below.
+ if (delta_q.w() < 0) {
+ delta_q.w() = -delta_q.w();
+ delta_q.vec() = -delta_q.vec();
+ }
+ rotational_velocity_ = -2.0 * delta_q.vec() * inverse_dt;
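+  // Why the -2 / dt factor: delta_q maps the current orientation back to the
+  // previous one, so for a small rotation delta_q ~ (1, -w * dt / 2) to first
+  // order, giving w ~ -2 * delta_q.vec() / dt.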
+
+ // Temporary experiment with acceleration estimate.
+ angular_speed_ = rotational_velocity_.norm();
+ angular_accel_ = 0.0;
+ if (forward_predict_angular_speed_) {
+ angular_accel_ =
+ pose_delta_time > 0.0
+ ? (angular_speed_ - last_angular_speed_) / pose_delta_time
+ : 0.0;
+ }
+ last_angular_speed_ = angular_speed_;
+
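+  // Fall back to an arbitrary fixed axis (Y) when the angular speed is zero.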
+ rotational_axis_ = vec3d(0.0, 1.0, 0.0);
+ if (angular_speed_ > 0.0) {
+ rotational_axis_ = rotational_velocity_ / angular_speed_;
+ }
+
+ out_pose->orientation = {static_cast<float>(sample.orientation.vec().x()),
+ static_cast<float>(sample.orientation.vec().y()),
+ static_cast<float>(sample.orientation.vec().z()),
+ static_cast<float>(sample.orientation.w())};
+
+ out_pose->translation = {static_cast<float>(sample.position.x()),
+ static_cast<float>(sample.position.y()),
+ static_cast<float>(sample.position.z()), 0.0f};
+
+ out_pose->right_orientation = {
+ static_cast<float>(sample.orientation.vec().x()),
+ static_cast<float>(sample.orientation.vec().y()),
+ static_cast<float>(sample.orientation.vec().z()),
+ static_cast<float>(sample.orientation.w())};
+
+ out_pose->right_translation = {static_cast<float>(sample.position.x()),
+ static_cast<float>(sample.position.y()),
+ static_cast<float>(sample.position.z()), 0.0f};
+
+ out_pose->angular_velocity = {static_cast<float>(rotational_velocity_.x()),
+ static_cast<float>(rotational_velocity_.y()),
+ static_cast<float>(rotational_velocity_.z()),
+ 0.0f};
+
+ out_pose->velocity = {static_cast<float>(velocity_.x()),
+ static_cast<float>(velocity_.y()),
+ static_cast<float>(velocity_.z()), 0.0f};
+ out_pose->timestamp_ns = sample.time_ns;
+ out_pose->flags = DVR_POSE_FLAG_HEAD | DVR_POSE_FLAG_VALID;
+ memset(out_pose->pad, 0, sizeof(out_pose->pad));
+}
+
+void LinearPosePredictor::Predict(int64_t left_time_ns, int64_t right_time_ns,
+ DvrPoseAsync* out_pose) const {
+ const auto& sample = samples_[current_index_];
+
+ double dt = NsToSeconds(left_time_ns - sample.time_ns);
+ double r_dt = NsToSeconds(right_time_ns - sample.time_ns);
+
+ // Temporary forward prediction code.
+ auto start_t_head_future = sample.position + velocity_ * dt;
+ auto r_start_t_head_future = sample.position + velocity_ * r_dt;
+ double angle = angular_speed_ * dt;
+ double r_angle = angular_speed_ * r_dt;
+ if (__builtin_expect(forward_predict_angular_speed_, 0)) {
+ angle += 0.5 * angular_accel_ * dt * dt;
+ r_angle += 0.5 * angular_accel_ * r_dt * r_dt;
+ }
+ auto start_q_head_future =
+ sample.orientation * quatd(AngleAxisd(angle, rotational_axis_));
+ auto r_start_q_head_future =
+ sample.orientation * quatd(AngleAxisd(r_angle, rotational_axis_));
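+  // The prediction model above is constant (angular) velocity: translation
+  // advances linearly, and orientation rotates about the fixed estimated axis
+  // by angle = angular_speed_ * dt, plus the optional 0.5 * a * dt^2 term
+  // when the experimental acceleration estimate is enabled.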
+
+ out_pose->orientation = {static_cast<float>(start_q_head_future.x()),
+ static_cast<float>(start_q_head_future.y()),
+ static_cast<float>(start_q_head_future.z()),
+ static_cast<float>(start_q_head_future.w())};
+
+ out_pose->translation = {static_cast<float>(start_t_head_future.x()),
+ static_cast<float>(start_t_head_future.y()),
+ static_cast<float>(start_t_head_future.z()), 0.0f};
+
+ out_pose->right_orientation = {static_cast<float>(r_start_q_head_future.x()),
+ static_cast<float>(r_start_q_head_future.y()),
+ static_cast<float>(r_start_q_head_future.z()),
+ static_cast<float>(r_start_q_head_future.w())};
+
+ out_pose->right_translation = {static_cast<float>(r_start_t_head_future.x()),
+ static_cast<float>(r_start_t_head_future.y()),
+ static_cast<float>(r_start_t_head_future.z()),
+ 0.0f};
+
+ out_pose->angular_velocity = {static_cast<float>(rotational_velocity_.x()),
+ static_cast<float>(rotational_velocity_.y()),
+ static_cast<float>(rotational_velocity_.z()),
+ 0.0f};
+
+ out_pose->velocity = {static_cast<float>(velocity_.x()),
+ static_cast<float>(velocity_.y()),
+ static_cast<float>(velocity_.z()), 0.0f};
+
+ out_pose->timestamp_ns = left_time_ns;
+ out_pose->flags = DVR_POSE_FLAG_HEAD | DVR_POSE_FLAG_VALID;
+ memset(out_pose->pad, 0, sizeof(out_pose->pad));
+}
+
+} // namespace dvr
+} // namespace android
diff --git a/libs/vr/libposepredictor/linear_pose_predictor_tests.cpp b/libs/vr/libposepredictor/linear_pose_predictor_tests.cpp
new file mode 100644
index 0000000..6c4f58a
--- /dev/null
+++ b/libs/vr/libposepredictor/linear_pose_predictor_tests.cpp
@@ -0,0 +1,185 @@
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+#include <private/dvr/linear_pose_predictor.h>
+
+namespace android {
+namespace dvr {
+
+namespace {
+
+// For comparing expected and actual.
+constexpr double kAbsErrorTolerance = 1e-5;
+
+// The default rotation axis we will be using.
+const vec3d kRotationAxis = vec3d(1, 4, 3).normalized();
+
+// Linearly interpolate between a and b.
+vec3d lerp(const vec3d& a, const vec3d& b, double t) { return (b - a) * t + a; }
+
+// Linearly interpolate between two angles and return the resulting rotation as
+// a quaternion (around the kRotationAxis).
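+// Since both rotations share kRotationAxis, lerping the angle coincides with
+// slerp between the two quaternions.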
+quatd qlerp(double angle1, double angle2, double t) {
+ return quatd(
+ Eigen::AngleAxis<double>((angle2 - angle1) * t + angle1, kRotationAxis));
+}
+
+// Compare two positions.
+void TestPosition(const vec3d& expected, const float32x4_t& actual) {
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_NEAR(expected[i], static_cast<double>(actual[i]),
+ kAbsErrorTolerance);
+ }
+}
+
+// Compare two orientations.
+void TestOrientation(const quatd& expected, const float32x4_t& actual) {
+ // abs(expected.dot(actual)) > 1-eps
+ EXPECT_GE(std::abs(vec4d(actual[0], actual[1], actual[2], actual[3])
+ .dot(expected.coeffs())),
+ 0.99);
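+  // For unit quaternions, a dot product of 0.99 corresponds to a relative
+  // rotation of about 2 * acos(0.99) ~ 16 degrees, a loose bound that leaves
+  // room for the predictor's small-angle approximations.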
+}
+}  // namespace
+
+// Test the extrapolation from two samples.
+TEST(LinearPosePredictorTest, Extrapolation) {
+ LinearPosePredictor predictor;
+
+  // We will extrapolate linearly from [position|orientation] 1 -> 2.
+ const vec3d position1(0, 0, 0);
+ const vec3d position2(1, 2, 3);
+ const double angle1 = M_PI * 0.3;
+ const double angle2 = M_PI * 0.5;
+ const quatd orientation1(Eigen::AngleAxis<double>(angle1, kRotationAxis));
+ const quatd orientation2(Eigen::AngleAxis<double>(angle2, kRotationAxis));
+  const int64_t t1_ns = 0;           // First sample timestamp.
+  const int64_t t2_ns = 10;          // Second sample timestamp.
+  const int64_t eval_left_ns = 23;   // The eval time for the left eye.
+  const int64_t eval_right_ns = 31;  // The eval time for the right eye.
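+  // With these values the predictor extrapolates past the last sample by
+  // factors of (23 - 0) / (10 - 0) = 2.3 (left) and (31 - 0) / 10 = 3.1
+  // (right), as checked below.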
+ DvrPoseAsync start_pose, end_pose, extrapolated_pose;
+
+ predictor.Add(
+ PosePredictor::Sample{
+ .position = position1, .orientation = orientation1, .time_ns = t1_ns},
+ &start_pose);
+
+  // The start pose is passed through unchanged.
+ TestPosition(position1, start_pose.translation);
+ TestPosition(position1, start_pose.right_translation);
+ TestOrientation(orientation1, start_pose.orientation);
+ TestOrientation(orientation1, start_pose.right_orientation);
+ EXPECT_EQ(t1_ns, start_pose.timestamp_ns);
+
+ predictor.Add(
+ PosePredictor::Sample{
+ .position = position2, .orientation = orientation2, .time_ns = t2_ns},
+ &end_pose);
+
+ TestPosition(position2, end_pose.translation);
+ TestPosition(position2, end_pose.right_translation);
+ TestOrientation(orientation2, end_pose.orientation);
+ TestOrientation(orientation2, end_pose.right_orientation);
+ EXPECT_EQ(t2_ns, end_pose.timestamp_ns);
+
+ // Extrapolate from t1 - t2 to eval_[left/right].
+ predictor.Predict(eval_left_ns, eval_right_ns, &extrapolated_pose);
+
+ // The interpolation factors for left and right.
+ const auto left_t =
+ (eval_left_ns - t1_ns) / static_cast<double>(t2_ns - t1_ns);
+  EXPECT_DOUBLE_EQ(2.3, left_t);
+
+ const auto right_t =
+ (eval_right_ns - t1_ns) / static_cast<double>(t2_ns - t1_ns);
+  EXPECT_DOUBLE_EQ(3.1, right_t);
+
+ TestPosition(lerp(position1, position2, left_t),
+ extrapolated_pose.translation);
+ TestPosition(lerp(position1, position2, right_t),
+ extrapolated_pose.right_translation);
+ TestOrientation(qlerp(angle1, angle2, left_t), extrapolated_pose.orientation);
+ TestOrientation(qlerp(angle1, angle2, right_t),
+ extrapolated_pose.right_orientation);
+}
+
+// Test three samples, where the last two samples have the same timestamp.
+TEST(LinearPosePredictorTest, DuplicateSamples) {
+ LinearPosePredictor predictor;
+
+ const vec3d position1(0, 0, 0);
+ const vec3d position2(1, 2, 3);
+ const vec3d position3(2, 2, 3);
+ const double angle1 = M_PI * 0.3;
+ const double angle2 = M_PI * 0.5;
+ const double angle3 = M_PI * 0.65;
+ const quatd orientation1(Eigen::AngleAxis<double>(angle1, kRotationAxis));
+ const quatd orientation2(Eigen::AngleAxis<double>(angle2, kRotationAxis));
+ const quatd orientation3(Eigen::AngleAxis<double>(angle3, kRotationAxis));
+ const int64_t t1_ns = 0;
+ const int64_t t2_ns = 10;
+ const int64_t eval_left_ns = 27;
+ const int64_t eval_right_ns = 31;
+ DvrPoseAsync start_pose, end_pose, extrapolated_pose;
+
+ predictor.Add(
+ PosePredictor::Sample{
+ .position = position1, .orientation = orientation1, .time_ns = t1_ns},
+ &start_pose);
+
+ predictor.Add(
+ PosePredictor::Sample{
+ .position = position2, .orientation = orientation2, .time_ns = t2_ns},
+ &end_pose);
+
+ {
+ // Extrapolate from t1 - t2 to eval_[left/right].
+ predictor.Predict(eval_left_ns, eval_right_ns, &extrapolated_pose);
+
+ // The interpolation factors for left and right.
+ const auto left_t =
+ (eval_left_ns - t1_ns) / static_cast<double>(t2_ns - t1_ns);
+ const auto right_t =
+ (eval_right_ns - t1_ns) / static_cast<double>(t2_ns - t1_ns);
+
+ // Test the result.
+ TestPosition(lerp(position1, position2, left_t),
+ extrapolated_pose.translation);
+ TestPosition(lerp(position1, position2, right_t),
+ extrapolated_pose.right_translation);
+ TestOrientation(qlerp(angle1, angle2, left_t),
+ extrapolated_pose.orientation);
+ TestOrientation(qlerp(angle1, angle2, right_t),
+ extrapolated_pose.right_orientation);
+ }
+
+  // Send a third sample with a duplicate timestamp (t2_ns); the predictor
+  // should overwrite the previous sample with this data.
+ predictor.Add(
+ PosePredictor::Sample{
+ .position = position3, .orientation = orientation3, .time_ns = t2_ns},
+ &end_pose);
+
+ {
+ // Extrapolate from t1 - t2 to eval_[left/right].
+ predictor.Predict(eval_left_ns, eval_right_ns, &extrapolated_pose);
+
+ // The interpolation factors for left and right.
+ const auto left_t =
+ (eval_left_ns - t1_ns) / static_cast<double>(t2_ns - t1_ns);
+ const auto right_t =
+ (eval_right_ns - t1_ns) / static_cast<double>(t2_ns - t1_ns);
+
+ // Test the result.
+ TestPosition(lerp(position1, position3, left_t),
+ extrapolated_pose.translation);
+ TestPosition(lerp(position1, position3, right_t),
+ extrapolated_pose.right_translation);
+ TestOrientation(qlerp(angle1, angle3, left_t),
+ extrapolated_pose.orientation);
+ TestOrientation(qlerp(angle1, angle3, right_t),
+ extrapolated_pose.right_orientation);
+ }
+}
+
+} // namespace dvr
+} // namespace android