blob: d6bf1644adcdf6ddccd27c5c42c400a6fcee1a47 [file] [log] [blame]
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001#include "include/private/dvr/composite_hmd.h"
2
Alex Vakulenko4fe60582017-02-02 11:35:59 -08003#include <log/log.h>
4
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08005#include <private/dvr/numeric.h>
6
7namespace android {
8namespace dvr {
9
// Builds a composite HMD model from the physical head-mount metrics and the
// display metrics, then derives all cached quantities (FOV, viewports,
// distortion transforms) via MetricsChanged().
CompositeHmd::CompositeHmd(const HeadMountMetrics& head_mount_metrics,
                           const DisplayMetrics& display_metrics)
    : head_mount_metrics_(head_mount_metrics),
      display_metrics_(display_metrics) {
  MetricsChanged();
}
16
// Returns the display's target frame duration, in seconds.
float CompositeHmd::GetTargetFrameDuration() const {
  return display_metrics_.GetFrameDurationSeconds();
}
20
21vec2 CompositeHmd::ComputeDistortedPoint(EyeType eye, vec2 position,
22 RgbColorChannel channel) const {
23 position = TransformPoint(eye_tan_angle_from_norm_screen_matrix_[eye], position);
24 vec2 distorted =
25 head_mount_metrics_.GetColorChannelDistortion(channel).Distort(position);
26 return TransformPoint(eye_norm_texture_from_tan_angle_matrix_[eye], distorted);
27}
28
29vec2 CompositeHmd::ComputeInverseDistortedPoint(EyeType eye, vec2 position,
30 RgbColorChannel channel) const {
31 position = TransformPoint(eye_norm_texture_from_tan_angle_inv_matrix_[eye], position);
32 vec2 distorted =
33 head_mount_metrics_.GetColorChannelDistortion(channel).DistortInverse(
34 position);
35 return TransformPoint(eye_tan_angle_from_norm_screen_inv_matrix_[eye], distorted);
36}
37
38void CompositeHmd::ComputeDistortedVertex(EyeType eye, vec2 uv_in,
39 vec2* vertex_out,
40 vec2* uv_out) const {
41 // The mesh vertices holds the shape of the distortion.
42 vec2 vertex_position = ComputeInverseDistortedPoint(eye, uv_in, kRed);
43 *vertex_out = vec2(vertex_position.x() - 0.5f, vertex_position.y() - 0.5f);
44
45 if (uv_out) {
46 // Compute the texture coordinate for each vertex coordinate.
47 // Red's is the inverse of the inverse, skip the calculation and use uv_in.
48 uv_out[kRed] = uv_in;
49 uv_out[kGreen] = ComputeDistortedPoint(eye, vertex_position, kGreen);
50 uv_out[kBlue] = ComputeDistortedPoint(eye, vertex_position, kBlue);
51 }
52}
53
// Returns the cached render-target size (both eye viewports side by side),
// as computed in MetricsChanged().
vec2i CompositeHmd::GetRecommendedRenderTargetSize() const {
  return recommended_render_target_size_;
}
57
// Returns the full display area in pixels, with origin at (0, 0).
Range2i CompositeHmd::GetDisplayRange() const { return display_range_; }
59
// Returns the cached eye-from-head transform for |eye| (a pure x-axis
// translation; see MetricsChanged()).
mat4 CompositeHmd::GetEyeFromHeadMatrix(EyeType eye) const {
  return eye_from_head_matrix_[eye];
}
63
// Returns the clamped field of view computed for |eye| in MetricsChanged().
FieldOfView CompositeHmd::GetEyeFov(EyeType eye) const { return eye_fov_[eye]; }
65
// Returns the pixel viewport for |eye| within the shared render target.
Range2i CompositeHmd::GetEyeViewportBounds(EyeType eye) const {
  return eye_viewport_range_[eye];
}
69
// Replaces the head-mount metrics and recomputes all derived values.
void CompositeHmd::SetHeadMountMetrics(
    const HeadMountMetrics& head_mount_metrics) {
  // Use the assignment operator to do memberwise copy.
  head_mount_metrics_ = head_mount_metrics;
  MetricsChanged();
}
76
// Returns the current head-mount metrics.
const HeadMountMetrics& CompositeHmd::GetHeadMountMetrics() const {
  return head_mount_metrics_;
}
80
// Replaces the display metrics and recomputes all derived values.
void CompositeHmd::SetDisplayMetrics(const DisplayMetrics& display_metrics) {
  // Use the assignment operator to do memberwise copy.
  display_metrics_ = display_metrics;
  MetricsChanged();
}
86
// Returns the current display metrics.
const DisplayMetrics& CompositeHmd::GetDisplayMetrics() const {
  return display_metrics_;
}
90
// Recomputes every cached quantity — per-eye FOV, viewport ranges,
// distortion transform matrices, recommended render target size, display
// range, and eye-from-head poses — from the current head-mount and display
// metrics. Called by the constructor and by both Set*Metrics() methods.
void CompositeHmd::MetricsChanged() {
  // Abbreviations in variable names:
  // "vp": viewport
  // "ta": tan-angle
  const HeadMountMetrics& mount = head_mount_metrics_;
  // Local copy: it may be orientation-toggled below without modifying the
  // stored member.
  DisplayMetrics display = display_metrics_;

  if (display.IsPortrait()) {
    // If we're in portrait mode, toggle the orientation so that all
    // calculations are done in landscape mode.
    display.ToggleOrientation();
  }

  float display_width_meters = display.GetSizeMeters()[0];
  float display_height_meters = display.GetSizeMeters()[1];

  vec2 pixels_per_meter = vec2(1.0f / display.GetMetersPerPixel()[0],
                               1.0f / display.GetMetersPerPixel()[1]);

  // virtual_eye_to_screen_dist is the distance from the screen to the eye
  // after it has been projected through the lens. This would normally be
  // slightly different from the distance to the actual eye.
  float virtual_eye_to_screen_dist = mount.GetVirtualEyeToScreenDistance();
  // One unit of tan-angle corresponds to the virtual eye-to-screen distance
  // in meters.
  float meters_per_tan_angle = virtual_eye_to_screen_dist;
  vec2 pixels_per_tan_angle = pixels_per_meter * meters_per_tan_angle;

  // Guard against zero/uninitialized metrics; virtual_eye_to_screen_dist is
  // used as a divisor throughout the rest of this function.
  LOG_ALWAYS_FATAL_IF(0.0f == display_width_meters);
  LOG_ALWAYS_FATAL_IF(0.0f == display_height_meters);
  LOG_ALWAYS_FATAL_IF(0.0f == virtual_eye_to_screen_dist);

  // Height of lenses from the bottom of the screen.
  float lens_y_center = 0;
  float bottom_dist = 0;
  float top_dist = 0;

  // bottom_display_dist and top_display_dist represent the distance from the
  // lens center to the edge of the display.
  float bottom_display_dist = 0;
  float top_display_dist = 0;
  switch (mount.GetVerticalAlignment()) {
    case HeadMountMetrics::kBottom:
      lens_y_center =
          mount.GetTrayToLensDistance() - display.GetBorderSizeMeters();
      bottom_dist = lens_y_center;
      top_dist = lens_y_center;
      bottom_display_dist = lens_y_center;
      top_display_dist = display_height_meters - lens_y_center;
      break;
    case HeadMountMetrics::kCenter:
      // TODO(hendrikw): This should respect the border size, but since we
      //                 currently hard code the border size, it would break
      //                 the distortion on some devices. Revisit when border
      //                 size is fixed.
      lens_y_center = display_height_meters * 0.5f;
      bottom_dist = lens_y_center;
      top_dist = lens_y_center;
      bottom_display_dist = lens_y_center;
      top_display_dist = lens_y_center;
      break;
    case HeadMountMetrics::kTop:
      lens_y_center = display_height_meters - (mount.GetTrayToLensDistance() -
                                               display.GetBorderSizeMeters());
      bottom_dist =
          mount.GetTrayToLensDistance() - display.GetBorderSizeMeters();
      top_dist = bottom_dist;
      bottom_display_dist = lens_y_center;
      top_display_dist = display_height_meters - lens_y_center;
      break;
  }

  // Horizontal distances from the lens center: inner toward the screen
  // center, outer toward the screen edge.
  float inner_dist = mount.GetScreenCenterToLensDistance();
  float outer_dist = display_width_meters * 0.5f - inner_dist;

  // We don't take chromatic aberration into account yet for computing FOV,
  // viewport, etc, so we only use the green channel for now. Note the actual
  // Distort function *does* implement chromatic aberration.
  const ColorChannelDistortion& distortion =
      mount.GetColorChannelDistortion(kGreen);

  // Extreme screen points expressed in tan-angle space.
  vec2 outer_point(outer_dist / virtual_eye_to_screen_dist, 0.0f);
  vec2 inner_point(inner_dist / virtual_eye_to_screen_dist, 0.0f);
  vec2 bottom_point(0.0f, bottom_dist / virtual_eye_to_screen_dist);
  vec2 top_point(0.0f, top_dist / virtual_eye_to_screen_dist);

  // Distort the extremes and convert back to angles (radians) to obtain the
  // physically visible half-angles in each direction.
  float outer_angle = atanf(distortion.Distort(outer_point)[0]);
  float inner_angle = atanf(distortion.Distort(inner_point)[0]);
  float bottom_angle = atanf(distortion.Distort(bottom_point)[1]);
  float top_angle = atanf(distortion.Distort(top_point)[1]);

  for (EyeType eye : {kLeftEye, kRightEye}) {
    const FieldOfView max_fov = mount.GetEyeMaxFov(eye);
    // The outer edge is on the left for the left eye and on the right for
    // the right eye.
    float left_angle = (eye == kLeftEye) ? outer_angle : inner_angle;
    float right_angle = (eye == kLeftEye) ? inner_angle : outer_angle;

    // Clamp the physically visible FOV to the head mount's per-eye maximum.
    eye_fov_[eye] = FieldOfView(std::min(left_angle, max_fov.GetLeft()),
                                std::min(right_angle, max_fov.GetRight()),
                                std::min(bottom_angle, max_fov.GetBottom()),
                                std::min(top_angle, max_fov.GetTop()));

    // Texture viewport corners in tan-angle space, derived from the clamped
    // FOV.
    vec2 texture_vp_ta_p1 =
        vec2(-tanf(eye_fov_[eye].GetLeft()), -tanf(eye_fov_[eye].GetBottom()));
    vec2 texture_vp_ta_p2 =
        vec2(tanf(eye_fov_[eye].GetRight()), tanf(eye_fov_[eye].GetTop()));
    vec2 texture_vp_size_ta = texture_vp_ta_p2 - texture_vp_ta_p1;

    vec2 texture_vp_sizef_pixels =
        texture_vp_size_ta.array() * pixels_per_tan_angle.array();

    vec2i texture_vp_size_pixels =
        vec2i(static_cast<int32_t>(roundf(texture_vp_sizef_pixels[0])),
              static_cast<int32_t>(roundf(texture_vp_sizef_pixels[1])));
    // The right eye's viewport starts where the left eye's ends.
    int vp_start_x =
        (eye == kLeftEye) ? 0 : eye_viewport_range_[kLeftEye].p2[0];

    eye_viewport_range_[eye] =
        Range2i::FromSize(vec2i(vp_start_x, 0), texture_vp_size_pixels);
    float left_dist = (eye == kLeftEye) ? outer_dist : inner_dist;
    float right_dist = (eye == kLeftEye) ? inner_dist : outer_dist;
    // Screen extents for this eye in tan-angle space.
    vec2 screen_ta_p1(-left_dist / virtual_eye_to_screen_dist,
                      -bottom_display_dist / virtual_eye_to_screen_dist);
    vec2 screen_ta_p2(right_dist / virtual_eye_to_screen_dist,
                      top_display_dist / virtual_eye_to_screen_dist);
    vec2 screen_ta_size = screen_ta_p2 - screen_ta_p1;

    // Align the tan angle coordinates to the nearest pixel. This will ensure
    // that the optical center doesn't straddle multiple pixels.
    // TODO(hendrikw): verify that this works correctly for Daydream View.
    vec2 tan_angle_per_pixel(screen_ta_size.array() /
                             texture_vp_size_pixels.cast<float>().array());
    vec2 pixel_p1(screen_ta_p1.array() / tan_angle_per_pixel.array());
    vec2 pixel_shift(roundf(pixel_p1.x()) - pixel_p1.x(),
                     roundf(pixel_p1.y()) - pixel_p1.y());
    // Shift both corners by the same sub-pixel offset so the extent is
    // preserved.
    screen_ta_p1 +=
        (tan_angle_per_pixel.array() * pixel_shift.array()).matrix();
    screen_ta_p2 +=
        (tan_angle_per_pixel.array() * pixel_shift.array()).matrix();

    // Calculate the transformations needed for the distortions. Each matrix
    // is stored together with its inverse for the reverse mapping.
    eye_tan_angle_from_norm_screen_matrix_[eye] =
        TranslationMatrix(vec2(screen_ta_p1)) *
        ScaleMatrix(screen_ta_size);
    eye_tan_angle_from_norm_screen_inv_matrix_[eye] =
        eye_tan_angle_from_norm_screen_matrix_[eye].inverse();

    eye_norm_texture_from_tan_angle_inv_matrix_[eye] =
        TranslationMatrix(texture_vp_ta_p1) *
        ScaleMatrix(texture_vp_size_ta);
    eye_norm_texture_from_tan_angle_matrix_[eye] =
        eye_norm_texture_from_tan_angle_inv_matrix_[eye].inverse();
  }
  vec2i left_vp_size = eye_viewport_range_[kLeftEye].GetSize();
  vec2i right_vp_size = eye_viewport_range_[kRightEye].GetSize();

  // Both eye viewports sit side by side in one render target; its height is
  // that of the taller viewport.
  recommended_render_target_size_ =
      vec2i(left_vp_size[0] + right_vp_size[0],
            std::max(left_vp_size[1], right_vp_size[1]));

  display_range_ = Range2i::FromSize(vec2i(0, 0), display.GetSizePixels());

  // Eye-from-head transforms: x-axis translations by the screen-center-to-
  // lens distance, with opposite signs for the two eyes.
  eye_from_head_matrix_[kLeftEye] = Eigen::Translation3f(
      vec3(mount.GetScreenCenterToLensDistance(), 0.0f, 0.0f));
  eye_from_head_matrix_[kRightEye] = Eigen::Translation3f(
      vec3(-mount.GetScreenCenterToLensDistance(), 0.0f, 0.0f));
}
255
256} // namespace dvr
257} // namespace android