LibMultiSense Documentation
multisense_utilities_test.cc
// Copyright 2013-2025 Carnegie Robotics, LLC
// 4501 Hatfield Street, Pittsburgh, PA 15201

#include <cmath>

#include <gtest/gtest.h>

#include <MultiSense/MultiSenseUtilities.hh> // utilities header under test; include line elided in the listing, path assumed

using namespace multisense;

//
// Create a disparity image of a circular disk centered in the image
//
Image create_example_disparity_image(const CameraCalibration &left_calibration,
                                     const CameraCalibration &right_calibration,
                                     double obstacle_radius_m,
                                     double obstacle_distance_m,
                                     size_t width,
                                     size_t height)
{
    const double left_fx = left_calibration.P[0][0];
    const double left_cx = left_calibration.P[0][2];
    const double left_fy = left_calibration.P[1][1];
    const double left_cy = left_calibration.P[1][2];

    const double right_fx = right_calibration.P[0][0];
    const double right_cx = right_calibration.P[0][2];
    const double tx = right_calibration.P[0][3] / right_calibration.P[0][0];

    std::vector<uint8_t> data(width * height * sizeof(uint16_t), 0);

    uint16_t* raw_data = reinterpret_cast<uint16_t*>(data.data());

    for (size_t v = 0 ; v < height ; ++v)
    {
        for (size_t u = 0 ; u < width ; ++u)
        {
            // convert pixel to ray
            const double ray_x = (u / left_fx) - (left_cx / left_fx);
            const double ray_y = (v / left_fy) - (left_cy / left_fy);

            // scale ray so that z = obstacle_distance_m and test if we are within the disc radius; if so, compute our disparity
            if (std::hypot(ray_x * obstacle_distance_m, ray_y * obstacle_distance_m) <= obstacle_radius_m)
            {
                // project into the right image. Note v will be the same
                const double right_u = ((right_fx * ray_x * obstacle_distance_m) + (right_cx * obstacle_distance_m) + (right_fx * tx)) / obstacle_distance_m;

                const uint16_t disparity = static_cast<uint16_t>((static_cast<double>(u) - right_u) * 16.0);

                raw_data[v * width + u] = disparity;
            }
        }
    }

    return Image{std::make_shared<const std::vector<uint8_t>>(data),
                 0,
                 width * height * sizeof(uint16_t),
                 Image::PixelFormat::MONO16,
                 static_cast<int>(width),
                 static_cast<int>(height),
                 {},
                 {},
                 DataSource::LEFT_DISPARITY_RAW,
                 left_calibration}; // final member assumed; elided in the listing
}
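The factor of 16.0 in the cast above reflects the MONO16 disparity convention: disparities are stored as 1/16-pixel fixed-point values. A minimal standalone sketch of the relation the generator relies on (independent of the library, with nominal values chosen to match the tests below):

#include <cstdint>
#include <cstdio>

int main()
{
    // Nominal values matching the tests below: fx = 1000 px, tx = -0.27 m, disk at z = 4 m
    const double fx = 1000.0;
    const double tx = -0.27;
    const double z  = 4.0;

    // Rectified stereo: disparity in pixels is focal length times baseline over depth
    const double disparity_px = (fx * -tx) / z;                           // 67.5 px

    // MONO16 disparity images store 1/16-pixel fixed point, hence the * 16.0 in the generator
    const uint16_t encoded = static_cast<uint16_t>(disparity_px * 16.0);  // 1080

    std::printf("disparity = %.2f px, encoded = %u\n", disparity_px, static_cast<unsigned>(encoded));
    return 0;
}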

TEST(QMatrix, reproject)
{
    const float fx = 1500.0;
    const float fy = 1000.0;
    const float cx = 960.0;
    const float cy = 600.0;
    const float tx = -0.27;

    const CameraCalibration left_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, 0.0}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const CameraCalibration right_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, fx * tx}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    // Construct the Q matrix q from the left and right calibrations
    // (the exact construction call is elided in this source listing)

    // Project a dummy point into the left/right camera
    const double x = 0.5;
    const double y = 1.5;
    const double z = 5.5;

    // Left image
    const double left_u = ((fx * x) + (cx * z)) / z;
    const double left_v = ((fy * y) + (cy * z)) / z;

    // Right image
    const double right_u = ((fx * x) + (cx * z) + (tx * fx)) / z;

    const double disparity = left_u - right_u;

    const auto reprojected_pt = q.reproject(Pixel{static_cast<size_t>(std::round(left_u)), static_cast<size_t>(std::round(left_v))}, disparity);

    const double epsilon = 1e-2;

    EXPECT_NEAR(reprojected_pt.x, x, epsilon);
    EXPECT_NEAR(reprojected_pt.y, y, epsilon);
    EXPECT_NEAR(reprojected_pt.z, z, epsilon);
}
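For reference, reprojecting a rectified pixel and its disparity through the Q matrix reduces to the usual stereo relations z = fx * baseline / disparity, x = (u - cx) * z / fx, and y = (v - cy) * z / fy. A small standalone sketch (no library calls) that reproduces the expected values from the test above:

#include <cstdio>

int main()
{
    // Same values as TEST(QMatrix, reproject)
    const double fx = 1500.0, fy = 1000.0, cx = 960.0, cy = 600.0, tx = -0.27;
    const double x = 0.5, y = 1.5, z = 5.5;

    // Project the point into the rectified left image and compute its disparity
    const double u = (fx * x) / z + cx;
    const double v = (fy * y) / z + cy;
    const double d = -(tx * fx) / z;        // disparity = left_u - right_u

    // Reproject: recover depth from disparity, then back-project through the left pinhole model
    const double Z = (fx * -tx) / d;        // 5.5
    const double X = (u - cx) * Z / fx;     // 0.5
    const double Y = (v - cy) * Z / fy;     // 1.5

    std::printf("X=%.3f Y=%.3f Z=%.3f\n", X, Y, Z);
    return 0;
}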

TEST(create_depth_image, disk)  // test name assumed; elided in the listing
{
    const float fx = 1000.0;
    const float fy = 1000.0;
    const float cx = 960.0;
    const float cy = 600.0;
    const float tx = -0.27;
    const float aux_tx = -0.033;

    const CameraCalibration left_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, 0.0}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const CameraCalibration right_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, fx * tx}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const CameraCalibration aux_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, fx * aux_tx}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const double disk_distance_m = 4.0;

    // Generator arguments and frame construction below are partly assumed; the exact lines are elided in the listing
    const auto disparity_image = create_example_disparity_image(left_calibration, right_calibration,
                                                                1.0, disk_distance_m, 1920, 1200);

    ImageFrame frame{0, {}, StereoCalibration{left_calibration, right_calibration, aux_calibration}, {}, {}, {}, {}, {}, {}};
    frame.add_image(disparity_image);

    const float invalid = 7000.0;

    const auto float_depth_image = create_depth_image(frame,
                                                      Image::PixelFormat::FLOAT32,
                                                      DataSource::LEFT_DISPARITY_RAW,
                                                      false,
                                                      invalid);

    const auto aux_float_depth_image = create_depth_image(frame,
                                                          Image::PixelFormat::FLOAT32,
                                                          DataSource::LEFT_DISPARITY_RAW,
                                                          true,
                                                          invalid);

    const auto mono16_depth_image = create_depth_image(frame,
                                                       Image::PixelFormat::MONO16,
                                                       DataSource::LEFT_DISPARITY_RAW,
                                                       false,
                                                       invalid);

    ASSERT_TRUE(float_depth_image != std::nullopt);
    ASSERT_TRUE(aux_float_depth_image != std::nullopt);
    ASSERT_TRUE(mono16_depth_image != std::nullopt);

    for (int v = 0 ; v < disparity_image.height ; ++v)
    {
        for (int u = 0 ; u < disparity_image.width ; ++u)
        {
            if (disparity_image.at<uint16_t>(u, v).value() == 0)
            {
                EXPECT_DOUBLE_EQ(float_depth_image->at<float>(u, v).value(), invalid);
                EXPECT_EQ(mono16_depth_image->at<uint16_t>(u, v).value(), invalid);
            }
            else
            {
                EXPECT_DOUBLE_EQ(float_depth_image->at<float>(u, v).value(), disk_distance_m);

                // depths are in mm
                EXPECT_EQ(mono16_depth_image->at<uint16_t>(u, v).value(), static_cast<uint16_t>(disk_distance_m * 1000));

                // compute our aux pixel location for this depth measurement
                const int aux_u = u - static_cast<int>((aux_tx / tx) * disparity_image.at<uint16_t>(u, v).value() / 16.0);
                EXPECT_EQ(aux_float_depth_image->at<float>(aux_u, v).value(), disk_distance_m);
            }
        }
    }
}
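Two conventions exercised by this test are worth noting: MONO16 depth images store millimeters, and a rectified left pixel maps into the rectified aux image by shifting it by (aux_tx / tx) times its disparity, which is exactly the aux_u computation in the loop above. A standalone numeric sketch with the same nominal calibration values:

#include <cstdint>
#include <cstdio>

int main()
{
    // Same nominal values as the test above
    const double fx = 1000.0, tx = -0.27, aux_tx = -0.033;
    const double depth_m = 4.0;

    // Disparity of a point at 4 m, and its MONO16 fixed-point encoding
    const double d_px = (fx * -tx) / depth_m;                          // 67.5
    const uint16_t d_code = static_cast<uint16_t>(d_px * 16.0);        // 1080

    // MONO16 depth images store millimeters
    const uint16_t depth_mm = static_cast<uint16_t>(depth_m * 1000.0); // 4000

    // Horizontal shift between the rectified left and aux images scales with the
    // ratio of the baselines: aux_u = u - (aux_tx / tx) * disparity
    const double aux_shift_px = (aux_tx / tx) * (d_code / 16.0);       // 8.25

    std::printf("d = %.2f px, code = %u, depth = %u mm, aux shift = %.2f px\n",
                d_px, static_cast<unsigned>(d_code), static_cast<unsigned>(depth_mm), aux_shift_px);
    return 0;
}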

TEST(get_aux_3d_point, disk)  // test name assumed; elided in the listing
{
    const float fx = 1000.0;
    const float fy = 1000.0;
    const float cx = 960.0;
    const float cy = 600.0;
    const float tx = -0.27;
    const float aux_tx = -0.033;

    const CameraCalibration left_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, 0.0}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const CameraCalibration right_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, fx * tx}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const CameraCalibration aux_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, fx * aux_tx}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const double disk_distance_m = 4.0;

    // Generator arguments and frame construction below are partly assumed; the exact lines are elided in the listing
    const auto disparity_image = create_example_disparity_image(left_calibration, right_calibration,
                                                                1.0, disk_distance_m, 1920, 1200);

    ImageFrame frame{0, {}, StereoCalibration{left_calibration, right_calibration, aux_calibration}, {}, {}, {}, {}, {}, {}};
    frame.add_image(disparity_image);

    const auto invalid_point = get_aux_3d_point(frame, Pixel{0, 0}, 100, 0.01);

    ASSERT_TRUE(invalid_point == std::nullopt);

    const auto valid_point = get_aux_3d_point(frame, Pixel{static_cast<size_t>(cx), static_cast<size_t>(cy)}, 1000, 0.5);

    ASSERT_TRUE(valid_point != std::nullopt);

    EXPECT_DOUBLE_EQ(valid_point->z, disk_distance_m);
}
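get_aux_3d_point takes a search window in pixels and a pixel epsilon. One plausible way such a lookup can work, shown purely as an illustration rather than as the library's actual implementation: scan left-image columns around the requested aux column, predict where each would land in the rectified aux image given its disparity (the same aux_tx / tx scaling used above), and accept the first candidate within pixel_epsilon. A self-contained sketch using the disk's constant 67.5 px disparity:

#include <cmath>
#include <cstdio>
#include <optional>

// Hypothetical helper, not a library function: find the left-image column whose
// projection into the rectified aux image lands within pixel_epsilon of the target column.
std::optional<int> find_left_column(int target_aux_u,
                                    int search_window,
                                    double pixel_epsilon,
                                    double tx,
                                    double aux_tx,
                                    double disparity_px)
{
    for (int offset = -search_window; offset <= search_window; ++offset)
    {
        const int candidate_u = target_aux_u + offset;
        const double predicted_aux_u = candidate_u - (aux_tx / tx) * disparity_px;
        if (std::abs(predicted_aux_u - target_aux_u) <= pixel_epsilon)
        {
            return candidate_u;
        }
    }
    return std::nullopt;
}

int main()
{
    // Disk at 4 m with the calibration above: disparity is 67.5 px, so the rectified aux
    // image is shifted by (0.033 / 0.27) * 67.5 = 8.25 px relative to the left image
    const auto left_u = find_left_column(960, 1000, 0.5, -0.27, -0.033, 67.5);
    std::printf("matching left column: %d\n", left_u.value_or(-1));
    return 0;
}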

TEST(create_bgr_from_ycbcr420, neutral_chroma)  // test name assumed; elided in the listing
{
    const float fx = 1000.0;
    const float fy = 1000.0;
    const float cx = 960.0;
    const float cy = 600.0;

    // Variable name assumed; the declaration line is elided in the listing
    const CameraCalibration aux_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, 0.0}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const size_t width = 1920;
    const size_t height = 1200;

    const uint8_t val = 42;

    std::vector<uint8_t> y_data(width * height, val);

    // Values of 128 for cb/cr result in 0 values for the corresponding color pixels
    std::vector<uint8_t> cbcr_data(width * height / 2, 128);

    const Image y{std::make_shared<const std::vector<uint8_t>>(std::move(y_data)),
                  0,
                  width * height,
                  Image::PixelFormat::MONO8,
                  static_cast<int>(width),
                  static_cast<int>(height),
                  {},
                  {},
                  DataSource::AUX_LUMA_RAW,
                  aux_calibration}; // final member assumed; elided in the listing

    const Image cbcr{std::make_shared<const std::vector<uint8_t>>(std::move(cbcr_data)),
                     0,
                     width / 2 * height / 2,
                     Image::PixelFormat::MONO16,
                     static_cast<int>(width/2),
                     static_cast<int>(height/2),
                     {},
                     {},
                     DataSource::AUX_CHROMA_RAW,
                     aux_calibration}; // final member assumed; elided in the listing

    const auto bgr_image = create_bgr_from_ycbcr420(y, cbcr, DataSource::AUX_RAW);

    ASSERT_TRUE(bgr_image != std::nullopt);
    ASSERT_EQ(bgr_image->width, static_cast<int>(width));
    ASSERT_EQ(bgr_image->height, static_cast<int>(height));

    for (size_t h = 0 ; h < height ; ++h)
    {
        for (size_t w = 0 ; w < width ; ++w)
        {
            const auto pixel = bgr_image->at<std::array<uint8_t, 3>>(w, h);
            ASSERT_TRUE(pixel != std::nullopt);
            ASSERT_EQ(pixel->at(0), val);
            ASSERT_EQ(pixel->at(1), val);
            ASSERT_EQ(pixel->at(2), val);
        }
    }
}
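The chroma plane stores interleaved Cb/Cr at half resolution, and with neutral chroma (Cb = Cr = 128) every output pixel reduces to its luma value, which is what the loop above asserts. A standalone sketch of the conversion; the coefficients here are the common BT.601 full-range ones and are an assumption, since the library's exact conversion may differ, but neutral chroma yields B = G = R = Y either way:

#include <algorithm>
#include <array>
#include <cstdint>
#include <cstdio>

// BT.601 full-range YCbCr -> BGR (coefficients assumed; neutral chroma is grey regardless)
std::array<uint8_t, 3> ycbcr_to_bgr(uint8_t y, uint8_t cb, uint8_t cr)
{
    const double d_cb = cb - 128.0;
    const double d_cr = cr - 128.0;

    const double r = y + 1.402 * d_cr;
    const double g = y - 0.344136 * d_cb - 0.714136 * d_cr;
    const double b = y + 1.772 * d_cb;

    const auto clamp8 = [](double v) { return static_cast<uint8_t>(std::clamp(v, 0.0, 255.0)); };
    return {clamp8(b), clamp8(g), clamp8(r)};
}

int main()
{
    // Neutral chroma: the color offsets vanish and all three channels equal the luma value
    const auto bgr = ycbcr_to_bgr(42, 128, 128);
    std::printf("B=%u G=%u R=%u\n",
                static_cast<unsigned>(bgr[0]), static_cast<unsigned>(bgr[1]), static_cast<unsigned>(bgr[2]));
    return 0;
}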

TEST(create_pointcloud, disk)  // test name assumed; elided in the listing
{
    const float fx = 1000.0;
    const float fy = 1000.0;
    const float cx = 960.0;
    const float cy = 600.0;
    const float tx = -0.27;

    const CameraCalibration left_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, 0.0}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const CameraCalibration right_calibration{
        {{{fx, 0.0, cx}, {0.0, fy, cy}, {0.0, 0.0, 1.0}}},
        {{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}}},
        {{{fx, 0.0, cx, fx * tx}, {0.0, fy, cy, 0.0}, {0.0, 0.0, 1.0, 0.0}}},
        CameraCalibration::DistortionType::NONE,
        {}};

    const double disk_distance_m = 4.0;

    // Generator arguments and max-range values below are assumed; the exact lines are elided in the listing
    const auto disparity_image = create_example_disparity_image(left_calibration, right_calibration,
                                                                1.0, disk_distance_m, 1920, 1200);

    ImageFrame frame{0, {}, StereoCalibration{left_calibration, right_calibration, std::nullopt}, {}, {}, {}, {}, {}, {}};
    frame.add_image(disparity_image);

    const auto point_cloud = create_pointcloud(frame, 10.0);

    ASSERT_TRUE(point_cloud != std::nullopt);
    ASSERT_TRUE(!point_cloud->cloud.empty());

    for (const auto &point : point_cloud->cloud)
    {
        EXPECT_DOUBLE_EQ(point.z, disk_distance_m);
    }

    // A max range closer than the disk should filter out every point
    const auto point_cloud_empty = create_pointcloud(frame, 1.0);

    ASSERT_TRUE(point_cloud_empty != std::nullopt);
    ASSERT_TRUE(point_cloud_empty->cloud.empty());
}
Referenced declarations:

Point<void> reproject(const Pixel &pixel, double disparity) const

Image create_example_disparity_image(const CameraCalibration &left_calibration, const CameraCalibration &right_calibration, double obstacle_radius_m, double obstacle_distance_m, size_t width, size_t height)

TEST(QMatrix, reproject)

MULTISENSE_API std::optional<Point<void>> get_aux_3d_point(const ImageFrame &frame, const Pixel &rectified_aux_pixel, size_t max_pixel_search_window, double pixel_epsilon, const DataSource &disparity_source = DataSource::LEFT_DISPARITY_RAW)
    For a given pixel in the aux image, return the corresponding 3D point associated with the aux pixel.

MULTISENSE_API std::optional<PointCloud<Color>> create_color_pointcloud(const Image &disparity, const std::optional<Image> &color, double max_range, const StereoCalibration &calibration)
    Create a point cloud from an image frame and a color source.

MULTISENSE_API std::optional<Image> create_bgr_from_ycbcr420(const Image &luma, const Image &chroma, const DataSource &output_source)
    Convert a YCbCr420 luma + chroma image into a BGR color image.

MULTISENSE_API std::optional<Image> create_depth_image(const ImageFrame &frame, const Image::PixelFormat &depth_format, const DataSource &disparity_source = DataSource::LEFT_DISPARITY_RAW, bool compute_in_aux_frame = false, float invalid_value = 0)
    Create a depth image from an image frame.

MULTISENSE_API std::optional<PointCloud<void>> create_pointcloud(const ImageFrame &frame, double max_range, const DataSource &disparity_source = DataSource::LEFT_DISPARITY_RAW)

ImageFrame: A frame containing multiple images (indexed by DataSource).
Image: Represents a single image plus metadata.
Pixel: Pixel coordinates in an image.
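Taken together, the utilities exercised by this test file compose into a simple pipeline. A minimal usage sketch under the same assumptions as the tests (the include path is assumed, and the frame is presumed to already hold a LEFT_DISPARITY_RAW image and a stereo calibration):

#include <MultiSense/MultiSenseUtilities.hh> // include path assumed, as above

using namespace multisense;

// Minimal sketch (not part of the test file) showing how the utilities compose for a
// frame that already contains a LEFT_DISPARITY_RAW image and a stereo calibration.
void process_frame(const ImageFrame &frame)
{
    // FLOAT32 depth images hold meters; MONO16 depth images hold millimeters
    if (const auto depth = create_depth_image(frame, Image::PixelFormat::FLOAT32); depth)
    {
        // depth->at<float>(u, v) exposes the per-pixel depth
    }

    // Points farther than the max range (10 m here, chosen for illustration) are dropped
    if (const auto cloud = create_pointcloud(frame, 10.0); cloud)
    {
        // cloud->cloud holds one 3D point per valid disparity pixel
    }
}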