// Monado — the open source OpenXR runtime.
1// Copyright 2021-2022, Collabora, Ltd.
2// SPDX-License-Identifier: BSL-1.0
3/*!
4 * @file
5 * @brief DepthAI frameserver implementation.
6 * @author Moshi Turner <moshiturner@protonmail.com>
7 * @author Jakob Bornecrantz <jakob@collabora.com>
8 * @ingroup drv_depthai
9 */
10
11#include "xrt/xrt_tracking.h"
12
13#include "os/os_time.h"
14#include "os/os_threading.h"
15
16#include "math/m_api.h"
17#include "math/m_vec3.h"
18
19#include "util/u_sink.h"
20#include "util/u_var.h"
21#include "util/u_misc.h"
22#include "util/u_debug.h"
23#include "util/u_frame.h"
24#include "util/u_format.h"
25#include "util/u_logging.h"
26#include "util/u_trace_marker.h"
27
28#ifdef XRT_OS_LINUX
29#include "util/u_linux.h"
30#endif
31
32#include "tracking/t_tracking.h"
33
34#include "depthai_interface.h"
35
36#include "depthai/depthai.hpp"
37
38#include <stdio.h>
39#include <assert.h>
40#include <unistd.h>
41#include <pthread.h>
42
43#include <memory>
44#include <sstream>
45
46/*
47 *
48 * Printing functions.
49 *
50 */
51
52#define DEPTHAI_TRACE(d, ...) U_LOG_IFL_T(d->log_level, __VA_ARGS__)
53#define DEPTHAI_DEBUG(d, ...) U_LOG_IFL_D(d->log_level, __VA_ARGS__)
54#define DEPTHAI_INFO(d, ...) U_LOG_IFL_I(d->log_level, __VA_ARGS__)
55#define DEPTHAI_WARN(d, ...) U_LOG_IFL_W(d->log_level, __VA_ARGS__)
56#define DEPTHAI_ERROR(d, ...) U_LOG_IFL_E(d->log_level, __VA_ARGS__)
57
58DEBUG_GET_ONCE_LOG_OPTION(depthai_log, "DEPTHAI_LOG", U_LOGGING_INFO)
59DEBUG_GET_ONCE_NUM_OPTION(depthai_floodlight_brightness, "DEPTHAI_FLOODLIGHT_BRIGHTNESS", 1000)
60DEBUG_GET_ONCE_NUM_OPTION(depthai_startup_wait_frames, "DEPTHAI_STARTUP_WAIT_FRAMES", 0)
61DEBUG_GET_ONCE_NUM_OPTION(depthai_imu_hz, "DEPTHAI_IMU_HZ", 500)
62DEBUG_GET_ONCE_NUM_OPTION(depthai_imu_batch_size, "DEPTHAI_IMU_BATCH_SIZE", 2)
63DEBUG_GET_ONCE_NUM_OPTION(depthai_imu_max_batch_size, "DEPTHAI_IMU_MAX_BATCH_SIZE", 2)
64
65
66
67/*
68 *
69 * Helper frame wrapper code.
70 *
71 */
72
73extern "C" void
74depthai_frame_wrapper_destroy(struct xrt_frame *xf);
75
76/*!
77 * Manage dai::ImgFrame life-time.
78 */
79class DepthAIFrameWrapper
80{
81public:
82 struct xrt_frame frame = {};
83
84 std::shared_ptr<dai::ImgFrame> depthai_frame = {};
85
86
87public:
88 DepthAIFrameWrapper(std::shared_ptr<dai::ImgFrame> depthai_frame)
89 {
90 this->frame.reference.count = 1;
91 this->frame.destroy = depthai_frame_wrapper_destroy;
92 this->depthai_frame = depthai_frame;
93 }
94};
95
96extern "C" void
97depthai_frame_wrapper_destroy(struct xrt_frame *xf)
98{
99 DepthAIFrameWrapper *dfw = (DepthAIFrameWrapper *)xf;
100 delete dfw;
101}
102
103
104/*
105 *
106 * DepthAI frameserver.
107 *
108 */
109
/*!
 * Which camera sensor a monocular pipeline should be configured for.
 *
 * Resolutions/formats per value are set up in
 * @ref depthai_setup_monocular_pipeline.
 */
enum depthai_camera_type
{
	RGB_IMX_378,    //!< 1920x1080 RGB.
	RGB_OV_9782,    //!< 1280x800 RGB, rotated 180 degrees.
	GRAY_OV_9282_L, //!< 1280x800 grayscale, left socket.
	GRAY_OV_9282_R, //!< 1280x800 grayscale, right socket.
	GRAY_OV_7251_L, //!< 640x480 grayscale, left socket.
	GRAY_OV_7251_R, //!< 640x480 grayscale, right socket.
};
119
/*!
 * DepthAI frameserver support the Luxonis Oak devices.
 *
 * @ingroup drv_depthai
 */
struct depthai_fs
{
	//! Base frameserver, must be first so xrt_fs pointers can be cast back.
	struct xrt_fs base;
	//! Frame context node, used for life-time management.
	struct xrt_frame_node node;
	//! Thread pulling image frames from the device.
	struct os_thread_helper image_thread;
	//! Thread pulling IMU samples from the device.
	struct os_thread_helper imu_thread;

	u_logging_level log_level;

	//! Dimensions and pixel format of the frames that will be pushed.
	uint32_t width;
	uint32_t height;
	xrt_format format;

	// Sinks: RGB, Left, Right, CamC — indexed by DepthAI instance number.
	xrt_frame_sink *sink[4];
	xrt_imu_sink *imu_sink;

	//! Debug-view sinks, one per camera sink.
	struct u_sink_debug debug_sinks[4];

	dai::Device *device;
	dai::DataOutputQueue *image_queue;
	dai::DataOutputQueue *imu_queue;

	//! Queue used to send camera control (exposure/ISO) commands.
	dai::DataInputQueue *control_queue;

	// Configuration for colour pipelines.
	dai::ColorCameraProperties::SensorResolution color_sensor_resolution;
	dai::ColorCameraProperties::ColorOrder color_order;

	// Configuration for grayscale pipelines.
	dai::MonoCameraProperties::SensorResolution grayscale_sensor_resolution;
	dai::CameraBoardSocket camera_board_socket;

	dai::CameraImageOrientation image_orientation;


	uint32_t fps;
	bool interleaved;
	bool oak_d_lite; //!< Set by depthai_guess_camera_type.

	//! IR floodlight state (OAK-D Pro style devices).
	struct
	{
		bool has;
		bool manual_control;

		u_var_draggable_f32 mA;
		float last_mA;
	} floodlights;

	//! Manual exposure control state.
	struct
	{
		bool active;
		// Remember, these hold a pointer to a value!
		u_var_draggable_u16 exposure_time_ui;
		u_var_draggable_u16 iso_ui;

		uint16_t exposure_time;
		uint16_t iso;

		// Last values actually sent to the device.
		uint16_t last_exposure_time;
		uint16_t last_iso;
	} manual_exposure;


	bool want_cameras;
	bool want_imu;
	bool half_size_ov9282;

	//! Startup frame-dropping state, see depthai_do_one_frame.
	uint32_t first_frames_idx;
	uint32_t first_frames_camera_to_watch;
};
194
195
196/*
197 *
198 * Internal functions.
199 *
200 */
201
/*!
 * Read the stereo calibration off the device and convert it into a Monado
 * @ref t_stereo_camera_calibration.
 *
 * @param depthai Frameserver, must have a valid device.
 * @param c_ptr   Out parameter, receives a new reference on success.
 * @return true on success, false if reading the calibration threw.
 */
static bool
depthai_get_gray_cameras_calibration(struct depthai_fs *depthai, struct t_stereo_camera_calibration **c_ptr)
{
	/*
	 * Read out values.
	 */

	// Left-to-right transform; column 3 holds the translation.
	std::vector<std::vector<float>> extrinsics = {};
	struct
	{
		std::vector<std::vector<float>> intrinsics = {};
		std::vector<float> distortion = {};
		int width = -1, height = -1;
	} left, right = {};


	/*
	 * Get data.
	 */

	// Try to create a device and see if that fail first.
	dai::CalibrationHandler calibData;
	try {
		calibData = depthai->device->readCalibration();
		std::tie(left.intrinsics, left.width, left.height) =
		    calibData.getDefaultIntrinsics(dai::CameraBoardSocket::LEFT);
		std::tie(right.intrinsics, right.width, right.height) =
		    calibData.getDefaultIntrinsics(dai::CameraBoardSocket::RIGHT);
		left.distortion = calibData.getDistortionCoefficients(dai::CameraBoardSocket::LEFT);
		right.distortion = calibData.getDistortionCoefficients(dai::CameraBoardSocket::RIGHT);
		extrinsics = calibData.getCameraExtrinsics(dai::CameraBoardSocket::LEFT, dai::CameraBoardSocket::RIGHT);
	} catch (std::exception &e) {
		std::string what = e.what();
		U_LOG_E("DepthAI error: %s", what.c_str());
		return false;
	}


	/*
	 * Copy to the Monado calibration struct.
	 */


	// Good enough assumption that they're using the same distortion model
	enum t_camera_distortion_model type = T_DISTORTION_OPENCV_RADTAN_14;
	if (calibData.getDistortionModel(dai::CameraBoardSocket::LEFT) == dai::CameraModel::Fisheye) {
		type = T_DISTORTION_FISHEYE_KB4;
	}

	uint32_t num_dist = t_num_params_from_distortion_model(type);

	struct t_stereo_camera_calibration *c = NULL;
	t_stereo_camera_calibration_alloc(&c, type);

	// Copy intrinsics
	c->view[0].image_size_pixels.w = left.width;
	c->view[0].image_size_pixels.h = left.height;
	c->view[1].image_size_pixels.w = right.width;
	c->view[1].image_size_pixels.h = right.height;
	for (uint32_t row = 0; row < 3; row++) {
		for (uint32_t col = 0; col < 3; col++) {
			c->view[0].intrinsics[row][col] = left.intrinsics[row][col];
			c->view[1].intrinsics[row][col] = right.intrinsics[row][col];
		}
	}

	// Copy distortion parameters for both views.
	c->view[0].distortion_model = type;
	c->view[1].distortion_model = type;
	for (uint32_t i = 0; i < num_dist; i++) {
		c->view[0].distortion_parameters_as_array[i] = left.distortion[i];
		c->view[1].distortion_parameters_as_array[i] = right.distortion[i];
	}

	// Copy translation
	for (uint32_t i = 0; i < 3; i++) {
		// Is in centimeters, odd. Monado uses meters.
		c->camera_translation[i] = extrinsics[i][3] / 100.0f;
	}

	// Copy rotation
	for (uint32_t row = 0; row < 3; row++) {
		for (uint32_t col = 0; col < 3; col++) {
			c->camera_rotation[row][col] = extrinsics[row][col];
		}
	}

	// To properly handle ref counting.
	t_stereo_camera_calibration_reference(c_ptr, c);
	t_stereo_camera_calibration_reference(&c, NULL);

	return true;
}
294
295//!@todo this function will look slightly different for an OAK-D Pro with dot projectors - mine only has floodlights
296void
297depthai_guess_ir_drivers(struct depthai_fs *depthai)
298{
299 std::vector<std::tuple<std::string, int, int>> list_of_drivers = depthai->device->getIrDrivers();
300 depthai->floodlights.has = false;
301
302 for (std::tuple<std::string, int, int> elem : list_of_drivers) {
303 if (std::get<0>(elem) == "LM3644") {
304 DEPTHAI_DEBUG(depthai, "DepthAI: Found an IR floodlight");
305 depthai->floodlights.has = true;
306 }
307 }
308
309 if (!depthai->floodlights.has) {
310 DEPTHAI_DEBUG(depthai, "DepthAI: Didn't find any IR illuminators");
311 }
312}
313
314static void
315depthai_guess_camera_type(struct depthai_fs *depthai)
316{
317 // We could be a lot more pedantic here, but let's just not.
318 // For now, ov7251 == oak-d lite, and ov9282 == oak-d/oak-d S2/oak-d pro
319 std::ostringstream oss = {};
320 std::vector<dai::CameraBoardSocket> sockets = depthai->device->getConnectedCameras();
321 std::unordered_map<dai::CameraBoardSocket, std::string> sensornames = depthai->device->getCameraSensorNames();
322
323 bool ov9282 = false;
324
325 bool ov7251 = false;
326
327
328
329 for (size_t i = 0; i < sockets.size(); i++) {
330 dai::CameraBoardSocket sock = sockets[i];
331 std::string sensorname = sensornames.at(sock);
332 if (sensorname == "OV9282" || sensorname == "OV9*82") {
333 ov9282 = true;
334 } else if (sensorname == "OV7251") {
335 ov7251 = true;
336 }
337 oss << "'" << static_cast<int>(sock) << "': " << sensorname << ", ";
338 }
339
340
341 std::string str = oss.str();
342
343 DEPTHAI_DEBUG(depthai, "DepthAI: Connected cameras: %s", str.c_str());
344
345 if (ov9282 && !ov7251) {
346 // OAK-D
347 DEPTHAI_DEBUG(depthai, "DepthAI: Found an OAK-D!");
348 depthai->oak_d_lite = false;
349 } else if (ov7251 && !ov9282) {
350 // OAK-D Lite
351 DEPTHAI_DEBUG(depthai, "DepthAI: Found and OAK-D Lite!");
352 depthai->oak_d_lite = true;
353 } else {
354 DEPTHAI_WARN(depthai,
355 "DepthAI: Not sure what kind of device this is - going to pretend this is an OAK-D.");
356 depthai->oak_d_lite = false;
357 }
358}
359
360static void
361depthai_print_calib(struct depthai_fs *depthai)
362{
363 if (depthai->log_level > U_LOGGING_DEBUG) {
364 return;
365 }
366
367 struct t_stereo_camera_calibration *c = NULL;
368
369 if (!depthai_get_gray_cameras_calibration(depthai, &c)) {
370 return;
371 }
372
373 t_stereo_camera_calibration_dump(c);
374 t_stereo_camera_calibration_reference(&c, NULL);
375}
376
377
378static void
379depthai_do_one_frame(struct depthai_fs *depthai)
380{
381 std::shared_ptr<dai::ImgFrame> imgFrame = depthai->image_queue->get<dai::ImgFrame>();
382 if (!imgFrame) {
383 std::cout << "Not ImgFrame" << std::endl;
384 return; // Nothing to do.
385 }
386
387 // Trace-marker here for timing after we have gotten a frame.
388 SINK_TRACE_IDENT(depthai_frame);
389
390
391 // Get the timestamp.
392 auto duration = imgFrame->getTimestamp().time_since_epoch();
393 uint32_t num = imgFrame->getInstanceNum();
394 auto nano = std::chrono::duration_cast<std::chrono::duration<int64_t, std::nano>>(duration);
395 uint64_t timestamp_ns = nano.count();
396
397 if (num >= ARRAY_SIZE(depthai->sink)) {
398 DEPTHAI_ERROR(depthai, "Instance number too large! (%u)", num);
399 return;
400 }
401
402 if (depthai->sink[num] == nullptr) {
403 DEPTHAI_ERROR(depthai, "No sink waiting for frame! (%u)", num);
404 return;
405 }
406
407 if (depthai->first_frames_idx < debug_get_num_option_depthai_startup_wait_frames()) {
408 if (depthai->first_frames_idx == 0) {
409 depthai->first_frames_camera_to_watch = num;
410 }
411 if (num != depthai->first_frames_camera_to_watch) {
412 return;
413 }
414 depthai->first_frames_idx++;
415 return;
416 }
417
418 // Create a wrapper that will keep the frame alive as long as the frame was alive.
419 DepthAIFrameWrapper *dfw = new DepthAIFrameWrapper(imgFrame);
420
421 // Fill in all of the data.
422 struct xrt_frame *xf = &dfw->frame;
423 xf->width = depthai->width;
424 xf->height = depthai->height;
425 xf->format = depthai->format;
426 xf->timestamp = timestamp_ns;
427 xf->data = imgFrame->getData().data();
428
429 // Calculate stride and size, assuming tightly packed rows.
430 u_format_size_for_dimensions(xf->format, xf->width, xf->height, &xf->stride, &xf->size);
431
432 // Push the frame to the sink.
433 xrt_sink_push_frame(depthai->sink[num], xf);
434 u_sink_debug_push_frame(&depthai->debug_sinks[num], xf);
435
436 // If downstream wants to keep the frame they would have referenced it.
437 xrt_frame_reference(&xf, NULL);
438}
439
440static void
441depthai_maybe_send_exposure_command(struct depthai_fs *depthai)
442{
443 if (!depthai->manual_exposure.active) {
444 return;
445 }
446
447 // If the user hasn't changed the exposure values since last we sent a command, we don't need to send a new one.
448 if (depthai->manual_exposure.last_exposure_time == depthai->manual_exposure.exposure_time && //
449 depthai->manual_exposure.last_iso == depthai->manual_exposure.iso) {
450 return;
451 }
452
453 dai::CameraControl ctrl;
454 ctrl.setManualExposure(depthai->manual_exposure.exposure_time, depthai->manual_exposure.iso);
455 depthai->control_queue->send(ctrl);
456
457 depthai->manual_exposure.last_exposure_time = depthai->manual_exposure.exposure_time;
458 depthai->manual_exposure.last_iso = depthai->manual_exposure.iso;
459}
460
461static void
462depthai_maybe_send_floodlight_command(struct depthai_fs *depthai)
463{
464 if (!(depthai->floodlights.has && depthai->floodlights.manual_control)) {
465 return;
466 }
467
468 // If the user hasn't changed the exposure values since last we sent a command, we don't need to send a new one.
469 if (depthai->floodlights.last_mA == depthai->floodlights.mA.val) {
470 return;
471 }
472
473 depthai->device->setIrFloodLightBrightness(depthai->floodlights.mA.val);
474}
475
476
/*!
 * Image thread entry point: pulls frames from the device, pushes them to
 * the sinks and services pending camera-control commands.
 *
 * @param ptr The @ref depthai_fs, as passed to os_thread_helper_start.
 * @return Always nullptr.
 */
static void *
depthai_mainloop(void *ptr)
{
	struct depthai_fs *depthai = (struct depthai_fs *)ptr;

	U_TRACE_SET_THREAD_NAME("DepthAI: Image");
	os_thread_helper_name(&depthai->image_thread, "DepthAI: Image");

	DEPTHAI_DEBUG(depthai, "DepthAI: Image thread called");

	// The running flag may only be checked with the helper locked.
	os_thread_helper_lock(&depthai->image_thread);
	while (os_thread_helper_is_running_locked(&depthai->image_thread)) {
		os_thread_helper_unlock(&depthai->image_thread);

		depthai_do_one_frame(depthai);

		depthai_maybe_send_exposure_command(depthai);
		depthai_maybe_send_floodlight_command(depthai);

		// Need to lock the thread when we go back to the while condition.
		os_thread_helper_lock(&depthai->image_thread);
	}
	os_thread_helper_unlock(&depthai->image_thread);

	DEPTHAI_DEBUG(depthai, "DepthAI: Image thread exiting");

	return nullptr;
}
505
506int64_t
507dai_ts_to_monado_ts(dai::Timestamp &in)
508{
509 return std::chrono::time_point<std::chrono::steady_clock, std::chrono::steady_clock::duration>{
510 std::chrono::seconds(in.sec) + std::chrono::nanoseconds(in.nsec)}
511 .time_since_epoch()
512 .count();
513}
514
515// Look at the WMR driver - that's where these averaging shenanigans come from ;)
516static void
517depthai_do_one_imu_frame(struct depthai_fs *depthai)
518{
519 std::shared_ptr<dai::IMUData> imuData = depthai->imu_queue->get<dai::IMUData>();
520
521 if (depthai->first_frames_idx < debug_get_num_option_depthai_startup_wait_frames()) {
522 return;
523 }
524
525
526 std::vector<dai::IMUPacket> imuPackets = imuData->packets;
527 uint32_t num_packets = (uint32_t)imuPackets.size();
528
529 /*
530 * We used to check num_packets here, but don't since they are now
531 * configurable. Tho we probably should test them, or warn when the
532 * number of packets is larger then batch size for too long.
533 */
534
535 struct xrt_vec3 a = {0, 0, 0};
536 struct xrt_vec3 g = {0, 0, 0};
537
538 int64_t ts = 0;
539
540 for (dai::IMUPacket imuPacket : imuPackets) {
541
542 dai::IMUReportAccelerometer &accel = imuPacket.acceleroMeter;
543 dai::IMUReportGyroscope &gyro = imuPacket.gyroscope;
544
545
546 int64_t ts_accel = dai_ts_to_monado_ts(accel.timestamp);
547 int64_t ts_gyro = dai_ts_to_monado_ts(gyro.timestamp);
548 int64_t diff = (ts_gyro - ts_accel);
549
550 ts += ts_accel / (2 * num_packets);
551 ts += ts_gyro / (2 * num_packets);
552
553 float diff_in_ms = (float)(abs((double)diff) / (double)U_TIME_1MS_IN_NS);
554 if (diff_in_ms > 2.5) {
555 DEPTHAI_WARN(depthai, "Accel and gyro samples are too far apart - %f ms!", diff_in_ms);
556 }
557
558 struct xrt_vec3 this_a = {accel.x, accel.y, accel.z};
559 struct xrt_vec3 this_g = {gyro.x, gyro.y, gyro.z};
560
561
562 math_vec3_accum(&this_a, &a);
563 math_vec3_accum(&this_g, &g);
564 }
565
566 if (num_packets > 1) {
567 float scalar = 1.0f / (float)num_packets;
568 math_vec3_scalar_mul(scalar, &a);
569 math_vec3_scalar_mul(scalar, &g);
570 }
571
572
573 // Prepare sample
574 xrt_imu_sample sample;
575 sample.timestamp_ns = ts;
576
577 // Need to swap x and y axis for Oak-D cameras at least:
578 sample.accel_m_s2.x = a.y;
579 sample.accel_m_s2.y = -a.x;
580 sample.accel_m_s2.z = a.z;
581
582 sample.gyro_rad_secs.x = g.y;
583 sample.gyro_rad_secs.y = -g.x;
584 sample.gyro_rad_secs.z = g.z;
585
586 // Sample prepared, now push it out.
587 xrt_sink_push_imu(depthai->imu_sink, &sample);
588
589 // Only do this if we are really debugging stuff.
590#ifdef XRT_FEATURE_TRACING
591 static timepoint_ns last_ns = 0;
592 if (last_ns == 0) {
593 last_ns = ts - U_TIME_1MS_IN_NS; // Just so it isn't zero.
594 }
595
596 timepoint_ns now_ns = (timepoint_ns)os_monotonic_get_ns();
597 timepoint_ns now_diff_ns = ts - now_ns;
598 timepoint_ns last_diff_ns = ts - last_ns;
599 last_ns = ts;
600
601 double now_diff_ms = time_ns_to_ms_f(now_diff_ns);
602 double last_diff_ms = time_ns_to_ms_f(last_diff_ns);
603
604 float gyro_length = m_vec3_len(g);
605 float weighted_gyro_length = gyro_length * time_ns_to_s(last_diff_ns);
606
607#ifdef U_TRACE_TRACY
608 TracyCPlot("DepthAI IMU to now(ms)", now_diff_ms);
609 TracyCPlot("DepthAI IMU to last(ms)", last_diff_ms);
610 TracyCPlot("DepthAI IMU num packets", num_packets);
611 TracyCPlot("DepthAI IMU gyro length", gyro_length);
612 TracyCPlot("DepthAI IMU gyro weighted length", weighted_gyro_length);
613#endif
614#endif
615}
616
/*!
 * IMU thread entry point: pulls IMU batches from the device and pushes
 * averaged samples to the IMU sink.
 *
 * @param ptr The @ref depthai_fs, as passed to os_thread_helper_start.
 * @return Always nullptr.
 */
static void *
depthai_imu_mainloop(void *ptr)
{
	struct depthai_fs *depthai = (struct depthai_fs *)ptr;

	U_TRACE_SET_THREAD_NAME("DepthAI: IMU");
	os_thread_helper_name(&depthai->imu_thread, "DepthAI: IMU");

#ifdef XRT_OS_LINUX
	// Try to raise priority of this thread.
	u_linux_try_to_set_realtime_priority_on_thread(depthai->log_level, "DepthAI: IMU");
#endif

	DEPTHAI_DEBUG(depthai, "DepthAI: IMU thread called");

	// The running flag may only be checked with the helper locked.
	os_thread_helper_lock(&depthai->imu_thread);
	while (os_thread_helper_is_running_locked(&depthai->imu_thread)) {
		os_thread_helper_unlock(&depthai->imu_thread);

		depthai_do_one_imu_frame(depthai);

		// Need to lock the thread when we go back to the while condition.
		os_thread_helper_lock(&depthai->imu_thread);
	}
	os_thread_helper_unlock(&depthai->imu_thread);

	DEPTHAI_DEBUG(depthai, "DepthAI: IMU thread exiting");

	return nullptr;
}
647
648static bool
649depthai_destroy(struct depthai_fs *depthai)
650{
651 DEPTHAI_DEBUG(depthai, "DepthAI: Frameserver destroy called");
652 os_thread_helper_destroy(&depthai->image_thread);
653 os_thread_helper_destroy(&depthai->imu_thread);
654 u_var_remove_root(depthai);
655 for (int i = 0; i < 4; i++) {
656 u_sink_debug_destroy(&depthai->debug_sinks[i]);
657 }
658
659 // To work around use after free issue detected by ASan, v2.13.3 has this bug.
660 if (depthai->image_queue) {
661 depthai->image_queue->close();
662 }
663 if (depthai->imu_queue) {
664 depthai->imu_queue->close();
665 }
666 delete depthai->device;
667
668 free(depthai);
669
670 return true;
671}
672
/*!
 * Configure and start a single-camera pipeline for @p camera_type.
 *
 * Sets the mode fields (width/height/format) and the DepthAI sensor
 * configuration on @p depthai, builds the pipeline, starts it and grabs
 * the "preview" output queue.
 */
static void
depthai_setup_monocular_pipeline(struct depthai_fs *depthai, enum depthai_camera_type camera_type)
{
	// Per-sensor mode configuration.
	switch (camera_type) {
	case (RGB_OV_9782):
		depthai->width = 1280;
		depthai->height = 800;
		depthai->format = XRT_FORMAT_R8G8B8;
		depthai->color_sensor_resolution = dai::ColorCameraProperties::SensorResolution::THE_800_P;
		depthai->image_orientation = dai::CameraImageOrientation::ROTATE_180_DEG;
		depthai->fps = 60; // Currently only supports 60.
		depthai->interleaved = true;
		depthai->color_order = dai::ColorCameraProperties::ColorOrder::RGB;
		break;
	case (RGB_IMX_378):
		depthai->width = 1920;
		depthai->height = 1080;
		depthai->format = XRT_FORMAT_R8G8B8;
		depthai->color_sensor_resolution = dai::ColorCameraProperties::SensorResolution::THE_1080_P;
		depthai->image_orientation = dai::CameraImageOrientation::AUTO;
		depthai->fps = 60; // API says max is 118, anything over 60 seems broken with the v2.13.3 release.
		depthai->interleaved = true;
		depthai->color_order = dai::ColorCameraProperties::ColorOrder::RGB;
		break;
	case (GRAY_OV_9282_L):
		depthai->width = 1280;
		depthai->height = 800;
		depthai->format = XRT_FORMAT_L8;
		depthai->camera_board_socket = dai::CameraBoardSocket::LEFT;
		depthai->grayscale_sensor_resolution = dai::MonoCameraProperties::SensorResolution::THE_800_P;
		depthai->image_orientation = dai::CameraImageOrientation::AUTO;
		depthai->fps = 60; // Currently only supports 60.
		break;
	case (GRAY_OV_9282_R):
		depthai->width = 1280;
		depthai->height = 800;
		depthai->format = XRT_FORMAT_L8;
		depthai->camera_board_socket = dai::CameraBoardSocket::RIGHT;
		depthai->grayscale_sensor_resolution = dai::MonoCameraProperties::SensorResolution::THE_800_P;
		depthai->image_orientation = dai::CameraImageOrientation::AUTO;
		depthai->fps = 60; // Currently only supports 60.
		break;
	case (GRAY_OV_7251_L):
		depthai->width = 640;
		depthai->height = 480;
		depthai->format = XRT_FORMAT_L8;
		depthai->camera_board_socket = dai::CameraBoardSocket::LEFT;
		depthai->grayscale_sensor_resolution = dai::MonoCameraProperties::SensorResolution::THE_480_P;
		depthai->image_orientation = dai::CameraImageOrientation::AUTO;
		depthai->fps = 60; // Currently only supports 60.
		break;
	case (GRAY_OV_7251_R):
		depthai->width = 640;
		depthai->height = 480;
		depthai->format = XRT_FORMAT_L8;
		depthai->camera_board_socket = dai::CameraBoardSocket::RIGHT;
		depthai->grayscale_sensor_resolution = dai::MonoCameraProperties::SensorResolution::THE_480_P;
		depthai->image_orientation = dai::CameraImageOrientation::AUTO;
		depthai->fps = 60; // Currently only supports 60.
		break;
	default: assert(false);
	}

	dai::Pipeline p = {};

	auto xlinkOut = p.create<dai::node::XLinkOut>();
	xlinkOut->setStreamName("preview");

	std::shared_ptr<dai::node::ColorCamera> colorCam = nullptr;
	std::shared_ptr<dai::node::MonoCamera> grayCam = nullptr;

	// Only one of the two camera nodes is created, selected by format.
	if (depthai->format == XRT_FORMAT_R8G8B8) {
		colorCam = p.create<dai::node::ColorCamera>();
		colorCam->setPreviewSize(depthai->width, depthai->height);
		colorCam->setResolution(depthai->color_sensor_resolution);
		colorCam->setImageOrientation(depthai->image_orientation);
		colorCam->setInterleaved(depthai->interleaved);
		colorCam->setFps(depthai->fps);
		colorCam->setColorOrder(depthai->color_order);

		// Link plugins CAM -> XLINK
		colorCam->preview.link(xlinkOut->input);
	}

	if (depthai->format == XRT_FORMAT_L8) {
		grayCam = p.create<dai::node::MonoCamera>();
		grayCam->setBoardSocket(depthai->camera_board_socket);
		grayCam->setResolution(depthai->grayscale_sensor_resolution);
		grayCam->setImageOrientation(depthai->image_orientation);
		grayCam->setFps(depthai->fps);

		// Link plugins CAM -> XLINK
		grayCam->out.link(xlinkOut->input);
	}

	p.setXLinkChunkSize(0);

	// Start the pipeline
	depthai->device->startPipeline(p);
	depthai->image_queue = depthai->device->getOutputQueue("preview", 1, false).get(); // out of shared pointer
}
774
/*!
 * Configure and start the stereo grayscale pipeline (optionally with IMU
 * streaming), then fetch the image, IMU and control queues.
 */
static void
depthai_setup_stereo_grayscale_pipeline(struct depthai_fs *depthai)
{
	// Hardcoded to OV_9282 L/R
	if (!depthai->oak_d_lite) {
		// OV_9282 L/R
		depthai->width = 1280;
		depthai->height = 800;
		if (depthai->half_size_ov9282) {
			depthai->width /= 2;
			depthai->height /= 2;
			depthai->grayscale_sensor_resolution = dai::MonoCameraProperties::SensorResolution::THE_400_P;
		} else {
			depthai->grayscale_sensor_resolution = dai::MonoCameraProperties::SensorResolution::THE_800_P;
		}
		depthai->format = XRT_FORMAT_L8;
		depthai->camera_board_socket = dai::CameraBoardSocket::LEFT;
		depthai->image_orientation = dai::CameraImageOrientation::AUTO;
	} else {
		// OV_7251 L/R
		depthai->width = 640;
		depthai->height = 480;
		depthai->format = XRT_FORMAT_L8;
		depthai->camera_board_socket = dai::CameraBoardSocket::LEFT;
		depthai->grayscale_sensor_resolution = dai::MonoCameraProperties::SensorResolution::THE_480_P;
		depthai->image_orientation = dai::CameraImageOrientation::AUTO;
	}

	dai::Pipeline p = {};

	const char *name_images = "image_frames";
	const char *name_imu = "imu_samples";

	// Input stream for camera control (exposure/ISO) commands.
	auto controlIn = p.create<dai::node::XLinkIn>();
	controlIn->setStreamName("control");

	if (depthai->want_cameras) {

		std::shared_ptr<dai::node::XLinkOut> xlinkOut = p.create<dai::node::XLinkOut>();
		xlinkOut->setStreamName(name_images);

		dai::CameraBoardSocket sockets[2] = {
		    dai::CameraBoardSocket::LEFT,
		    dai::CameraBoardSocket::RIGHT,
		};

		// One mono camera node per socket, both feeding the same stream.
		for (int i = 0; i < 2; i++) {
			std::shared_ptr<dai::node::MonoCamera> grayCam = nullptr;

			grayCam = p.create<dai::node::MonoCamera>();
			grayCam->setBoardSocket(sockets[i]);
			grayCam->setResolution(depthai->grayscale_sensor_resolution);
			grayCam->setImageOrientation(depthai->image_orientation);
			grayCam->setFps(depthai->fps);

			// Link plugins CAM -> XLINK
			grayCam->out.link(xlinkOut->input);
			// Link control to camera
			controlIn->out.link(grayCam->inputControl);
		}
	}

	if (depthai->want_imu) {
		uint32_t imu_hz = (uint32_t)debug_get_num_option_depthai_imu_hz();
		uint32_t batch_size = (uint32_t)debug_get_num_option_depthai_imu_batch_size();
		uint32_t max_batch_size = (uint32_t)debug_get_num_option_depthai_imu_max_batch_size();

		/*
		 * Limitations from:
		 * https://docs.luxonis.com/projects/api/en/latest/components/nodes/imu/#limitations
		 */
		switch (imu_hz) {
		case 400: // Supposed to be okay
			DEPTHAI_DEBUG(depthai, "%uHz IMU sample rate is supposed to be ok.", imu_hz);
			break;
		case 500: // Maybe ok?
			DEPTHAI_INFO(depthai, "%uHz IMU sample rate maybe produce jitters.", imu_hz);
			break;
		default: // Not known to be good on any (or both IMU and both Gyra/Accel at the same time).
			DEPTHAI_WARN(depthai, "%uHz IMU sample rate not a known good rate.", imu_hz);
			break;
		}

		switch (batch_size) {
		case 1:
		case 2: // Seems okay
			DEPTHAI_DEBUG(depthai, "%u IMU batch size is supposed to be ok.", batch_size);
			break;
		default: // Not known to be good on any
			DEPTHAI_WARN(depthai, "%iHz IMU batch size is not tested!", batch_size);
			break;
		}

		// Sanitize the user-provided batch sizes.
		if (max_batch_size < 2) {
			DEPTHAI_WARN(depthai, "Max batch size(%u) smaller then 2, setting two.", max_batch_size);
			max_batch_size = 2;
		}

		if (max_batch_size < batch_size) {
			DEPTHAI_WARN(depthai, "Max batch size(%u) smaller then batch size(%u), setting to batch size.",
			             max_batch_size, batch_size);
			max_batch_size = batch_size;
		}

		std::shared_ptr<dai::node::XLinkOut> xlinkOut_imu = p.create<dai::node::XLinkOut>();
		xlinkOut_imu->setStreamName(name_imu);

		// Stream raw accelerometer and gyroscope at the requested rate.
		auto imu = p.create<dai::node::IMU>();
		imu->enableIMUSensor({dai::IMUSensor::ACCELEROMETER_RAW, dai::IMUSensor::GYROSCOPE_RAW}, imu_hz);
		imu->setBatchReportThreshold(batch_size);
		imu->setMaxBatchReports(max_batch_size);
		imu->out.link(xlinkOut_imu->input);
	}

	p.setXLinkChunkSize(0);

	// Start the pipeline
	depthai->device->startPipeline(p);
	if (depthai->want_cameras) {
		depthai->image_queue =
		    depthai->device->getOutputQueue(name_images, 4, false).get(); // out of shared pointer
	}
	if (depthai->want_imu) {
		depthai->imu_queue = depthai->device->getOutputQueue(name_imu, 4, false).get(); // out of shared pointer
	}


	depthai->control_queue = depthai->device->getInputQueue("control").get();

	if (depthai->floodlights.has) {
		float mA = depthai->floodlights.mA.val;

		// Clamp the initial brightness to 1500mA.
		if (mA > 1500.0f) {
			DEPTHAI_ERROR(depthai, "Can not set brightness to more then 1500mA, clamping!");
			mA = 1500.0f;
		}

		if (mA > 0.0f) {
			depthai->device->setIrFloodLightBrightness(mA);
		}
	}

	//!@todo This code will turn the exposure time down, but you may not want it. Or we may want to rework Monado's
	//! AEG code to control the IR floodlight brightness in concert with the exposure time. For now, disable.
}
920
921#ifdef DEPTHAI_HAS_MULTICAM_SUPPORT
922static void
923depthai_setup_stereo_rgb_pipeline(struct depthai_fs *depthai)
924{
925 // Hardcoded to OV_9782 L/R
926 depthai->width = 1280;
927 depthai->height = 800;
928 depthai->format = XRT_FORMAT_R8G8B8;
929 depthai->camera_board_socket = dai::CameraBoardSocket::LEFT;
930 depthai->color_sensor_resolution = dai::ColorCameraProperties::SensorResolution::THE_800_P;
931 depthai->image_orientation = dai::CameraImageOrientation::AUTO;
932 depthai->fps = 30; // Supports up to 60, but pushing 60fps over USB is typically hard
933
934 dai::Pipeline p = {};
935
936 const char *name = "frames";
937 std::shared_ptr<dai::node::XLinkOut> xlinkOut = p.create<dai::node::XLinkOut>();
938 xlinkOut->setStreamName(name);
939
940 dai::CameraBoardSocket sockets[2] = {
941 dai::CameraBoardSocket::CAM_B,
942 dai::CameraBoardSocket::CAM_C,
943 };
944
945 for (int i = 0; i < 2; i++) {
946 std::shared_ptr<dai::node::ColorCamera> grayCam = nullptr;
947
948 grayCam = p.create<dai::node::ColorCamera>();
949 grayCam->setPreviewSize(1280, 800);
950 grayCam->setBoardSocket(sockets[i]);
951 grayCam->setResolution(depthai->color_sensor_resolution);
952 grayCam->setImageOrientation(depthai->image_orientation);
953 grayCam->setInterleaved(true);
954 grayCam->setFps(depthai->fps);
955 grayCam->setColorOrder(dai::ColorCameraProperties::ColorOrder::RGB);
956
957 // Link plugins CAM -> XLINK
958 grayCam->preview.link(xlinkOut->input);
959 }
960
961 p.setXLinkChunkSize(0);
962
963 // Start the pipeline
964 depthai->device->startPipeline(p);
965 depthai->queue = depthai->device->getOutputQueue(name, 4, false).get(); // out of shared pointer
966}
967#endif
968
969/*
970 *
971 * Frame server functions.
972 *
973 */
974
/*!
 * Cast to derived type.
 */
static inline struct depthai_fs *
depthai_fs(struct xrt_fs *xfs)
{
	// The xrt_fs `base` member is the first field of depthai_fs, so the
	// two pointers are interchangeable.
	return reinterpret_cast<struct depthai_fs *>(xfs);
}
983
984static bool
985depthai_fs_enumerate_modes(struct xrt_fs *xfs, struct xrt_fs_mode **out_modes, uint32_t *out_count)
986{
987 struct depthai_fs *depthai = depthai_fs(xfs);
988 DEPTHAI_DEBUG(depthai, "DepthAI: Enumerate modes called");
989
990 struct xrt_fs_mode *modes = U_TYPED_ARRAY_CALLOC(struct xrt_fs_mode, 1);
991 if (modes == NULL) {
992 return false;
993 }
994
995 modes[0].width = depthai->width;
996 modes[0].height = depthai->height;
997 modes[0].format = depthai->format;
998 modes[0].stereo_format = XRT_STEREO_FORMAT_NONE;
999
1000 *out_modes = modes;
1001 *out_count = 1;
1002
1003 return true;
1004}
1005
1006static bool
1007depthai_fs_configure_capture(struct xrt_fs *xfs, struct xrt_fs_capture_parameters *cp)
1008{
1009 struct depthai_fs *depthai = depthai_fs(xfs);
1010 DEPTHAI_DEBUG(depthai, "DepthAI: Configure capture called");
1011
1012 // Noop
1013 return false;
1014}
1015
1016static bool
1017depthai_fs_stream_start(struct xrt_fs *xfs,
1018 struct xrt_frame_sink *xs,
1019 enum xrt_fs_capture_type capture_type,
1020 uint32_t descriptor_index)
1021{
1022 struct depthai_fs *depthai = depthai_fs(xfs);
1023 DEPTHAI_DEBUG(depthai, "DepthAI: Stream start called");
1024
1025 assert(descriptor_index == 0);
1026 (void)capture_type; // Don't care about this one just yet.
1027
1028 depthai->sink[0] = xs; // 0 == CamA-4L / RGB
1029 depthai->sink[1] = xs; // 1 == CamB-2L / Left Gray
1030 depthai->sink[2] = xs; // 2 == CamC-2L / Right Gray
1031 depthai->sink[3] = xs; // 3 == CamD-4L
1032
1033 os_thread_helper_start(&depthai->image_thread, depthai_mainloop, depthai);
1034
1035 return true;
1036}
1037
1038static bool
1039depthai_fs_slam_stream_start(struct xrt_fs *xfs, struct xrt_slam_sinks *sinks)
1040{
1041 struct depthai_fs *depthai = depthai_fs(xfs);
1042 DEPTHAI_DEBUG(depthai, "DepthAI: SLAM stream start called");
1043
1044 depthai->sink[0] = nullptr; // 0 == CamA-4L / RGB
1045 depthai->sink[1] = sinks->cams[0]; // 1 == CamB-2L / Left Gray
1046 depthai->sink[2] = sinks->cams[1]; // 2 == CamC-2L / Right Gray
1047 depthai->sink[3] = nullptr; // 3 == CamD-4L
1048 if (depthai->want_cameras && sinks->cams[0] != NULL && sinks->cams[1] != NULL) {
1049 os_thread_helper_start(&depthai->image_thread, depthai_mainloop, depthai);
1050 }
1051 if (depthai->want_imu && sinks->imu != NULL) {
1052 os_thread_helper_start(&depthai->imu_thread, depthai_imu_mainloop, depthai);
1053 depthai->imu_sink = sinks->imu;
1054 }
1055 return true;
1056}
1057
1058static bool
1059depthai_fs_stream_stop(struct xrt_fs *xfs)
1060{
1061 struct depthai_fs *depthai = depthai_fs(xfs);
1062 DEPTHAI_DEBUG(depthai, "DepthAI: Stream stop called");
1063
1064 // This call fully stops the thread.
1065 os_thread_helper_stop_and_wait(&depthai->image_thread);
1066 os_thread_helper_stop_and_wait(&depthai->imu_thread);
1067
1068 return true;
1069}
1070
1071static bool
1072depthai_fs_is_running(struct xrt_fs *xfs)
1073{
1074 struct depthai_fs *depthai = depthai_fs(xfs);
1075
1076 os_thread_helper_lock(&depthai->image_thread);
1077 bool running = os_thread_helper_is_running_locked(&depthai->image_thread);
1078 os_thread_helper_unlock(&depthai->image_thread);
1079
1080 return running;
1081}
1082
1083
1084/*
1085 *
1086 * Node functions.
1087 *
1088 */
1089
1090static void
1091depthai_fs_node_break_apart(struct xrt_frame_node *node)
1092{
1093 struct depthai_fs *depthai = container_of(node, struct depthai_fs, node);
1094 DEPTHAI_DEBUG(depthai, "DepthAI: Node break apart called");
1095
1096 depthai_fs_stream_stop(&depthai->base);
1097}
1098
1099static void
1100depthai_fs_node_destroy(struct xrt_frame_node *node)
1101{
1102 struct depthai_fs *depthai = container_of(node, struct depthai_fs, node);
1103 DEPTHAI_DEBUG(depthai, "DepthAI: Node destroy called");
1104
1105 // Safe to call, break apart have already stopped the stream.
1106 depthai_destroy(depthai);
1107}
1108
1109
1110/*
1111 *
1112 * Create function, needs to be last.
1113 *
1114 */
1115
1116static struct depthai_fs *
1117depthai_create_and_do_minimal_setup(void)
1118{
1119 // Try to create a device and see if that fail first.
1120 dai::Device *d;
1121 try {
1122 d = new dai::Device();
1123 } catch (std::exception &e) {
1124 std::string what = e.what();
1125 U_LOG_E("DepthAI error: %s", what.c_str());
1126 return nullptr;
1127 }
1128
1129 struct depthai_fs *depthai = U_TYPED_CALLOC(struct depthai_fs);
1130 depthai->base.enumerate_modes = depthai_fs_enumerate_modes;
1131 depthai->base.configure_capture = depthai_fs_configure_capture;
1132 depthai->base.stream_start = depthai_fs_stream_start;
1133 depthai->base.slam_stream_start = depthai_fs_slam_stream_start;
1134 depthai->base.stream_stop = depthai_fs_stream_stop;
1135 depthai->base.is_running = depthai_fs_is_running;
1136 depthai->node.break_apart = depthai_fs_node_break_apart;
1137 depthai->node.destroy = depthai_fs_node_destroy;
1138 depthai->log_level = debug_get_log_option_depthai_log();
1139 depthai->device = d;
1140
1141 depthai->manual_exposure.active = false;
1142 // Low values, useful for marker calibration on a monitor.
1143 depthai->manual_exposure.iso = 270;
1144 depthai->manual_exposure.exposure_time = 320;
1145
1146 depthai->manual_exposure.iso_ui.val = &depthai->manual_exposure.iso;
1147 depthai->manual_exposure.iso_ui.min = 0;
1148 depthai->manual_exposure.iso_ui.max = 1600;
1149 depthai->manual_exposure.iso_ui.step = 1;
1150
1151 depthai->manual_exposure.exposure_time_ui.val = &depthai->manual_exposure.exposure_time;
1152 depthai->manual_exposure.exposure_time_ui.min = 0;
1153 // 160,000 us = 0.1s
1154 depthai->manual_exposure.exposure_time_ui.max = 65535;
1155 depthai->manual_exposure.exposure_time_ui.step = 1;
1156
1157 depthai->floodlights.mA.val = debug_get_num_option_depthai_floodlight_brightness();
1158 depthai->floodlights.mA.min = 0.0f;
1159 depthai->floodlights.mA.max = 1500.0f;
1160 depthai->floodlights.mA.step = 1.0f;
1161
1162
1163
1164 u_var_add_root(depthai, "DepthAI Source", 0);
1165 for (int i = 0; i < 4; i++) {
1166 u_sink_debug_init(&depthai->debug_sinks[i]);
1167 }
1168 u_var_add_sink_debug(depthai, &depthai->debug_sinks[0], "RGB");
1169 u_var_add_sink_debug(depthai, &depthai->debug_sinks[1], "Left");
1170 u_var_add_sink_debug(depthai, &depthai->debug_sinks[2], "Right");
1171 u_var_add_sink_debug(depthai, &depthai->debug_sinks[3], "CamD");
1172
1173 u_var_add_bool(depthai, &depthai->manual_exposure.active, "Manual exposure");
1174
1175 u_var_add_draggable_u16(depthai, &depthai->manual_exposure.exposure_time_ui, "Exposure time");
1176 u_var_add_draggable_u16(depthai, &depthai->manual_exposure.iso_ui, "ISO");
1177
1178 depthai_guess_ir_drivers(depthai);
1179 if (depthai->floodlights.has) {
1180 u_var_add_bool(depthai, &depthai->floodlights.manual_control, "Manual floodlight control");
1181 u_var_add_draggable_f32(depthai, &depthai->floodlights.mA, "Floodlight brightness (mA)");
1182 }
1183
1184
1185 // Some debug printing.
1186 depthai_guess_camera_type(depthai);
1187
1188 depthai_print_calib(depthai);
1189
1190 // Make sure that the thread helper is initialised.
1191 os_thread_helper_init(&depthai->image_thread);
1192 os_thread_helper_init(&depthai->imu_thread);
1193
1194 return depthai;
1195}
1196
1197
1198/*
1199 *
1200 * 'Exported' functions.
1201 *
1202 */
1203
1204extern "C" struct xrt_fs *
1205depthai_fs_monocular_rgb(struct xrt_frame_context *xfctx)
1206{
1207 struct depthai_fs *depthai = depthai_create_and_do_minimal_setup();
1208 if (depthai == nullptr) {
1209 return nullptr;
1210 }
1211
1212 // Set after checking for null.
1213 depthai->want_imu = false;
1214 depthai->want_cameras = true;
1215
1216 // Currently hardcoded to the default Oak-D camera.
1217 enum depthai_camera_type camera_type = RGB_IMX_378;
1218
1219 // Last bit is to setup the pipeline.
1220 depthai_setup_monocular_pipeline(depthai, camera_type);
1221
1222 // And finally add us to the context when we are done.
1223 xrt_frame_context_add(xfctx, &depthai->node);
1224
1225 DEPTHAI_DEBUG(depthai, "DepthAI: Created");
1226
1227 return &depthai->base;
1228}
1229
1230extern "C" struct xrt_fs *
1231depthai_fs_slam(struct xrt_frame_context *xfctx, struct depthai_slam_startup_settings *settings)
1232{
1233 struct depthai_fs *depthai = depthai_create_and_do_minimal_setup();
1234 if (depthai == nullptr) {
1235 return nullptr;
1236 }
1237
1238 // Set after checking for null.
1239 depthai->fps = settings->frames_per_second;
1240 depthai->want_cameras = settings->want_cameras;
1241 depthai->want_imu = settings->want_imu;
1242 depthai->half_size_ov9282 = settings->half_size_ov9282;
1243
1244 // Last bit is to setup the pipeline.
1245 depthai_setup_stereo_grayscale_pipeline(depthai);
1246
1247 // And finally add us to the context when we are done.
1248 xrt_frame_context_add(xfctx, &depthai->node);
1249
1250 DEPTHAI_DEBUG(depthai, "DepthAI: Created");
1251
1252 return &depthai->base;
1253}
1254
1255extern "C" struct xrt_fs *
1256depthai_fs_stereo_grayscale_and_imu(struct xrt_frame_context *xfctx)
1257{
1258 struct depthai_fs *depthai = depthai_create_and_do_minimal_setup();
1259 if (depthai == nullptr) {
1260 return nullptr;
1261 }
1262
1263 // Set after checking for null.
1264 depthai->want_cameras = true;
1265 depthai->want_imu = true;
1266
1267 // Last bit is to setup the pipeline.
1268 depthai_setup_stereo_grayscale_pipeline(depthai);
1269
1270 // And finally add us to the context when we are done.
1271 xrt_frame_context_add(xfctx, &depthai->node);
1272
1273 DEPTHAI_DEBUG(depthai, "DepthAI: Created");
1274
1275 return &depthai->base;
1276}
1277
1278
1279extern "C" struct xrt_fs *
1280depthai_fs_just_imu(struct xrt_frame_context *xfctx)
1281{
1282 struct depthai_fs *depthai = depthai_create_and_do_minimal_setup();
1283 if (depthai == nullptr) {
1284 return nullptr;
1285 }
1286
1287 // Set after checking for null.
1288 depthai->want_cameras = false;
1289 depthai->want_imu = true;
1290
1291 // Last bit is to setup the pipeline.
1292 depthai_setup_stereo_grayscale_pipeline(depthai);
1293
1294 // And finally add us to the context when we are done.
1295 xrt_frame_context_add(xfctx, &depthai->node);
1296
1297 DEPTHAI_DEBUG(depthai, "DepthAI: Created");
1298
1299 return &depthai->base;
1300}
1301
1302#ifdef DEPTHAI_HAS_MULTICAM_SUPPORT
1303extern "C" struct xrt_fs *
1304depthai_fs_stereo_rgb(struct xrt_frame_context *xfctx)
1305{
1306 struct depthai_fs *depthai = depthai_create_and_do_minimal_setup();
1307 if (depthai == nullptr) {
1308 return nullptr;
1309 }
1310
1311 // Last bit is to setup the pipeline.
1312 depthai_setup_stereo_rgb_pipeline(depthai);
1313
1314 // And finally add us to the context when we are done.
1315
1316 xrt_frame_context_add(xfctx, &depthai->node);
1317 DEPTHAI_DEBUG(depthai, "DepthAI: Created");
1318 return &depthai->base;
1319}
1320#endif
1321
1322extern "C" bool
1323depthai_fs_get_stereo_calibration(struct xrt_fs *xfs, struct t_stereo_camera_calibration **c_ptr)
1324{
1325 struct depthai_fs *depthai = depthai_fs(xfs);
1326
1327 return depthai_get_gray_cameras_calibration(depthai, c_ptr);
1328}