The open source OpenXR runtime
#!/usr/bin/env python3
# Copyright 2022, Collabora, Ltd.
# SPDX-License-Identifier: BSL-1.0
# Authors: Moshi Turner <moshiturner@protonmail.com>
"""Simple script to upload Monado camera calibrations to DepthAI devices."""

# TODO: make this work with calibrations from Basalt

import argparse
import json
from dataclasses import dataclass
from typing import List

import cv2
import depthai as dai
import numpy as np

parser = argparse.ArgumentParser(
    description='Upload a Monado camera calibration to a DepthAI device')

parser.add_argument("calibration_file")

parser.add_argument('--super-cow',
                    help="I know what I'm doing, run the script with no prompt",
                    dest='super_cow', action='store_true'
                    )

parser.add_argument('--baseline',
                    help="Specified camera baseline (by CAD, or whatever), in centimeters",
                    type=float
                    )

parser.set_defaults(super_cow=False)
parser.set_defaults(baseline=8)
args = parser.parse_args()

# print(args.super_cow, args.calibration_file)


if not args.super_cow:
    print("Warning! This script will erase the current calibration on your DepthAI device "
          "and replace it with something new, and there is no going back!")
    print("Also, there is no way to specify which device to upload to, "
          "so make sure you've only plugged one in!")
    print("If you don't know what you're doing, please exit this script!")
    print("Otherwise, type \"I know what I am doing\"")

    text = input()

    if text != "I know what I am doing":
        print("Prompt failed!")
        exit()

print("Baseline (cm):", args.baseline)

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
monoLeft = pipeline.create(dai.node.MonoCamera)
monoRight = pipeline.create(dai.node.MonoCamera)
xoutLeft = pipeline.create(dai.node.XLinkOut)
xoutRight = pipeline.create(dai.node.XLinkOut)

xoutLeft.setStreamName('left')
xoutRight.setStreamName('right')

# Properties
monoLeft.setBoardSocket(dai.CameraBoardSocket.LEFT)
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoRight.setBoardSocket(dai.CameraBoardSocket.RIGHT)
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)

# Linking
monoRight.out.link(xoutRight.input)
monoLeft.out.link(xoutLeft.input)


@dataclass
class Camera:
    camera_matrix: List[List[float]]
    distortion: List[float]
    # rotation_matrix: List[List[float]]


with open(args.calibration_file) as f:
    calibration_json = json.load(f)

fisheye = calibration_json["cameras"][0]["model"] == "fisheye_equidistant4"

print("Fisheye:", fisheye)

if fisheye:
    camera_model = dai.CameraModel.Fisheye
else:
    camera_model = dai.CameraModel.Perspective

# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    # Output queues will be used to get the grayscale frames from the outputs defined above
    qLeft = device.getOutputQueue(name="left", maxSize=4, blocking=False)
    qRight = device.getOutputQueue(name="right", maxSize=4, blocking=False)

    while True:
        # Instead of get (blocking), we use tryGet (nonblocking) which will return
        # the available data or None otherwise
        inLeft = qLeft.tryGet()
        inRight = qRight.tryGet()

        if inLeft is not None:
            cv2.imshow("left", inLeft.getCvFrame())

        if inRight is not None:
            cv2.imshow("right", inRight.getCvFrame())

        if cv2.waitKey(1) == ord('q'):
            break
        # Leave the preview loop after a single pass
        break

    # Placeholder intrinsics (3x3 camera matrix) and 14 distortion coefficients per camera
    cameras = [Camera([[0, 0, 0], [0, 0, 0], [0, 0, 0]], [0.0] * 14),
               Camera([[0, 0, 0], [0, 0, 0], [0, 0, 0]], [0.0] * 14)]

    # Fill in the intrinsics and distortion coefficients from the calibration file
    for ele, camera_struct in zip(calibration_json["cameras"], cameras):
        Lint = ele["intrinsics"]
        camera_struct.camera_matrix = [
            [Lint["fx"], 0, Lint["cx"]],
            [0, Lint["fy"], Lint["cy"]],
            [0, 0, 1]
        ]
        Ldist = ele["distortion"]
        if fisheye:
            camera_struct.distortion[0] = Ldist["k1"]
            camera_struct.distortion[1] = Ldist["k2"]
            camera_struct.distortion[2] = Ldist["k3"]
            camera_struct.distortion[3] = Ldist["k4"]
        else:
            camera_struct.distortion[0] = Ldist["k1"]
            camera_struct.distortion[1] = Ldist["k2"]
            camera_struct.distortion[2] = Ldist["p1"]
            camera_struct.distortion[3] = Ldist["p2"]
            camera_struct.distortion[4] = Ldist["k3"]

    # Rebuild the 3x3 rotation matrix from the flat 9-element list, row by row
    R = np.array(calibration_json["opencv_stereo_calibrate"]["rotation"],
                 dtype=np.float32).reshape(3, 3)

    T = np.array(calibration_json["opencv_stereo_calibrate"]["translation"])
    T *= 100  # Convert to centimeters

    if False:  # Debug output, disabled by default
        print(R, T)
        print(np.array(cameras[0].camera_matrix),
              np.array(cameras[0].distortion),
              np.array(cameras[1].camera_matrix),
              np.array(cameras[1].distortion))

    print(cameras[0].distortion[:4])

    R1, R2, P1, P2, Q = cv2.fisheye.stereoRectify(
        np.array(cameras[0].camera_matrix),
        np.array(cameras[0].distortion[:4]),
        np.array(cameras[1].camera_matrix),
        np.array(cameras[1].distortion[:4]),
        (1280, 800),  # image size
        R,
        T,
        0
    )

    calh = dai.CalibrationHandler()

    current_eeprom = device.readCalibration().getEepromData()

    # Currently only copies whatever board info is on the device to the new calibration
    calh.setBoardInfo(productName=current_eeprom.productName, boardName=current_eeprom.boardName,
                      boardRev=current_eeprom.boardRev, boardConf=current_eeprom.boardConf,
                      hardwareConf=current_eeprom.hardwareConf, batchName=current_eeprom.batchName,
                      batchTime=current_eeprom.batchTime, boardOptions=current_eeprom.boardOptions,
                      boardCustom=current_eeprom.boardCustom)

    calh.setCameraExtrinsics(dai.CameraBoardSocket.LEFT, dai.CameraBoardSocket.RIGHT,
                             R, translation=T, specTranslation=[args.baseline, 0, 0])

    calh.setCameraIntrinsics(dai.CameraBoardSocket.LEFT, cameras[0].camera_matrix, 1280, 800)
    calh.setCameraIntrinsics(dai.CameraBoardSocket.RIGHT, cameras[1].camera_matrix, 1280, 800)

    calh.setCameraType(dai.CameraBoardSocket.LEFT, camera_model)
    calh.setCameraType(dai.CameraBoardSocket.RIGHT, camera_model)

    calh.setDistortionCoefficients(dai.CameraBoardSocket.LEFT, cameras[0].distortion)
    calh.setDistortionCoefficients(dai.CameraBoardSocket.RIGHT, cameras[1].distortion)

    calh.setStereoLeft(dai.CameraBoardSocket.LEFT, R1)
    calh.setStereoRight(dai.CameraBoardSocket.RIGHT, R2)

    success = device.flashCalibration(calh)

    print("success is:", success)
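
For reference, the script only reads a handful of fields from the Monado calibration JSON: a per-camera "model", "intrinsics" and "distortion" entry, plus the stereo rotation and translation under "opencv_stereo_calibrate". The sketch below is a Python dict mirroring that layout, inferred purely from the key accesses above; every numeric value is a placeholder, and real files from Monado's calibration tooling may carry additional fields.

# Minimal sketch of the calibration fields the script reads; all values are placeholders.
example_calibration = {
    "cameras": [
        {
            # Any model other than "fisheye_equidistant4" selects the perspective
            # model, in which case "distortion" holds k1, k2, p1, p2, k3 instead.
            "model": "fisheye_equidistant4",
            "intrinsics": {"fx": 500.0, "fy": 500.0, "cx": 640.0, "cy": 400.0},
            "distortion": {"k1": 0.0, "k2": 0.0, "k3": 0.0, "k4": 0.0},
        },
        {
            "model": "fisheye_equidistant4",
            "intrinsics": {"fx": 500.0, "fy": 500.0, "cx": 640.0, "cy": 400.0},
            "distortion": {"k1": 0.0, "k2": 0.0, "k3": 0.0, "k4": 0.0},
        },
    ],
    "opencv_stereo_calibrate": {
        # Flat 9-element rotation matrix, read row by row, and a translation that
        # the script scales by 100 (so presumably stored in meters).
        "rotation": [1, 0, 0, 0, 1, 0, 0, 0, 1],
        "translation": [-0.08, 0.0, 0.0],
    },
}

Invocation is then just the path to such a file, optionally with --baseline to record the spec (CAD) baseline in centimeters and --super-cow to skip the confirmation prompt.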