Adjusted camera example application to utilize calibration data

Signed-off-by: Jacki <jacki@thejackimonster.de>
Jacki 2025-06-05 02:52:59 +02:00
parent 9016a16b19
commit 6314c867ca
3 changed files with 692 additions and 262 deletions


@@ -1,8 +1,33 @@
//
// Created by thejackimonster on 04.06.25.
//
// Copyright (c) 2025 thejackimonster. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
#include <opencv2/opencv.hpp>
#include <opencv2/videoio.hpp>
#include <stdio.h>
#include "device_imu.h"
const uint8_t chunk_map[128] = {
119, 54, 21, 0, 108, 22, 51, 63, 93, 99, 67, 7, 32, 112, 52,
43, 14, 35, 75, 116, 64, 71, 44, 89, 18, 88, 26, 61, 70, 56,
@@ -25,6 +50,39 @@ const uint8_t chunk_map[128] = {
#define CAM_BUFFER_SIZE ((CAM_HEIGHT + CAM_HEADER_LEN) * CAM_WIDTH)
#define CAM_IMAGE_DATA_SIZE (CAM_HEIGHT * CAM_WIDTH)
struct CameraCalibration {
struct {
size_t width;
size_t height;
} resolution;
float fx;
float fy;
float cx;
float cy;
size_t num_kc;
union {
float kc [12];
struct {
float k1;
float k2;
float k3;
float k4;
float k5;
float k6;
float p1;
float p2;
float s1;
float s2;
float s3;
float s4;
};
};
};
const float fx = 239.9401992353717F;
const float fy = 240.1440084390103F;
const float cx = 244.88379123391678F;
@@ -43,299 +101,368 @@ const float s2 = -0.0003866048077984273F;
const float s3 = +0.007856725692160133F;
const float s4 = +0.0003370697673688772F;
typedef cv::Point2f (*apply_variant_func)(float xn, float yn);
typedef cv::Point2f (*apply_variant_func)(const CameraCalibration& cam, float xn, float yn);
cv::Point2f apply_intrinsic_only(float xn, float yn) {
cv::Point2f apply_intrinsic_only(const CameraCalibration& cam, float xn, float yn) {
cv::Point2f p;
p.x = fx * xn + cx;
p.y = fy * yn + cy;
p.x = cam.fx * xn + cam.cx;
p.y = cam.fy * yn + cam.cy;
return p;
}
cv::Point2f apply_radial_only(float xn, float yn) {
cv::Point2f p;
float r, sp, cp;
float th, th_dist;
cv::Point2f apply_radial_only(const CameraCalibration& cam, float xn, float yn) {
cv::Point2f p;
float r, sp, cp;
float th, th_dist;
r = sqrtf(xn * xn + yn * yn);
if (r == 0) {
cp = 1.0F;
sp = 0.0F;
} else {
cp = xn / r;
sp = yn / r;
}
r = sqrtf(xn * xn + yn * yn);
th = atanf(r);
th_dist = th;
const float th2 = th * th;
const float th3 = th * th * th;
th_dist += k1 * th3;
th_dist += k2 * th2 * th3;
th_dist += k3 * th2 * th2 * th3;
th_dist += k4 * th3 * th3 * th3;
th_dist += k5 * th3 * th3 * th2 * th3;
th_dist += k6 * th3 * th3 * th2 * th2 * th3;
xn = th_dist * cp;
yn = th_dist * sp;
p.x = fx * xn + cx;
p.y = fy * yn + cy;
return p;
}
cv::Point2f apply_radial_tangential(float xn, float yn) {
cv::Point2f p;
float r, sp, cp;
float th, th_dist;
float dx, dy;
r = sqrtf(xn * xn + yn * yn);
if (r == 0) {
cp = 1.0F;
sp = 0.0F;
} else {
cp = xn / r;
sp = yn / r;
}
th = atanf(r);
th_dist = th;
const float th2 = th * th;
const float th3 = th * th * th;
th_dist += k1 * th3;
th_dist += k2 * th2 * th3;
th_dist += k3 * th2 * th2 * th3;
th_dist += k4 * th3 * th3 * th3;
th_dist += k5 * th3 * th3 * th2 * th3;
th_dist += k6 * th3 * th3 * th2 * th2 * th3;
xn = th_dist * cp;
yn = th_dist * sp;
r = xn * xn + yn * yn;
dx = (2.0F * xn * xn + r) * p1 + 2.0F * xn * yn * p2;
dy = (2.0F * yn * yn + r) * p2 + 2.0F * xn * yn * p1;
xn += dx;
yn += dy;
p.x = fx * xn + cx;
p.y = fy * yn + cy;
return p;
}
cv::Point2f apply_fisheye624(float xn, float yn) {
cv::Point2f p;
float r, sp, cp;
float th, th_dist;
float dx, dy;
r = sqrtf(xn * xn + yn * yn);
if (r == 0) {
cp = 1.0F;
sp = 0.0F;
} else {
cp = xn / r;
sp = yn / r;
}
th = atanf(r);
th_dist = th;
const float th2 = th * th;
const float th3 = th * th * th;
const float th5 = th3 * th2;
const float th6 = th3 * th3;
th_dist += k1 * th3;
th_dist += k2 * th5;
th_dist += k3 * th5 * th2;
th_dist += k4 * th6 * th3;
th_dist += k5 * th6 * th5;
th_dist += k6 * th6 * th5 * th2;
xn = th_dist * cp;
yn = th_dist * sp;
r = xn * xn + yn * yn;
dx = (2.0F * xn * xn + r) * p1 + 2.0F * xn * yn * p2;
dy = (2.0F * yn * yn + r) * p2 + 2.0F * xn * yn * p1;
dx += (s1 + s2 * r) * r;
dy += (s3 + s4 * r) * r;
xn += dx;
yn += dy;
p.x = fx * xn + cx;
p.y = fy * yn + cy;
return p;
}
cv::Mat build_maps_variant(apply_variant_func func, uint32_t width,
uint32_t height) {
cv::Mat map = cv::Mat(cv::Size(height, width), CV_16SC2);
cv::Point2f p;
cv::Vec2s v;
size_t x, y;
for (y = 0; y < map.rows; y++) {
for (x = 0; x < map.cols; x++) {
const float xn = (x - cx) / fx;
const float yn = (y - cy) / fy;
p = func(xn, yn);
v[0] = static_cast<short>(p.x);
v[1] = static_cast<short>(p.y);
map.at<cv::Vec2s>(y, x) = v;
if (r == 0) {
cp = 1.0F;
sp = 0.0F;
} else {
cp = xn / r;
sp = yn / r;
}
}
return map;
th = atanf(r);
th_dist = th;
const float th2 = th * th;
const float th3 = th * th * th;
th_dist += k1 * th3;
th_dist += k2 * th2 * th3;
th_dist += k3 * th2 * th2 * th3;
th_dist += k4 * th3 * th3 * th3;
th_dist += k5 * th3 * th3 * th2 * th3;
th_dist += k6 * th3 * th3 * th2 * th2 * th3;
xn = th_dist * cp;
yn = th_dist * sp;
p.x = fx * xn + cx;
p.y = fy * yn + cy;
return p;
}
cv::Point2f apply_radial_tangential(const CameraCalibration& cam, float xn, float yn) {
cv::Point2f p;
float r, sp, cp;
float th, th_dist;
float dx, dy;
r = sqrtf(xn * xn + yn * yn);
if (r == 0) {
cp = 1.0F;
sp = 0.0F;
} else {
cp = xn / r;
sp = yn / r;
}
th = atanf(r);
th_dist = th;
const float th2 = th * th;
const float th3 = th * th * th;
th_dist += k1 * th3;
th_dist += k2 * th2 * th3;
th_dist += k3 * th2 * th2 * th3;
th_dist += k4 * th3 * th3 * th3;
th_dist += k5 * th3 * th3 * th2 * th3;
th_dist += k6 * th3 * th3 * th2 * th2 * th3;
xn = th_dist * cp;
yn = th_dist * sp;
r = xn * xn + yn * yn;
dx = (2.0F * xn * xn + r) * p1 + 2.0F * xn * yn * p2;
dy = (2.0F * yn * yn + r) * p2 + 2.0F * xn * yn * p1;
xn += dx;
yn += dy;
p.x = fx * xn + cx;
p.y = fy * yn + cy;
return p;
}
cv::Point2f apply_fisheye624(const CameraCalibration& cam, float xn, float yn) {
cv::Point2f p;
float r, sp, cp;
float th, th_dist;
float dx, dy;
r = sqrtf(xn * xn + yn * yn);
if (r == 0) {
cp = 1.0F;
sp = 0.0F;
} else {
cp = xn / r;
sp = yn / r;
}
th = atanf(r);
th_dist = th;
const float th2 = th * th;
const float th3 = th * th * th;
const float th5 = th3 * th2;
const float th6 = th3 * th3;
th_dist += k1 * th3;
th_dist += k2 * th5;
th_dist += k3 * th5 * th2;
th_dist += k4 * th6 * th3;
th_dist += k5 * th6 * th5;
th_dist += k6 * th6 * th5 * th2;
xn = th_dist * cp;
yn = th_dist * sp;
r = xn * xn + yn * yn;
dx = (2.0F * xn * xn + r) * p1 + 2.0F * xn * yn * p2;
dy = (2.0F * yn * yn + r) * p2 + 2.0F * xn * yn * p1;
dx += (s1 + s2 * r) * r;
dy += (s3 + s4 * r) * r;
xn += dx;
yn += dy;
p.x = fx * xn + cx;
p.y = fy * yn + cy;
return p;
}
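For reference, the mapping implemented by apply_fisheye624 above (the "Fisheye624" model: six radial, two tangential and four thin-prism terms applied to the normalized coordinates $x_n, y_n$) can be summarized as

$$r = \sqrt{x_n^2 + y_n^2}, \qquad \theta = \arctan r, \qquad \theta_d = \theta + k_1\theta^3 + k_2\theta^5 + k_3\theta^7 + k_4\theta^9 + k_5\theta^{11} + k_6\theta^{13},$$

$$x' = \theta_d\,\frac{x_n}{r}, \qquad y' = \theta_d\,\frac{y_n}{r}, \qquad \rho^2 = x'^2 + y'^2,$$

$$\Delta x = p_1(\rho^2 + 2x'^2) + 2p_2 x'y' + (s_1 + s_2\rho^2)\rho^2, \qquad \Delta y = p_2(\rho^2 + 2y'^2) + 2p_1 x'y' + (s_3 + s_4\rho^2)\rho^2,$$

$$u = f_x(x' + \Delta x) + c_x, \qquad v = f_y(y' + \Delta y) + c_y,$$

with the $x_n/r$, $y_n/r$ factors falling back to $(1, 0)$ when $r = 0$, exactly as the code handles it.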
cv::Mat build_maps_variant(apply_variant_func func, const CameraCalibration& cam) {
cv::Mat map = cv::Mat(cv::Size(cam.resolution.width, cam.resolution.height), CV_16SC2);
cv::Point2f p;
cv::Vec2s v;
size_t x, y;
for (y = 0; y < map.rows; y++) {
for (x = 0; x < map.cols; x++) {
const float xn = (x - cx) / fx;
const float yn = (y - cy) / fy;
p = func(cam, xn, yn);
v[0] = static_cast<short>(p.x);
v[1] = static_cast<short>(p.y);
map.at<cv::Vec2s>(y, x) = v;
}
}
return map;
}
void remap(const cv::Mat &input, cv::Mat &output, const cv::Mat &map) {
cv::Vec2s v;
size_t x, y;
for (y = 0; y < input.rows; y++) {
for (x = 0; x < input.cols; x++) {
v = map.at<cv::Vec2s>(y, x);
output.at<uint8_t>(y, x) = input.at<uint8_t>(v[1], v[0]);
}
}
}
int main(int argc, const char **argv) {
cv::VideoCapture cap;
cv::Mat frame;
CameraCalibration cam [2];
device_imu_type dev;
cv::Ptr<cv::StereoBM> stereo = nullptr; // cv::StereoBM::create(16, 15);
const char *video_filename = "/dev/video0";
cap.open(0, cv::CAP_V4L2);
if (!cap.isOpened()) {
return 2;
}
if (argc > 1) {
video_filename = argv[1];
}
if (DEVICE_IMU_ERROR_NO_ERROR != device_imu_open(&dev, nullptr)) {
return 1;
}
device_imu_clear(&dev);
cap.set(cv::CAP_PROP_FORMAT, -1);
cap.set(cv::CAP_PROP_CONVERT_RGB, false);
size_t num_cameras = device_imu_get_num_of_cameras(&dev);
size_t num_sensors = 0;
cv::Mat img = cv::Mat(cv::Size(CAM_WIDTH, CAM_HEIGHT), CV_8UC1);
cv::Mat left = cv::Mat(cv::Size(CAM_HEIGHT, CAM_WIDTH), CV_8UC1);
cv::Mat right = cv::Mat(cv::Size(CAM_HEIGHT, CAM_WIDTH), CV_8UC1);
if (num_cameras > 0) {
const device_imu_camera_type *camera = device_imu_get_camera(&dev, 0);
cv::Mat map_fisheye624 =
build_maps_variant(apply_fisheye624, CAM_WIDTH, CAM_HEIGHT);
num_sensors = device_imu_camera_get_num_of_sensors(camera);
cv::Mat undist_left = cv::Mat(left.size(), left.type());
cv::Mat undist_right = cv::Mat(right.size(), right.type());
for (size_t i = 0; (i < num_sensors) && (i < 2); i++) {
memset(&(cam[i]), 0, sizeof(CameraCalibration));
while (true) {
cap.read(frame);
if (frame.empty()) {
break;
const device_imu_camera_sensor_type *sensor = device_imu_camera_get_sensor(camera, i);
const device_imu_size_type resolution = device_imu_sensor_get_resolution(sensor);
const device_imu_vec2_type cc = device_imu_sensor_get_cc(sensor);
const device_imu_vec2_type fc = device_imu_sensor_get_fc(sensor);
uint32_t num_kc = 0;
if (DEVICE_IMU_ERROR_NO_ERROR != device_imu_sensor_get_kc(sensor, &num_kc, nullptr)) {
continue;
}
cam[i].resolution.width = resolution.width;
cam[i].resolution.height = resolution.height;
cam[i].fx = fc.x;
cam[i].fy = fc.y;
cam[i].cx = cc.x;
cam[i].cy = cc.y;
cam[i].num_kc = num_kc;
if (num_kc > 12) {
num_kc = 12;
}
device_imu_sensor_get_kc(sensor, &num_kc, cam[i].kc);
}
}
if (frame.cols < CAM_BUFFER_SIZE) {
continue;
device_imu_close(&dev);
if ((cam[0].resolution.width != cam[1].resolution.width) ||
(cam[0].resolution.height != cam[1].resolution.height)) {
return 2;
}
uint8_t *blocks = frame.ptr();
uint8_t *data = img.ptr();
const cv::Size sensor_res (cam[0].resolution.height, cam[0].resolution.width);
const cv::Size camera_res (cam[0].resolution.width, cam[0].resolution.height);
const uint8_t *header = blocks + CAM_IMAGE_DATA_SIZE;
cv::Ptr<cv::StereoBM> stereo = nullptr; // cv::StereoBM::create(16, 15);
uint64_t t_ns = *((const uint64_t *)(header + 0x00));
uint64_t t_ms = *((const uint64_t *)(header + 0x3E));
cv::VideoCapture cap;
cv::Mat frame;
uint16_t seq = *((const uint16_t *)(header + 0x12));
uint8_t is_right = *(header + 0x3B);
size_t offset = 0;
size_t sum = CHUNK_SUM_LEN * 0xFF + 1;
for (size_t i = 0; i < CHUNK_AMOUNT; i++) {
size_t val = 0;
for (size_t j = 0; j < CHUNK_SUM_LEN; j++) {
val += blocks[CHUNK_SIZE * i + j];
}
if (val < sum) {
offset = i;
sum = val;
}
cap.open(video_filename, cv::CAP_V4L2);
if (!cap.isOpened()) {
return 3;
}
for (size_t i = 0; i < CHUNK_AMOUNT; i++) {
if (chunk_map[i] == offset) {
offset = i;
break;
}
cap.set(cv::CAP_PROP_FORMAT, -1);
cap.set(cv::CAP_PROP_CONVERT_RGB, false);
cv::Mat img = cv::Mat(sensor_res, CV_8UC1);
cv::Mat left = cv::Mat(camera_res, img.type());
cv::Mat right = cv::Mat(camera_res, img.type());
cv::Mat map0_fisheye624 = build_maps_variant(apply_fisheye624, cam[0]);
cv::Mat map1_fisheye624 = build_maps_variant(apply_fisheye624, cam[1]);
cv::Mat undist_left = cv::Mat(left.size(), left.type());
cv::Mat undist_right = cv::Mat(right.size(), right.type());
while (true) {
cap.read(frame);
if (frame.empty()) {
break;
}
if (frame.cols < CAM_BUFFER_SIZE) {
continue;
}
uint8_t *blocks = frame.ptr();
uint8_t *data = img.ptr();
const uint8_t *header = blocks + CAM_IMAGE_DATA_SIZE;
uint64_t t_ns = *((const uint64_t *)(header + 0x00));
uint64_t t_ms = *((const uint64_t *)(header + 0x3E));
uint16_t seq = *((const uint16_t *)(header + 0x12));
uint8_t is_right = *(header + 0x3B);
size_t offset = 0;
size_t sum = CHUNK_SUM_LEN * 0xFF + 1;
for (size_t i = 0; i < CHUNK_AMOUNT; i++) {
size_t val = 0;
for (size_t j = 0; j < CHUNK_SUM_LEN; j++) {
val += blocks[CHUNK_SIZE * i + j];
}
if (val < sum) {
offset = i;
sum = val;
}
}
for (size_t i = 0; i < CHUNK_AMOUNT; i++) {
if (chunk_map[i] == offset) {
offset = i;
break;
}
}
for (size_t i = 0; i < CHUNK_AMOUNT; i++) {
size_t src_idx = CHUNK_SIZE * chunk_map[(offset + i) % CHUNK_AMOUNT];
size_t dst_idx = CHUNK_SIZE * i;
std::memcpy(data + dst_idx, blocks + src_idx, CHUNK_SIZE);
}
if (is_right) {
cv::rotate(img, right, cv::ROTATE_90_CLOCKWISE);
} else {
cv::rotate(img, left, cv::ROTATE_90_COUNTERCLOCKWISE);
}
if ((left.empty()) || (right.empty())) {
continue;
}
// cv::remap(left, undist, map_intrinsic, cv::noArray(), cv::INTER_LINEAR);
remap(left, undist_left, map0_fisheye624);
remap(right, undist_right, map1_fisheye624);
if (stereo) {
cv::Mat disparity;
cv::Mat disp8;
stereo->compute(undist_left, undist_right, disparity);
disparity.convertTo(disp8, CV_8U, 255.0f / (16.0f * 16.0f));
cv::imshow("Live", disp8);
} else {
cv::Mat row;
cv::hconcat(left, right, row);
cv::hconcat(undist_left, undist_right, row);
cv::imshow("Live", row);
}
if (cv::waitKey(5) >= 0) {
break;
}
}
for (size_t i = 0; i < CHUNK_AMOUNT; i++) {
size_t src_idx = CHUNK_SIZE * chunk_map[(offset + i) % CHUNK_AMOUNT];
size_t dst_idx = CHUNK_SIZE * i;
cap.release();
cv::destroyAllWindows();
std::memcpy(data + dst_idx, blocks + src_idx, CHUNK_SIZE);
}
if (is_right) {
cv::rotate(img, right, cv::ROTATE_90_CLOCKWISE);
} else {
cv::rotate(img, left, cv::ROTATE_90_COUNTERCLOCKWISE);
}
if ((left.empty()) || (right.empty())) {
continue;
}
// cv::remap(left, undist, map_intrinsic, cv::noArray(), cv::INTER_LINEAR);
remap(left, undist_left, map_fisheye624);
remap(right, undist_right, map_fisheye624);
if (stereo) {
cv::Mat disparity;
cv::Mat disp8;
stereo->compute(undist_left, undist_right, disparity);
disparity.convertTo(disp8, CV_8U, 255.0f / (16.0f * 16.0f));
cv::imshow("Live", disp8);
} else {
cv::Mat row;
cv::hconcat(left, right, row);
cv::hconcat(undist_left, undist_right, row);
cv::imshow("Live", row);
}
if (cv::waitKey(5) >= 0) {
break;
}
}
cap.release();
cv::destroyAllWindows();
return 0;
}
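The capture loop above undistorts with the hand-rolled nearest-neighbour remap() and CV_16SC2 maps. As the commented-out cv::remap call hints, the same step could be delegated to OpenCV instead. A minimal sketch, not part of this commit: a hypothetical build_float_map that stores absolute source coordinates as CV_32FC2 so cv::remap can interpolate bilinearly; it reuses the CameraCalibration struct and apply_variant_func typedef from this file and applies the per-camera intrinsics on both sides of the mapping.

static cv::Mat build_float_map(apply_variant_func func, const CameraCalibration &cam) {
    // One entry per destination pixel: the (possibly fractional) source position.
    cv::Mat map(cv::Size(static_cast<int>(cam.resolution.width),
                         static_cast<int>(cam.resolution.height)), CV_32FC2);
    for (int y = 0; y < map.rows; y++) {
        for (int x = 0; x < map.cols; x++) {
            const float xn = (x - cam.cx) / cam.fx;
            const float yn = (y - cam.cy) / cam.fy;
            map.at<cv::Point2f>(y, x) = func(cam, xn, yn);
        }
    }
    return map;
}

// Hypothetical usage in place of the custom remap calls:
//   cv::remap(left, undist_left, map0, cv::noArray(), cv::INTER_LINEAR);
//   cv::remap(right, undist_right, map1, cv::noArray(), cv::INTER_LINEAR);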


@@ -2,7 +2,7 @@
//
// Created by thejackimonster on 30.03.23.
//
// Copyright (c) 2023-2024 thejackimonster. All rights reserved.
// Copyright (c) 2023-2025 thejackimonster. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
@@ -95,8 +95,16 @@ enum device_imu_event_t {
};
struct device_imu_ahrs_t;
struct device_imu_camera_sensor_t;
struct device_imu_camera_t;
struct device_imu_camera_calibration_t;
struct device_imu_calibration_t;
struct device_imu_vec2_t {
float x;
float y;
};
struct device_imu_vec3_t {
float x;
float y;
@@ -116,16 +124,27 @@ struct device_imu_euler_t {
float yaw;
};
struct device_imu_size_t {
uint16_t width;
uint16_t height;
};
typedef enum device_imu_error_t device_imu_error_type;
typedef struct device_imu_packet_t device_imu_packet_type;
typedef enum device_imu_event_t device_imu_event_type;
typedef struct device_imu_ahrs_t device_imu_ahrs_type;
typedef struct device_imu_camera_sensor_t device_imu_camera_sensor_type;
typedef struct device_imu_camera_t device_imu_camera_type;
typedef struct device_imu_camera_calibration_t device_imu_camera_calibration_type;
typedef struct device_imu_calibration_t device_imu_calibration_type;
typedef struct device_imu_vec2_t device_imu_vec2_type;
typedef struct device_imu_vec3_t device_imu_vec3_type;
typedef struct device_imu_quat_t device_imu_quat_type;
typedef struct device_imu_euler_t device_imu_euler_type;
typedef struct device_imu_size_t device_imu_size_type;
typedef void (*device_imu_event_callback)(
uint64_t timestamp,
@@ -176,6 +195,22 @@ device_imu_quat_type device_imu_get_orientation(const device_imu_ahrs_type* ahrs
device_imu_euler_type device_imu_get_euler(device_imu_quat_type quat);
uint32_t device_imu_get_num_of_cameras(device_imu_type *device);
const device_imu_camera_type* device_imu_get_camera(const device_imu_type *device, uint32_t index);
uint32_t device_imu_camera_get_num_of_sensors(const device_imu_camera_type *camera);
const device_imu_camera_sensor_type* device_imu_camera_get_sensor(const device_imu_camera_type *camera, uint32_t index);
device_imu_size_type device_imu_sensor_get_resolution(const device_imu_camera_sensor_type *sensor);
device_imu_vec2_type device_imu_sensor_get_cc(const device_imu_camera_sensor_type *sensor);
device_imu_vec2_type device_imu_sensor_get_fc(const device_imu_camera_sensor_type *sensor);
device_imu_error_type device_imu_sensor_get_kc(const device_imu_camera_sensor_type *sensor, uint32_t *num_kc, float *kc);
device_imu_error_type device_imu_close(device_imu_type* device);
#ifdef __cplusplus


@@ -1,7 +1,7 @@
//
// Created by thejackimonster on 30.03.23.
//
// Copyright (c) 2023-2024 thejackimonster. All rights reserved.
// Copyright (c) 2023-2025 thejackimonster. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
@@ -28,6 +28,9 @@
#include <Fusion/FusionAxes.h>
#include <Fusion/FusionMath.h>
#include <float.h>
#include <json-c/json_object.h>
#include <json-c/json_types.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
@@ -48,6 +51,29 @@
#define device_imu_error(msg) (0)
#endif
struct device_imu_camera_sensor_t {
FusionMatrix cameraMisalignment;
FusionVector cameraOffset;
uint16_t resolution [2];
float cc [2];
float fc [2];
uint32_t num_kc;
float* kc;
};
struct device_imu_camera_t {
uint32_t num_sensors;
device_imu_camera_sensor_type* sensors;
};
struct device_imu_camera_calibration_t {
uint32_t num_cameras;
device_imu_camera_type *cameras;
};
struct device_imu_calibration_t {
FusionMatrix gyroscopeMisalignment;
FusionVector gyroscopeSensitivity;
@@ -65,6 +91,8 @@ struct device_imu_calibration_t {
FusionVector hardIronOffset;
FusionQuaternion noises;
device_imu_camera_calibration_type cam;
};
static bool send_payload(device_imu_type* device, uint16_t size, const uint8_t* payload) {
@@ -189,6 +217,87 @@ static FusionQuaternion json_object_get_quaternion(struct json_object* obj) {
return quaternion;
}
static uint32_t json_object_get_array_f32(struct json_object* obj, float** array, uint32_t n) {
if ((!json_object_is_type(obj, json_type_array)) ||
((n > 0) && (json_object_array_length(obj) != n))) {
return 0;
}
if (n == 0) {
n = json_object_array_length(obj);
*array = malloc(sizeof(float) * n);
}
for (uint32_t i = 0; i < n; i++) {
(*array)[i] = (float) json_object_get_double(json_object_array_get_idx(obj, i));
}
return n;
}
static uint32_t json_object_get_array_u16(struct json_object* obj, uint16_t** array, uint32_t n) {
if ((!json_object_is_type(obj, json_type_array)) ||
((n > 0) && (json_object_array_length(obj) != n))) {
return 0;
}
if (n == 0) {
n = json_object_array_length(obj);
*array = malloc(sizeof(uint16_t) * n);
}
for (uint32_t i = 0; i < n; i++) {
(*array)[i] = (uint16_t) json_object_get_int(json_object_array_get_idx(obj, i));
}
return n;
}
static void init_device_imu_camera(device_imu_camera_type *camera, json_object *cam) {
uint32_t num_sensors = json_object_get_int(json_object_object_get(cam, "num_of_cameras"));
device_imu_camera_sensor_type *sensors = NULL;
if (num_sensors > 0) {
sensors = malloc(sizeof(device_imu_camera_sensor_type) * num_sensors);
}
if (!sensors) {
num_sensors = 0;
}
for (uint32_t n = 0; n < num_sensors; n++) {
device_imu_camera_sensor_type *sensor = &(sensors[n]);
char device_name [64];
snprintf(device_name, 64, "device_%u", (n + 1));
struct json_object* dev = json_object_object_get(cam, device_name);
FusionQuaternion imu_q_cam = json_object_get_quaternion(json_object_object_get(dev, "imu_q_cam"));
FusionVector cam_offset = json_object_get_vector(json_object_object_get(dev, "imu_p_cam"));
sensor->cameraMisalignment = FusionQuaternionToMatrix(imu_q_cam);
sensor->cameraOffset = cam_offset;
uint16_t* resolution = sensor->resolution;
float* cc = sensor->cc;
float* fc = sensor->fc;
float* kc = sensor->kc;
json_object_get_array_u16(json_object_object_get(dev, "resolution"), &resolution, 2);
json_object_get_array_f32(json_object_object_get(dev, "cc"), &cc, 2);
json_object_get_array_f32(json_object_object_get(dev, "fc"), &fc, 2);
sensor->num_kc = json_object_get_array_f32(json_object_object_get(dev, "kc"), &kc, 0);
sensor->kc = kc;
}
camera->num_sensors = num_sensors;
camera->sensors = sensors;
}
device_imu_error_type device_imu_open(device_imu_type* device, device_imu_event_callback callback) {
if (!device) {
device_imu_error("No device");
@@ -254,6 +363,8 @@ device_imu_error_type device_imu_open(device_imu_type* device, device_imu_event_
}
device->calibration = malloc(sizeof(device_imu_calibration_type));
memset(device->calibration, 0, sizeof(device_imu_calibration_type));
device_imu_reset_calibration(device);
if (!send_payload_msg(device, DEVICE_IMU_MSG_GET_CAL_DATA_LENGTH, 0, NULL)) {
@@ -315,6 +426,32 @@ device_imu_error_type device_imu_open(device_imu_type* device, device_imu_event_
device->calibration->magnetometerOffset = mag_bias;
device->calibration->noises = imu_noises;
struct json_object* rgb = json_object_object_get(root, "RGB_camera");
struct json_object* slam = json_object_object_get(root, "SLAM_camera");
uint32_t num_cameras_rgb = json_object_get_int(json_object_object_get(rgb, "num_of_cameras"));
uint32_t num_cameras_slam = json_object_get_int(json_object_object_get(slam, "num_of_cameras"));
const uint32_t num_cameras = (num_cameras_rgb > 0? 1 : 0) + (num_cameras_slam > 0? 1 : 0);
device_imu_camera_type *cameras = NULL;
uint32_t camera_index = 0;
if (num_cameras > 0) {
cameras = malloc(sizeof(device_imu_camera_type) * num_cameras);
}
if ((cameras) && (num_cameras_rgb > 0)) {
init_device_imu_camera(&(cameras[camera_index++]), rgb);
}
if ((cameras) && (num_cameras_slam > 0)) {
init_device_imu_camera(&(cameras[camera_index++]), slam);
}
device->calibration->cam.num_cameras = cameras? num_cameras : 0;
device->calibration->cam.cameras = cameras;
json_tokener_free(tokener);
free(calibration_data);
@@ -377,6 +514,35 @@ device_imu_error_type device_imu_reset_calibration(device_imu_type* device) {
device->calibration->noises = FUSION_IDENTITY_QUATERNION;
device->calibration->noises.element.w = 0.0f;
if (device->calibration->cam.cameras) {
for (uint32_t i = 0; i < device->calibration->cam.num_cameras; i++) {
if (!device->calibration->cam.cameras[i].sensors) {
continue;
}
for (uint32_t j = 0; j < device->calibration->cam.cameras[i].num_sensors; j++) {
if (!device->calibration->cam.cameras[i].sensors[j].kc) {
continue;
}
free(device->calibration->cam.cameras[i].sensors[j].kc);
device->calibration->cam.cameras[i].sensors[j].num_kc = 0;
device->calibration->cam.cameras[i].sensors[j].kc = NULL;
}
free(device->calibration->cam.cameras[i].sensors);
device->calibration->cam.cameras[i].num_sensors = 0;
device->calibration->cam.cameras[i].sensors = NULL;
}
free(device->calibration->cam.cameras);
}
device->calibration->cam.num_cameras = 0;
device->calibration->cam.cameras = NULL;
}
device_imu_error_type device_imu_load_calibration(device_imu_type* device, const char* path) {
@@ -397,11 +563,14 @@ device_imu_error_type device_imu_load_calibration(device_imu_type* device, const
}
device_imu_error_type result = DEVICE_IMU_ERROR_NO_ERROR;
const size_t calibration_size = (
sizeof(device_imu_calibration_type) - sizeof(device_imu_camera_calibration_type)
);
size_t count;
count = fread(device->calibration, 1, sizeof(device_imu_calibration_type), file);
count = fread(device->calibration, 1, calibration_size, file);
if (sizeof(device_imu_calibration_type) != count) {
if (calibration_size != count) {
device_imu_error("Not fully loaded");
result = DEVICE_IMU_ERROR_LOADING_FAILED;
}
@@ -432,11 +601,14 @@ device_imu_error_type device_imu_save_calibration(device_imu_type* device, const
}
device_imu_error_type result = DEVICE_IMU_ERROR_NO_ERROR;
const size_t calibration_size = (
sizeof(device_imu_calibration_type) - sizeof(device_imu_camera_calibration_type)
);
size_t count;
count = fwrite(device->calibration, 1, sizeof(device_imu_calibration_type), file);
count = fwrite(device->calibration, 1, calibration_size, file);
if (sizeof(device_imu_calibration_type) != count) {
if (calibration_size != count) {
device_imu_error("Not fully saved");
result = DEVICE_IMU_ERROR_SAVING_FAILED;
}
@@ -948,6 +1120,82 @@ device_imu_euler_type device_imu_get_euler(device_imu_quat_type quat) {
return e;
}
uint32_t device_imu_get_num_of_cameras(device_imu_type *device) {
if (!device->calibration) {
return 0;
}
return device->calibration->cam.num_cameras;
}
const device_imu_camera_type* device_imu_get_camera(const device_imu_type *device, uint32_t index) {
if ((!device->calibration) || (!device->calibration->cam.cameras)) {
return NULL;
}
return &(device->calibration->cam.cameras[index]);
}
uint32_t device_imu_camera_get_num_of_sensors(const device_imu_camera_type *camera) {
if (!camera) {
return 0;
}
return camera->num_sensors;
}
const device_imu_camera_sensor_type* device_imu_camera_get_sensor(const device_imu_camera_type *camera, uint32_t index) {
if (!camera->sensors) {
return NULL;
}
return &(camera->sensors[index]);
}
device_imu_size_type device_imu_sensor_get_resolution(const device_imu_camera_sensor_type *sensor) {
device_imu_size_type resolution;
resolution.width = sensor->resolution[0];
resolution.height = sensor->resolution[1];
return resolution;
}
device_imu_vec2_type device_imu_sensor_get_cc(const device_imu_camera_sensor_type *sensor) {
device_imu_vec2_type cc;
cc.x = sensor->cc[0];
cc.y = sensor->cc[1];
return cc;
}
device_imu_vec2_type device_imu_sensor_get_fc(const device_imu_camera_sensor_type *sensor) {
device_imu_vec2_type fc;
fc.x = sensor->fc[0];
fc.y = sensor->fc[1];
return fc;
}
device_imu_error_type device_imu_sensor_get_kc(const device_imu_camera_sensor_type *sensor, uint32_t *num_kc, float *kc) {
if ((!sensor) || (!num_kc)) {
device_imu_error("Wrong argument");
return DEVICE_IMU_ERROR_NO_ALLOCATION;
}
if (!kc) {
*num_kc = sensor->num_kc;
} else {
uint32_t n = *num_kc;
if (sensor->num_kc < n) {
n = sensor->num_kc;
}
for (uint32_t i = 0; i < n; i++) {
kc[i] = sensor->kc[i];
}
}
return DEVICE_IMU_ERROR_NO_ERROR;
}
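The accessor above follows a two-call pattern, mirroring how the camera example queries it: call device_imu_sensor_get_kc once with kc == NULL to learn how many coefficients the sensor carries, then again with a buffer. A minimal sketch, assuming sensor was obtained via device_imu_camera_get_sensor():

uint32_t num_kc = 0;
float kc[12];

/* First call: kc == NULL, so only the coefficient count is written to num_kc. */
if (DEVICE_IMU_ERROR_NO_ERROR == device_imu_sensor_get_kc(sensor, &num_kc, NULL)) {
    if (num_kc > 12) {
        num_kc = 12; /* clamp to the local buffer, as the example application does */
    }

    /* Second call: at most num_kc coefficients are copied into kc. */
    device_imu_sensor_get_kc(sensor, &num_kc, kc);
}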
device_imu_error_type device_imu_close(device_imu_type* device) {
if (!device) {
device_imu_error("No device");
@@ -955,6 +1203,26 @@ device_imu_error_type device_imu_close(device_imu_type* device) {
}
if (device->calibration) {
if (device->calibration->cam.cameras) {
for (uint32_t i = 0; i < device->calibration->cam.num_cameras; i++) {
if (!device->calibration->cam.cameras[i].sensors) {
continue;
}
for (uint32_t j = 0; j < device->calibration->cam.cameras[i].num_sensors; j++) {
if (!device->calibration->cam.cameras[i].sensors[j].kc) {
continue;
}
free(device->calibration->cam.cameras[i].sensors[j].kc);
}
free(device->calibration->cam.cameras[i].sensors);
}
free(device->calibration->cam.cameras);
}
free(device->calibration);
}