基本思想:IMU 数据不像图像那样容易可视化,这里先参考官方 demo 记录一下基本用法;原理部分还没有深入理解,后续再补充。IMU 的背景介绍可参考下面这篇文章,以及官方的知乎专栏。
IMU Sensors: Everything You Need To Know! – Embedded Inventor
cmakelists.txt
cmake_minimum_required(VERSION 3.16)
project(depthai)
set(CMAKE_CXX_STANDARD 11)
find_package(OpenCV REQUIRED)
#message(STATUS ${OpenCV_INCLUDE_DIRS})
# Add header search paths: OpenCV plus this project's own include tree.
include_directories(${OpenCV_INCLUDE_DIRS})
include_directories(${CMAKE_SOURCE_DIR}/include)
include_directories(${CMAKE_SOURCE_DIR}/include/utility)
# Locate the depthai-core SDK (CONFIG mode); linked below together with OpenCV.
find_package(depthai CONFIG REQUIRED)
add_executable(depthai main.cpp include/utility/utility.cpp)
target_link_libraries(depthai ${OpenCV_LIBS} depthai::opencv)
main.cpp 参考官方demo,一步步学,一步步创新
#include <chrono>
#include "utility.hpp"
#include "depthai/depthai.hpp"
using namespace std::chrono;
int main(int argc, char** argv) {
    dai::Pipeline pipeline;

    // IMU node: raw accelerometer at 500 Hz and raw gyroscope at 400 Hz.
    auto imuNode = pipeline.create<dai::node::IMU>();
    imuNode->enableIMUSensor(dai::IMUSensor::ACCELEROMETER_RAW, 500);
    imuNode->enableIMUSensor(dai::IMUSensor::GYROSCOPE_RAW, 400);
    // Cap how many IMU reports are batched into one message; smaller batches
    // reduce on-device CPU load and packet loss when many nodes are running.
    imuNode->setMaxBatchReports(10);

    // Stream the IMU output to the host over XLink under the name "imu".
    auto imuOut = pipeline.create<dai::node::XLinkOut>();
    imuOut->setStreamName("imu");
    imuNode->out.link(imuOut->input);

    // Upload the pipeline to the device and start it.
    dai::Device device(pipeline);

    // Host-side queue: maxSize 8, non-blocking (old packets are dropped).
    auto imuQueue = device.getOutputQueue("imu", 8, false);

    // The earliest timestamp of the first packet becomes t = 0 for printing.
    bool haveBaseTs = false;
    auto baseTs = std::chrono::time_point<std::chrono::steady_clock, std::chrono::steady_clock::duration>();

    while(true) {
        auto imuData = imuQueue->get<dai::IMUData>();
        for(auto& packet : imuData->packets) {
            auto& accel = packet.acceleroMeter;
            auto& gyro = packet.gyroscope;
            auto accelTsAbs = accel.timestamp.get();
            auto gyroTsAbs = gyro.timestamp.get();
            if(!haveBaseTs) {
                baseTs = std::min(accelTsAbs, gyroTsAbs);
                haveBaseTs = true;
            }
            auto accelTs = accelTsAbs - baseTs;
            auto gyroTs = gyroTsAbs - baseTs;
            printf("Accelerometer timestamp: %ld ms\n", static_cast<long>(duration_cast<milliseconds>(accelTs).count()));
            printf("Accelerometer [m/s^2]: x: %.3f y: %.3f z: %.3f \n", accel.x, accel.y, accel.z);
            printf("Gyroscope timestamp: %ld ms\n", static_cast<long>(duration_cast<milliseconds>(gyroTs).count()));
            printf("Gyroscope [rad/s]: x: %.3f y: %.3f z: %.3f \n", gyro.x, gyro.y, gyro.z);
        }
        cv::waitKey(1);
    }
    return 0;
}
这段代码输出信息的具体含义以及后续如何使用,目前还没有完全弄清楚,运行结果如下:
/home/ubuntu/untitled9/cmake-build-debug/depthai
Accelerometer timestamp: 0 ms
Accelerometer [m/s^2]: x: 0.345 y: -0.259 z: -10.132
Gyroscope timestamp: 1 ms
Gyroscope [rad/s]: x: -0.001 y: -0.010 z: 0.009
Accelerometer timestamp: 3 ms
Accelerometer [m/s^2]: x: 0.335 y: -0.268 z: -10.123
Gyroscope timestamp: 3 ms
Gyroscope [rad/s]: x: -0.003 y: 0.004 z: 0.004
Accelerometer timestamp: 5 ms
Accelerometer [m/s^2]: x: 0.335 y: -0.287 z: -10.151
二、SpatialLocationCalculator节点
cmakelists.txt
cmake_minimum_required(VERSION 3.16)
project(depthai)

set(CMAKE_CXX_STANDARD 11)

# Locate dependencies: OpenCV and the depthai-core SDK (CONFIG mode).
find_package(OpenCV REQUIRED)
find_package(depthai CONFIG REQUIRED)

# Header search paths: OpenCV plus this project's own include tree.
include_directories(
    ${OpenCV_INCLUDE_DIRS}
    ${CMAKE_SOURCE_DIR}/include
    ${CMAKE_SOURCE_DIR}/include/utility
)

add_executable(depthai main.cpp include/utility/utility.cpp)
target_link_libraries(depthai ${OpenCV_LIBS} depthai::opencv)
main.cpp
#include <chrono>
#include "utility.hpp"
#include "depthai/depthai.hpp"
using namespace std::chrono;
int main(int argc, char** argv) {
dai::Pipeline pipeline;
// Pipeline nodes: stereo pair -> StereoDepth -> SpatialLocationCalculator.
// NOTE(review): the ColorCamera node below is created but never linked to
// anything in this pipeline — it appears to be unused dead weight; confirm
// whether it can be removed.
auto cam=pipeline.create<dai::node::ColorCamera>();
cam->setBoardSocket(dai::CameraBoardSocket::RGB);
auto spa=pipeline.create<dai::node::SpatialLocationCalculator>();
auto left=pipeline.create<dai::node::MonoCamera>();
left->setBoardSocket(dai::CameraBoardSocket::LEFT);
left->setResolution(dai::MonoCameraProperties::SensorResolution::THE_800_P);
auto right=pipeline.create<dai::node::MonoCamera>();
right->setBoardSocket(dai::CameraBoardSocket::RIGHT);
right->setResolution(dai::MonoCameraProperties::SensorResolution::THE_800_P);
auto stero=pipeline.create<dai::node::StereoDepth>();
stero->setLeftRightCheck(true);
// XLinkIn stream "config" lets the host push ROI configs to the calculator.
auto conf=pipeline.create<dai::node::XLinkIn>();
conf->setStreamName("config");
left->out.link(stero->left);
right->out.link(stero->right);
// Output streams: "out" carries spatial locations, "depthout" the depth frame
// passed through the calculator (so both stay in sync).
auto xlinkoutpreviewOut=pipeline.create<dai::node::XLinkOut>();
xlinkoutpreviewOut->setStreamName("out");
auto xlinkoutpdepthOut=pipeline.create<dai::node::XLinkOut>();
xlinkoutpdepthOut->setStreamName("depthout");
stero->depth.link(spa->inputDepth);
conf->out.link(spa->inputConfig);
spa->out.link(xlinkoutpreviewOut->input);
spa->passthroughDepth.link(xlinkoutpdepthOut->input);
// Upload the pipeline to the device and start it.
dai::Device device(pipeline);
// Host-side queues; maxSize is the buffer depth, non-blocking.
auto outqueue=device.getOutputQueue("out",8, false);//maxSize: buffered messages
auto depthout=device.getOutputQueue("depthout",8, false);//maxSize: buffered messages
auto outconfig=device.getInputQueue("config");
// Build one ROI config and send it once before the loop; depth values outside
// [lowerThreshold, upperThreshold] (mm) are ignored by the calculator.
dai::SpatialLocationCalculatorConfig cf;
dai::SpatialLocationCalculatorConfigThresholds score;
score.lowerThreshold=100;
score.upperThreshold=1000;
std::vector<dai::SpatialLocationCalculatorConfigData> ROIs;
// NOTE(review): the ROI is given in pixel-looking values (200,300,40,40) but
// roi.denormalize() is called below as if it were a normalized [0..1] rect —
// verify against the dai::Rect docs which convention applies here.
ROIs.push_back({dai::Rect(200,300,40,40),score});
cf.setROIs(ROIs);
outconfig->send(cf);
while(1){
// Fetch the passthrough depth frame and colorize it for display.
auto Depthframe=depthout->get<dai::ImgFrame>();
auto Depthimage=Depthframe->getCvFrame();
cv::Mat depthFrameColor;
cv::normalize(Depthimage, depthFrameColor, 255, 0, cv::NORM_INF, CV_8UC1);
cv::equalizeHist(depthFrameColor, depthFrameColor);
cv::applyColorMap(depthFrameColor, depthFrameColor, cv::COLORMAP_COOL);
// Fetch the spatial results and overlay each ROI with its XYZ coordinates.
auto spatialData = outqueue->get<dai::SpatialLocationCalculatorData>()->getSpatialLocations();
for(auto depthData : spatialData) {
auto roi = depthData.config.roi;
roi = roi.denormalize(Depthimage.cols, Depthimage.rows);
auto xmin = (int)roi.topLeft().x;
auto ymin = (int)roi.topLeft().y;
auto xmax = (int)roi.bottomRight().x;
auto ymax = (int)roi.bottomRight().y;
auto depthMin = depthData.depthMin;
auto depthMax = depthData.depthMax;
// NOTE(review): cv::FONT_HERSHEY_SIMPLEX (== 0) is passed as the rectangle
// thickness argument — copied from the official demo, but a font enum is
// not a thickness; confirm intent (likely should be 1).
cv::rectangle(depthFrameColor, cv::Rect(cv::Point(xmin, ymin), cv::Point(xmax, ymax)), cv::Scalar(255,255,255), cv::FONT_HERSHEY_SIMPLEX);
std::stringstream depthX;
depthX << "X: " << (int)depthData.spatialCoordinates.x << " mm";
cv::putText(depthFrameColor, depthX.str(), cv::Point(xmin + 10, ymin + 20), cv::FONT_HERSHEY_TRIPLEX, 0.5, cv::Scalar(255,255,255));
std::stringstream depthY;
depthY << "Y: " << (int)depthData.spatialCoordinates.y << " mm";
cv::putText(depthFrameColor, depthY.str(), cv::Point(xmin + 10, ymin + 35), cv::FONT_HERSHEY_TRIPLEX, 0.5, cv::Scalar(255,255,255));
std::stringstream depthZ;
depthZ << "Z: " << (int)depthData.spatialCoordinates.z << " mm";
cv::putText(depthFrameColor, depthZ.str(), cv::Point(xmin + 10, ymin + 50), cv::FONT_HERSHEY_TRIPLEX, 0.5, cv::Scalar(255,255,255));
}
//auto Depthframe=outqueue->get<dai::ImgFrame>();
// auto Depthimage=Depthframe->getCvFrame();
cv::imshow("depthFrameColor",depthFrameColor);
// NOTE(review): writing a JPEG on every loop iteration hammers the disk;
// presumably debug-only — consider removing or rate-limiting it.
cv::imwrite("depthFrameColor.jpg",depthFrameColor);
cv::waitKey(1);
}
return 0;
}
测试结果
参考
Nodes — DepthAI documentation | Luxonis
本文内容由网友自发贡献,版权归原作者所有,本站不承担相应法律责任。如您发现有涉嫌抄袭侵权的内容,请联系:hwhale#tublm.com(使用前将#替换为@)