del subprojects

This commit is contained in:
Joey 2024-03-27 03:45:53 +08:00
parent b2e9c843a4
commit e3f3bf4071
121 changed files with 15852 additions and 5 deletions

@ -1 +0,0 @@
Subproject commit 5206f172c2de996ae3dcdbe42f0cbedca3e2aa70

View File

@ -0,0 +1,18 @@
---
Language: Cpp
BasedOnStyle: Google
AccessModifierOffset: -2
AlignAfterOpenBracket: AlwaysBreak
BraceWrapping:
AfterClass: true
AfterFunction: true
AfterNamespace: true
AfterStruct: true
BreakBeforeBraces: Custom
ColumnLimit: 100
ConstructorInitializerIndentWidth: 0
ContinuationIndentWidth: 2
DerivePointerAlignment: false
PointerAlignment: Middle
ReflowComments: false

View File

@ -0,0 +1,55 @@
---
Checks: '-*,
performance-*,
-performance-unnecessary-value-param,
llvm-namespace-comment,
modernize-redundant-void-arg,
modernize-use-nullptr,
modernize-use-default,
modernize-use-override,
modernize-loop-convert,
modernize-make-shared,
modernize-make-unique,
misc-unused-parameters,
readability-named-parameter,
readability-redundant-smartptr-get,
readability-redundant-string-cstr,
readability-simplify-boolean-expr,
readability-container-size-empty,
readability-identifier-naming,
'
HeaderFilterRegex: ''
AnalyzeTemporaryDtors: false
CheckOptions:
- key: llvm-namespace-comment.ShortNamespaceLines
value: '10'
- key: llvm-namespace-comment.SpacesBeforeComments
value: '2'
- key: misc-unused-parameters.StrictMode
value: '1'
- key: readability-braces-around-statements.ShortStatementLines
value: '2'
# type names
- key: readability-identifier-naming.ClassCase
value: CamelCase
- key: readability-identifier-naming.EnumCase
value: CamelCase
- key: readability-identifier-naming.UnionCase
value: CamelCase
# method names
- key: readability-identifier-naming.MethodCase
value: camelBack
# variable names
- key: readability-identifier-naming.VariableCase
value: lower_case
- key: readability-identifier-naming.ClassMemberSuffix
value: '_'
# const static or global variables are UPPER_CASE
- key: readability-identifier-naming.EnumConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.StaticConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.ClassConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.GlobalVariableCase
value: UPPER_CASE

View File

@ -0,0 +1,30 @@
name: Build and Test (humble)
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
build-and-test:
runs-on: ubuntu-22.04
steps:
- name: Setup ROS 2
uses: ros-tooling/setup-ros@v0.4
with:
required-ros-distributions: humble
- name: Build rm_auto_aim
uses: ros-tooling/action-ros-ci@v0.2
with:
package-name: rm_auto_aim
target-ros2-distro: humble
skip-tests: true
- name: Test armor_detector
run: |
source /opt/ros/humble/setup.sh
cd ros_ws
colcon test --packages-select armor_detector --event-handlers console_cohesion+ --return-code-on-test-failure
- name: Test armor_tracker
run: |
source /opt/ros/humble/setup.sh
cd ros_ws
colcon test --packages-select armor_tracker --event-handlers console_cohesion+ --return-code-on-test-failure

170
src/rm_auto_aim/.gitignore vendored Normal file
View File

@ -0,0 +1,170 @@
# Created by https://www.gitignore.io/api/c++,linux,macos,clion
# Edit at https://www.gitignore.io/?templates=c++,linux,macos,clion
### C++ ###
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
*.lib
# Executables
*.exe
*.out
*.app
### CLion ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### CLion Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
.idea/**/sonarlint/
# SonarQube Plugin
.idea/**/sonarIssues.xml
# Markdown Navigator plugin
.idea/**/markdown-navigator.xml
.idea/**/markdown-navigator/
### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# End of https://www.gitignore.io/api/c++,linux,macos,clion

21
src/rm_auto_aim/LICENSE Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022 ChenJun
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

61
src/rm_auto_aim/README.md Normal file
View File

@ -0,0 +1,61 @@
# rm_auto_aim
## Overview
RoboMaster 装甲板自瞄算法模块
<img src="docs/rm_vision.svg" alt="rm_vision" width="200" height="200">
该项目为 [rm_vision](https://github.com/chenjunnn/rm_vision) 的子模块
若有帮助请Star这个项目感谢~
### License
The source code is released under an [MIT license](rm_auto_aim/LICENSE).
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
Author: Chen Jun
运行环境Ubuntu 22.04 / ROS2 Humble (未在其他环境下测试)
![Build Status](https://github.com/chenjunnn/rm_auto_aim/actions/workflows/ros_ci.yml/badge.svg)
## Building from Source
### Building
在 Ubuntu 22.04 环境下安装 [ROS2 Humble](https://docs.ros.org/en/humble/Installation/Ubuntu-Install-Debians.html)
创建 ROS 工作空间后 clone 项目,使用 rosdep 安装依赖后编译代码
cd ros_ws/src
git clone https://github.com/chenjunnn/rm_auto_aim.git
cd ..
rosdep install --from-paths src --ignore-src -r -y
colcon build --symlink-install --packages-up-to auto_aim_bringup
### Testing
Run the tests with
colcon test --packages-up-to auto_aim_bringup
## Packages
- [armor_detector](armor_detector)
订阅相机参数及图像流进行装甲板的识别并解算三维位置输出识别到的装甲板在输入frame下的三维位置 (一般是以相机光心为原点的相机坐标系)
- [armor_tracker](armor_tracker)
订阅识别节点发布的装甲板三维位置及机器人的坐标转换信息将装甲板三维位置变换到指定惯性系一般是以云台中心为原点IMU 上电时的 Yaw 朝向为 X 轴的惯性系)下,然后将装甲板目标送入跟踪器中,输出跟踪机器人在指定惯性系下的状态
- auto_aim_interfaces
定义了识别节点和处理节点的接口以及定义了用于 Debug 的信息
- auto_aim_bringup
包含启动识别节点和处理节点的默认参数文件及 launch 文件

View File

@ -0,0 +1,8 @@
# 默认忽略的文件
/shelf/
/workspace.xml
# 基于编辑器的 HTTP 客户端请求
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@ -0,0 +1,2 @@
<?xml version="1.0" encoding="UTF-8"?>
<module classpath="CMake" type="CPP_MODULE" version="4" />

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CMakeWorkspace" PROJECT_DIR="$PROJECT_DIR$" />
</project>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/armor_detector.iml" filepath="$PROJECT_DIR$/.idea/armor_detector.iml" />
</modules>
</component>
</project>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

View File

@ -0,0 +1,68 @@
cmake_minimum_required(VERSION 3.10)
project(armor_detector)
## Use C++14
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
## By adding -Wall and -Werror, the compiler does not ignore warnings anymore,
## enforcing cleaner code.
add_definitions(-Wall -Werror)
## Export compile commands for clangd
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
#######################
## Find dependencies ##
#######################
find_package(ament_cmake_auto REQUIRED)
find_package(OpenCV REQUIRED)
ament_auto_find_build_dependencies()
###########
## Build ##
###########
ament_auto_add_library(${PROJECT_NAME} SHARED
DIRECTORY src
)
target_include_directories(${PROJECT_NAME} PUBLIC ${OpenCV_INCLUDE_DIRS})
target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS})
rclcpp_components_register_node(${PROJECT_NAME}
PLUGIN rm_auto_aim::ArmorDetectorNode
EXECUTABLE armor_detector_node
)
#############
## Testing ##
#############
if(BUILD_TESTING)
find_package(ament_lint_auto REQUIRED)
list(APPEND AMENT_LINT_AUTO_EXCLUDE
ament_cmake_copyright
ament_cmake_uncrustify
ament_cmake_cpplint
)
ament_lint_auto_find_test_dependencies()
find_package(ament_cmake_gtest)
ament_add_gtest(test_node_startup test/test_node_startup.cpp)
target_link_libraries(test_node_startup ${PROJECT_NAME})
ament_add_gtest(test_number_cls test/test_number_cls.cpp)
target_link_libraries(test_number_cls ${PROJECT_NAME})
endif()
#############
## Install ##
#############
ament_auto_package(
INSTALL_TO_SHARE
model
)

View File

@ -0,0 +1,104 @@
# armor_detector
- [DetectorNode](#basedetectornode)
- [Detector](#detector)
- [NumberClassifier](#numberclassifier)
- [PnPSolver](#pnpsolver)
## 识别节点
订阅相机参数及图像流进行装甲板的识别并解算三维位置输出识别到的装甲板在输入frame下的三维位置 (一般是以相机光心为原点的相机坐标系)
### DetectorNode
装甲板识别节点
包含[Detector](#detector)
包含[PnPSolver](#pnpsolver)
订阅:
- 相机参数 `/camera_info`
- 彩色图像 `/image_raw`
发布:
- 识别目标 `/detector/armors`
静态参数:
- 筛选灯条的参数 `light`
- 长宽比范围 `min/max_ratio`
- 最大倾斜角度 `max_angle`
- 筛选灯条配对结果的参数 `armor`
- 两灯条的最小长度之比(短边/长边)`min_light_ratio`
- 装甲板两灯条中心的距离范围(大装甲板)`min/max_large_center_distance`
- 装甲板两灯条中心的距离范围(小装甲板)`min/max_small_center_distance`
- 装甲板的最大倾斜角度 `max_angle`
动态参数:
- 是否发布 debug 信息 `debug`
- 识别目标颜色 `detect_color`
- 二值化的最小阈值 `binary_thres`
- 数字分类器 `classifier`
- 置信度阈值 `threshold`
## Detector
装甲板识别器
### preprocessImage
预处理
| ![](docs/raw.png) | ![](docs/hsv_bin.png) | ![](docs/gray_bin.png) |
| :---------------: | :-------------------: | :--------------------: |
| 原图 | 通过颜色二值化 | 通过灰度二值化 |
由于一般工业相机的动态范围不够大,导致若要能够清晰分辨装甲板的数字,得到的相机图像中灯条中心就会过曝,灯条中心的像素点的值往往都是 R=B。根据颜色信息来进行二值化效果不佳因此此处选择了直接通过灰度图进行二值化将灯条的颜色判断放到后续处理中。
### findLights
寻找灯条
通过 findContours 得到轮廓,再通过 minAreaRect 获得最小外接矩形,对其进行长宽比和倾斜角度的判断,可以高效的筛除形状不满足的亮斑。
判断灯条颜色这里采用了对轮廓内的R/B值求和判断两和的大小的方法`sum_r > sum_b` 则认为是红色灯条,反之则认为是蓝色灯条。
| ![](docs/red.png) | ![](docs/blue.png) |
| :---------------: | :----------------: |
| 提取出的红色灯条 | 提取出的蓝色灯条 |
### matchLights
配对灯条
根据 `detect_color` 选择对应颜色的灯条进行两两配对,首先筛除掉两条灯条中间包含另一个灯条的情况,然后根据两灯条的长度之比、两灯条中心的距离、配对出装甲板的倾斜角度来筛选掉条件不满足的结果,得到形状符合装甲板特征的灯条配对。
## NumberClassifier
数字分类器
### extractNumbers
提取数字
| ![](docs/num_raw.png) | ![](docs/num_warp.png) | ![](docs/num_roi.png) | ![](docs/num_bin.png) |
| :-------------------: | :--------------------: | :-------------------: | :-------------------: |
| 原图 | 透视变换 | 取ROI | 二值化 |
将每条灯条上下的角点拉伸到装甲板的上下边缘作为待变换点进行透视变换再对变换后的图像取ROI。考虑到数字图案实质上就是黑色背景+白色图案,所以此处使用了大津法进行二值化。
### Classify
分类
由于上一步对于数字的提取效果已经非常好数字图案的特征非常清晰明显装甲板的远近、旋转都不会使图案产生过多畸变且图案像素点少所以我们使用多层感知机MLP进行分类。
网络结构中定义了两个隐藏层和一个分类层,将二值化后的数字展平成 20x28=560 维的输入,送入网络进行分类。
网络结构:
![](docs/model.svg)
<!-- 效果图: -->
<!-- ![](docs/result.png) -->
## PnPSolver
PnP解算器
[Perspective-n-Point (PnP) pose computation](https://docs.opencv.org/4.x/d5/d1f/calib3d_solvePnP.html)
PnP解算器将 `cv::solvePnP()` 封装,接口中传入 `Armor` 类型的数据即可得到 `geometry_msgs::msg::Point` 类型的三维坐标。
考虑到装甲板的四个点在一个平面上在PnP解算方法上我们选择了 `cv::SOLVEPNP_IPPE` (Method is based on the paper of T. Collins and A. Bartoli. ["Infinitesimal Plane-Based Pose Estimation"](https://link.springer.com/article/10.1007/s11263-014-0725-5). This method requires coplanar object points.)

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 17 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 16 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 307 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 260 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 151 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

View File

@ -0,0 +1,72 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.
#ifndef ARMOR_DETECTOR__ARMOR_HPP_
#define ARMOR_DETECTOR__ARMOR_HPP_
#include <opencv2/core.hpp>
// STL
#include <algorithm>
#include <string>
namespace rm_auto_aim {
const int RED = 0;
const int BLUE = 1;
enum class ArmorType {
SMALL, LARGE, INVALID
};
const std::string ARMOR_TYPE_STR[3] = {"small", "large", "invalid"};
// A light bar: the rotated rect fitted to a contour, extended with the
// midpoints of its short sides (top/bottom), its length/width and its
// tilt angle from vertical in degrees.
struct Light : public cv::RotatedRect {
  Light() = default;
  explicit Light(cv::RotatedRect box) : cv::RotatedRect(box) {
    // Sort the four corners by y so p[0]/p[1] form the top edge and
    // p[2]/p[3] the bottom edge.
    cv::Point2f p[4];
    box.points(p);
    std::sort(p, p + 4, [](const cv::Point2f &a, const cv::Point2f &b) { return a.y < b.y; });
    top = (p[0] + p[1]) / 2;
    bottom = (p[2] + p[3]) / 2;
    length = cv::norm(top - bottom);
    width = cv::norm(p[0] - p[1]);
    // Tilt from the vertical axis, converted from radians to degrees.
    tilt_angle = std::atan2(std::abs(top.x - bottom.x), std::abs(top.y - bottom.y));
    tilt_angle = tilt_angle / CV_PI * 180;
  }

  // RED or BLUE. NOTE(review): not initialized by the constructor; it is
  // assigned later (see Detector::findLights) — indeterminate until then.
  int color;
  cv::Point2f top, bottom;
  double length;
  double width;
  float tilt_angle;
};
// A pairing of two lights that passed the armor shape checks, plus the
// number-classification fields filled in later by NumberClassifier.
struct Armor {
  Armor() = default;
  // Orders the two lights left/right by image x and computes the center
  // as the midpoint of the two light centers.
  Armor(const Light &l1, const Light &l2) {
    if (l1.center.x < l2.center.x) {
      left_light = l1, right_light = l2;
    } else {
      left_light = l2, right_light = l1;
    }
    center = (left_light.center + right_light.center) / 2;
  }

  // Light pairs part
  Light left_light, right_light;
  cv::Point2f center;
  // NOTE(review): not set by either constructor; assigned by the caller
  // (see Detector::matchLights) — indeterminate until then.
  ArmorType type;

  // Number part (filled by NumberClassifier)
  cv::Mat number_img;
  std::string number;
  float confidence;
  // NOTE: the "classfication" spelling is part of the public interface.
  std::string classfication_result;
};
} // namespace rm_auto_aim
#endif // ARMOR_DETECTOR__ARMOR_HPP_

View File

@ -0,0 +1,83 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.
#ifndef ARMOR_DETECTOR__DETECTOR_HPP_
#define ARMOR_DETECTOR__DETECTOR_HPP_
// OpenCV
#include <opencv2/core.hpp>
#include <opencv2/core/types.hpp>
// STD
#include <cmath>
#include <string>
#include <vector>
#include "armor_detector/armor.hpp"
#include "armor_detector/number_classifier.hpp"
#include "auto_aim_interfaces/msg/debug_armors.hpp"
#include "auto_aim_interfaces/msg/debug_lights.hpp"
namespace rm_auto_aim
{
// Armor-plate detector: binarizes the input image, extracts light bars,
// pairs them into armor candidates and hands them to the number
// classifier. Parameters are public so the owning node can update them
// at runtime.
class Detector
{
public:
  // Shape limits for accepting a contour as a light bar.
  struct LightParams
  {
    // width / height
    double min_ratio;
    double max_ratio;
    // vertical angle
    double max_angle;
  };

  // Geometric limits for pairing two lights into an armor.
  struct ArmorParams
  {
    // Minimum length ratio between the two lights (short / long).
    double min_light_ratio;
    // light pairs distance, in units of the average light length
    double min_small_center_distance;
    double max_small_center_distance;
    double min_large_center_distance;
    double max_large_center_distance;
    // horizontal angle
    double max_angle;
  };

  Detector(const int & bin_thres, const int & color, const LightParams & l, const ArmorParams & a);

  // Runs the full pipeline on one image and returns the detected armors.
  std::vector<Armor> detect(const cv::Mat & input);

  cv::Mat preprocessImage(const cv::Mat & input);
  std::vector<Light> findLights(const cv::Mat & rbg_img, const cv::Mat & binary_img);
  std::vector<Armor> matchLights(const std::vector<Light> & lights);

  // For debug usage
  cv::Mat getAllNumbersImage();
  void drawResults(cv::Mat & img);

  int binary_thres;
  int detect_color;  // RED or BLUE
  LightParams l;
  ArmorParams a;

  // NOTE(review): std::unique_ptr is used without a direct <memory>
  // include in this header — confirm it arrives transitively.
  std::unique_ptr<NumberClassifier> classifier;

  // Debug msgs, refreshed on every detect() call
  cv::Mat binary_img;
  auto_aim_interfaces::msg::DebugLights debug_lights;
  auto_aim_interfaces::msg::DebugArmors debug_armors;

private:
  bool isLight(const Light & possible_light);
  bool containLight(
    const Light & light_1, const Light & light_2, const std::vector<Light> & lights);
  ArmorType isArmor(const Light & light_1, const Light & light_2);

  std::vector<Light> lights_;
  std::vector<Armor> armors_;
};
} // namespace rm_auto_aim
#endif // ARMOR_DETECTOR__DETECTOR_HPP_

View File

@ -0,0 +1,81 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.
#ifndef ARMOR_DETECTOR__DETECTOR_NODE_HPP_
#define ARMOR_DETECTOR__DETECTOR_NODE_HPP_
// ROS
#include <image_transport/image_transport.hpp>
#include <image_transport/publisher.hpp>
#include <image_transport/subscriber_filter.hpp>
#include <rclcpp/publisher.hpp>
#include <rclcpp/rclcpp.hpp>
#include <sensor_msgs/msg/camera_info.hpp>
#include <sensor_msgs/msg/image.hpp>
#include <visualization_msgs/msg/marker_array.hpp>
// STD
#include <memory>
#include <string>
#include <vector>
#include "armor_detector/detector.hpp"
#include "armor_detector/number_classifier.hpp"
#include "armor_detector/pnp_solver.hpp"
#include "auto_aim_interfaces/msg/armors.hpp"
namespace rm_auto_aim
{
// ROS 2 component node that subscribes to camera info and images, runs
// the armor Detector on each frame and publishes the results.
class ArmorDetectorNode : public rclcpp::Node
{
public:
  ArmorDetectorNode(const rclcpp::NodeOptions & options);

private:
  // Per-frame entry point for incoming images.
  void imageCallback(const sensor_msgs::msg::Image::ConstSharedPtr img_msg);

  // Builds the Detector from declared ROS parameters.
  std::unique_ptr<Detector> initDetector();
  std::vector<Armor> detectArmors(const sensor_msgs::msg::Image::ConstSharedPtr & img_msg);

  // Created/destroyed when the "debug" parameter toggles.
  void createDebugPublishers();
  void destroyDebugPublishers();

  void publishMarkers();

  // Armor Detector
  std::unique_ptr<Detector> detector_;

  // Detected armors publisher
  auto_aim_interfaces::msg::Armors armors_msg_;
  rclcpp::Publisher<auto_aim_interfaces::msg::Armors>::SharedPtr armors_pub_;

  // Visualization marker publisher
  visualization_msgs::msg::Marker armor_marker_;
  visualization_msgs::msg::Marker text_marker_;
  visualization_msgs::msg::MarkerArray marker_array_;
  rclcpp::Publisher<visualization_msgs::msg::MarkerArray>::SharedPtr marker_pub_;

  // Camera info part
  rclcpp::Subscription<sensor_msgs::msg::CameraInfo>::SharedPtr cam_info_sub_;
  cv::Point2f cam_center_;
  std::shared_ptr<sensor_msgs::msg::CameraInfo> cam_info_;
  std::unique_ptr<PnPSolver> pnp_solver_;

  // Image subscription
  rclcpp::Subscription<sensor_msgs::msg::Image>::SharedPtr img_sub_;

  // Debug information
  bool debug_;
  std::shared_ptr<rclcpp::ParameterEventHandler> debug_param_sub_;
  std::shared_ptr<rclcpp::ParameterCallbackHandle> debug_cb_handle_;
  rclcpp::Publisher<auto_aim_interfaces::msg::DebugLights>::SharedPtr lights_data_pub_;
  rclcpp::Publisher<auto_aim_interfaces::msg::DebugArmors>::SharedPtr armors_data_pub_;
  image_transport::Publisher binary_img_pub_;
  image_transport::Publisher number_img_pub_;
  image_transport::Publisher result_img_pub_;
};
} // namespace rm_auto_aim
#endif // ARMOR_DETECTOR__DETECTOR_NODE_HPP_

View File

@ -0,0 +1,40 @@
// Copyright 2022 Chen Jun
#ifndef ARMOR_DETECTOR__NUMBER_CLASSIFIER_HPP_
#define ARMOR_DETECTOR__NUMBER_CLASSIFIER_HPP_
// OpenCV
#include <opencv2/opencv.hpp>
// STL
#include <cstddef>
#include <iostream>
#include <map>
#include <string>
#include <vector>
#include "armor_detector/armor.hpp"
namespace rm_auto_aim
{
// Classifier for the number pattern on an armor plate, backed by a
// cv::dnn network.
class NumberClassifier
{
public:
  // model_path / label_path: network model and class-name files loaded
  //   into net_ / class_names_.
  //   NOTE(review): exact file formats are not visible in this header —
  //   confirm in number_classifier.cpp.
  // threshold: minimum confidence for a classification to be kept.
  // ignore_classes: labels whose detections should be discarded.
  NumberClassifier(
    const std::string & model_path, const std::string & label_path, const double threshold,
    const std::vector<std::string> & ignore_classes = {});

  // Extracts each armor's number region from src into armor.number_img.
  void extractNumbers(const cv::Mat & src, std::vector<Armor> & armors);

  // Runs the network on each armor's number image and fills in its
  // number / confidence / classfication_result fields.
  void classify(std::vector<Armor> & armors);

  double threshold;

private:
  cv::dnn::Net net_;
  std::vector<std::string> class_names_;
  std::vector<std::string> ignore_classes_;
};
} // namespace rm_auto_aim
#endif // ARMOR_DETECTOR__NUMBER_CLASSIFIER_HPP_

View File

@ -0,0 +1,48 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.
#ifndef ARMOR_DETECTOR__PNP_SOLVER_HPP_
#define ARMOR_DETECTOR__PNP_SOLVER_HPP_
#include <geometry_msgs/msg/point.hpp>
#include <opencv2/core.hpp>
// STD
#include <array>
#include <vector>
#include "armor_detector/armor.hpp"
namespace rm_auto_aim
{
// Wraps cv::solvePnP to recover an armor's 3D pose from its image-space
// corner points, using the camera intrinsics given at construction.
class PnPSolver
{
public:
  PnPSolver(
    const std::array<double, 9> & camera_matrix,
    const std::vector<double> & distortion_coefficients);

  // Get 3d position; rvec/tvec are output rotation and translation.
  bool solvePnP(const Armor & armor, cv::Mat & rvec, cv::Mat & tvec);

  // Calculate the distance between armor center and image center
  float calculateDistanceToCenter(const cv::Point2f & image_point);

private:
  cv::Mat camera_matrix_;
  cv::Mat dist_coeffs_;

  // Physical armor dimensions. Unit: mm
  static constexpr float SMALL_ARMOR_WIDTH = 135;
  static constexpr float SMALL_ARMOR_HEIGHT = 55;
  static constexpr float LARGE_ARMOR_WIDTH = 225;
  static constexpr float LARGE_ARMOR_HEIGHT = 55;

  // Four vertices of armor in 3d
  std::vector<cv::Point3f> small_armor_points_;
  std::vector<cv::Point3f> large_armor_points_;
};
} // namespace rm_auto_aim
#endif // ARMOR_DETECTOR__PNP_SOLVER_HPP_

View File

@ -0,0 +1,9 @@
1
2
3
4
5
outpost
guard
base
negative

Binary file not shown.

View File

@ -0,0 +1,37 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>armor_detector</name>
<version>0.1.0</version>
<description>Armor plate detection package of rm_auto_aim</description>
<maintainer email="chen.junn@outlook.com">Chen Jun</maintainer>
<license>MIT</license>
<url type="website">https://github.com/chenjunnn/rm_auto_aim</url>
<url type="bugtracker">https://github.com/chenjunnn/rm_auto_aim/issues</url>
<author email="chen.junn@outlook.com">Chen Jun</author>
<!-- buildtool_depend: dependencies of the build process -->
<buildtool_depend>ament_cmake</buildtool_depend>
<!-- depend: build, export, and execution dependency -->
<depend>rclcpp</depend>
<depend>rclcpp_components</depend>
<depend>sensor_msgs</depend>
<depend>geometry_msgs</depend>
<depend>visualization_msgs</depend>
<depend>message_filters</depend>
<depend>cv_bridge</depend>
<depend>image_transport</depend>
<depend>image_transport_plugins</depend>
<depend>auto_aim_interfaces</depend>
<depend>vision_opencv</depend>
<depend>tf2_geometry_msgs</depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<test_depend>ament_cmake_clang_format</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -0,0 +1,245 @@
// Copyright (c) 2022 ChenJun
// Licensed under the MIT License.
// OpenCV
#include <opencv2/core.hpp>
#include <opencv2/core/base.hpp>
#include <opencv2/core/mat.hpp>
#include <opencv2/core/types.hpp>
#include <opencv2/imgproc.hpp>
// STD
#include <algorithm>
#include <cmath>
#include <vector>
#include "armor_detector/detector.hpp"
#include "auto_aim_interfaces/msg/debug_armor.hpp"
#include "auto_aim_interfaces/msg/debug_light.hpp"
namespace rm_auto_aim
{
// Construct a Detector from the binarization threshold, the target
// color to detect (RED or BLUE) and the light / armor filter parameters.
Detector::Detector(
  const int & bin_thres, const int & color, const LightParams & l, const ArmorParams & a)
: binary_thres(bin_thres), detect_color(color), l(l), a(a)
{
}
// Full detection pipeline: binarize, find light bars, pair them into
// armor candidates, then classify the number pattern of each candidate.
std::vector<Armor> Detector::detect(const cv::Mat & input)
{
  binary_img = preprocessImage(input);
  lights_ = findLights(input, binary_img);
  armors_ = matchLights(lights_);

  // Nothing matched: skip the classifier entirely.
  if (armors_.empty()) {
    return armors_;
  }

  classifier->extractNumbers(input, armors_);
  classifier->classify(armors_);
  return armors_;
}
// Convert the color frame to grayscale and apply a fixed binary
// threshold so the bright light bars stand out from the background.
cv::Mat Detector::preprocessImage(const cv::Mat & rgb_img)
{
  cv::Mat gray_img, binary_img;
  cv::cvtColor(rgb_img, gray_img, cv::COLOR_RGB2GRAY);
  cv::threshold(gray_img, binary_img, binary_thres, 255, cv::THRESH_BINARY);
  return binary_img;
}
// Find light-bar candidates in the binary image.
//
// For every external contour: fit a rotated rect, filter by shape via
// isLight(), then decide the light's color by comparing the summed R
// and B channel values of pixels inside the contour (the input is
// treated as RGB — consistent with COLOR_RGB2GRAY in preprocessImage).
std::vector<Light> Detector::findLights(const cv::Mat & rbg_img, const cv::Mat & binary_img)
{
  using std::vector;
  vector<vector<cv::Point>> contours;
  vector<cv::Vec4i> hierarchy;
  cv::findContours(binary_img, contours, hierarchy, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

  vector<Light> lights;
  this->debug_lights.data.clear();

  for (const auto & contour : contours) {
    // Too few points for a meaningful rotated-rect fit.
    if (contour.size() < 5) continue;

    auto r_rect = cv::minAreaRect(contour);
    auto light = Light(r_rect);

    if (isLight(light)) {
      auto rect = light.boundingRect();
      // Only sample the ROI when it lies fully inside the image.
      if ( // Avoid assertion failed
        0 <= rect.x && 0 <= rect.width && rect.x + rect.width <= rbg_img.cols && 0 <= rect.y &&
        0 <= rect.height && rect.y + rect.height <= rbg_img.rows) {
        int sum_r = 0, sum_b = 0;
        auto roi = rbg_img(rect);
        // Iterate through the ROI
        for (int i = 0; i < roi.rows; i++) {
          for (int j = 0; j < roi.cols; j++) {
            if (cv::pointPolygonTest(contour, cv::Point2f(j + rect.x, i + rect.y), false) >= 0) {
              // if point is inside contour
              sum_r += roi.at<cv::Vec3b>(i, j)[0];  // channel 0: red (RGB input)
              sum_b += roi.at<cv::Vec3b>(i, j)[2];  // channel 2: blue (RGB input)
            }
          }
        }
        // Sum of red pixels > sum of blue pixels ?
        light.color = sum_r > sum_b ? RED : BLUE;
        lights.emplace_back(light);
      }
    }
  }

  return lights;
}
bool Detector::isLight(const Light & light)
{
// The ratio of light (short side / long side)
float ratio = light.width / light.length;
bool ratio_ok = l.min_ratio < ratio && ratio < l.max_ratio;
bool angle_ok = light.tilt_angle < l.max_angle;
bool is_light = ratio_ok && angle_ok;
// Fill in debug information
auto_aim_interfaces::msg::DebugLight light_data;
light_data.center_x = light.center.x;
light_data.ratio = ratio;
light_data.angle = light.tilt_angle;
light_data.is_light = is_light;
this->debug_lights.data.emplace_back(light_data);
return is_light;
}
// Pair lights of the target color into armor candidates.
//
// Every pair of same-colored lights is tested: pairs that enclose a
// third light are rejected, the rest are shape-checked by isArmor().
std::vector<Armor> Detector::matchLights(const std::vector<Light> & lights)
{
  std::vector<Armor> armors;
  this->debug_armors.data.clear();

  // Loop all the pairings of lights
  for (auto light_1 = lights.begin(); light_1 != lights.end(); light_1++) {
    // The color test on light_1 is invariant for the inner loop; hoist
    // it so a wrong-colored light is skipped once, not once per pair.
    if (light_1->color != detect_color) continue;

    for (auto light_2 = light_1 + 1; light_2 != lights.end(); light_2++) {
      if (light_2->color != detect_color) continue;

      // Reject pairs that have another light between them.
      if (containLight(*light_1, *light_2, lights)) {
        continue;
      }

      auto type = isArmor(*light_1, *light_2);
      if (type != ArmorType::INVALID) {
        auto armor = Armor(*light_1, *light_2);
        armor.type = type;
        armors.emplace_back(armor);
      }
    }
  }

  return armors;
}
// Check if there is another light in the boundingRect formed by the 2 lights
bool Detector::containLight(
const Light & light_1, const Light & light_2, const std::vector<Light> & lights)
{
auto points = std::vector<cv::Point2f>{light_1.top, light_1.bottom, light_2.top, light_2.bottom};
auto bounding_rect = cv::boundingRect(points);
for (const auto & test_light : lights) {
if (test_light.center == light_1.center || test_light.center == light_2.center) continue;
if (
bounding_rect.contains(test_light.top) || bounding_rect.contains(test_light.bottom) ||
bounding_rect.contains(test_light.center)) {
return true;
}
}
return false;
}
// Decide whether two lights form an armor and, if so, which size class.
// Returns ArmorType::INVALID when any of the three shape tests fails.
ArmorType Detector::isArmor(const Light & light_1, const Light & light_2)
{
  // Ratio of the length of 2 lights (short side / long side)
  float light_length_ratio = light_1.length < light_2.length ? light_1.length / light_2.length
                                                             : light_2.length / light_1.length;
  bool light_ratio_ok = light_length_ratio > a.min_light_ratio;

  // Distance between the center of 2 lights (unit : light length)
  float avg_light_length = (light_1.length + light_2.length) / 2;
  float center_distance = cv::norm(light_1.center - light_2.center) / avg_light_length;
  // The distance must fall in either the small-armor or large-armor band.
  bool center_distance_ok = (a.min_small_center_distance <= center_distance &&
                             center_distance < a.max_small_center_distance) ||
                            (a.min_large_center_distance <= center_distance &&
                             center_distance < a.max_large_center_distance);

  // Angle of light center connection (degrees from horizontal)
  cv::Point2f diff = light_1.center - light_2.center;
  float angle = std::abs(std::atan(diff.y / diff.x)) / CV_PI * 180;
  bool angle_ok = angle < a.max_angle;

  bool is_armor = light_ratio_ok && center_distance_ok && angle_ok;

  // Judge armor type: distances beyond the large-armor minimum count as
  // LARGE, everything else as SMALL.
  ArmorType type;
  if (is_armor) {
    type = center_distance > a.min_large_center_distance ? ArmorType::LARGE : ArmorType::SMALL;
  } else {
    type = ArmorType::INVALID;
  }

  // Fill in debug information
  auto_aim_interfaces::msg::DebugArmor armor_data;
  armor_data.type = ARMOR_TYPE_STR[static_cast<int>(type)];
  armor_data.center_x = (light_1.center.x + light_2.center.x) / 2;
  armor_data.light_ratio = light_length_ratio;
  armor_data.center_distance = center_distance;
  armor_data.angle = angle;
  this->debug_armors.data.emplace_back(armor_data);

  return type;
}
// Debug helper: vertically stack the extracted number images of all
// detected armors into a single image for visualization.
cv::Mat Detector::getAllNumbersImage()
{
  if (armors_.empty()) {
    // Return a zero-initialized placeholder: a plain cv::Mat(Size, type)
    // leaves the pixel buffer uninitialized, so the debug image would
    // show indeterminate memory contents.
    return cv::Mat::zeros(cv::Size(20, 28), CV_8UC1);
  }

  std::vector<cv::Mat> number_imgs;
  number_imgs.reserve(armors_.size());
  for (const auto & armor : armors_) {
    number_imgs.emplace_back(armor.number_img);
  }

  cv::Mat all_num_img;
  cv::vconcat(number_imgs, all_num_img);
  return all_num_img;
}
// Overlay the detection results (lights, armor diagonals, and the
// classification text) onto img for visualization.
void Detector::drawResults(cv::Mat & img)
{
  const cv::Scalar white(255, 255, 255);
  const cv::Scalar green(0, 255, 0);

  // Lights: endpoint markers plus a connecting line colored by the
  // light's detected color.
  for (const auto & light : lights_) {
    cv::circle(img, light.top, 3, white, 1);
    cv::circle(img, light.bottom, 3, white, 1);
    auto line_color = light.color == RED ? cv::Scalar(255, 255, 0) : cv::Scalar(255, 0, 255);
    cv::line(img, light.top, light.bottom, line_color, 1);
  }

  // Armors: both diagonals of each matched light pair.
  for (const auto & armor : armors_) {
    cv::line(img, armor.left_light.top, armor.right_light.bottom, green, 2);
    cv::line(img, armor.left_light.bottom, armor.right_light.top, green, 2);
  }

  // Classification result, anchored at the top of the left light.
  for (const auto & armor : armors_) {
    cv::putText(
      img, armor.classfication_result, armor.left_light.top, cv::FONT_HERSHEY_SIMPLEX, 0.8,
      cv::Scalar(0, 255, 255), 2);
  }
}
} // namespace rm_auto_aim

View File

@ -0,0 +1,292 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.
#include <cv_bridge/cv_bridge.h>
#include <rmw/qos_profiles.h>
#include <tf2/LinearMath/Matrix3x3.h>
#include <tf2/convert.h>
#include <ament_index_cpp/get_package_share_directory.hpp>
#include <image_transport/image_transport.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <rclcpp/duration.hpp>
#include <rclcpp/qos.hpp>
#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
// STD
#include <algorithm>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "armor_detector/armor.hpp"
#include "armor_detector/detector_node.hpp"
namespace rm_auto_aim
{
// Constructor: builds the detector, sets up publishers (armors, RViz markers,
// optional debug topics) and subscribes to the camera info and image streams.
ArmorDetectorNode::ArmorDetectorNode(const rclcpp::NodeOptions & options)
: Node("armor_detector", options)
{
  RCLCPP_INFO(this->get_logger(), "Starting DetectorNode!");

  // Detector (all detection parameters are declared inside initDetector())
  detector_ = initDetector();

  // Armors Publisher
  armors_pub_ = this->create_publisher<auto_aim_interfaces::msg::Armors>(
    "/detector/armors", rclcpp::SensorDataQoS());

  // Visualization Marker Publisher
  // See http://wiki.ros.org/rviz/DisplayTypes/Marker
  armor_marker_.ns = "armors";
  armor_marker_.action = visualization_msgs::msg::Marker::ADD;
  armor_marker_.type = visualization_msgs::msg::Marker::CUBE;
  armor_marker_.scale.x = 0.05;
  armor_marker_.scale.z = 0.125;
  armor_marker_.color.a = 1.0;
  armor_marker_.color.g = 0.5;
  armor_marker_.color.b = 1.0;
  // Short lifetime so stale markers disappear quickly in RViz
  armor_marker_.lifetime = rclcpp::Duration::from_seconds(0.1);

  text_marker_.ns = "classification";
  text_marker_.action = visualization_msgs::msg::Marker::ADD;
  text_marker_.type = visualization_msgs::msg::Marker::TEXT_VIEW_FACING;
  text_marker_.scale.z = 0.1;
  text_marker_.color.a = 1.0;
  text_marker_.color.r = 1.0;
  text_marker_.color.g = 1.0;
  text_marker_.color.b = 1.0;
  text_marker_.lifetime = rclcpp::Duration::from_seconds(0.1);

  marker_pub_ =
    this->create_publisher<visualization_msgs::msg::MarkerArray>("/detector/marker", 10);

  // Debug Publishers (only created while the "debug" parameter is true)
  debug_ = this->declare_parameter("debug", false);
  if (debug_) {
    createDebugPublishers();
  }

  // Debug param change moniter: toggle the debug publishers at runtime
  debug_param_sub_ = std::make_shared<rclcpp::ParameterEventHandler>(this);
  debug_cb_handle_ =
    debug_param_sub_->add_parameter_callback("debug", [this](const rclcpp::Parameter & p) {
      debug_ = p.as_bool();
      debug_ ? createDebugPublishers() : destroyDebugPublishers();
    });

  // Camera intrinsics arrive once; the subscription resets itself after
  // the PnP solver has been constructed
  cam_info_sub_ = this->create_subscription<sensor_msgs::msg::CameraInfo>(
    "/camera_info", rclcpp::SensorDataQoS(),
    [this](sensor_msgs::msg::CameraInfo::ConstSharedPtr camera_info) {
      cam_center_ = cv::Point2f(camera_info->k[2], camera_info->k[5]);
      cam_info_ = std::make_shared<sensor_msgs::msg::CameraInfo>(*camera_info);
      pnp_solver_ = std::make_unique<PnPSolver>(camera_info->k, camera_info->d);
      cam_info_sub_.reset();
    });

  img_sub_ = this->create_subscription<sensor_msgs::msg::Image>(
    "/image_raw", rclcpp::SensorDataQoS(),
    std::bind(&ArmorDetectorNode::imageCallback, this, std::placeholders::_1));
}
// Per-frame callback: runs detection, solves PnP for every detected armor,
// and publishes the armors message plus RViz markers.
void ArmorDetectorNode::imageCallback(const sensor_msgs::msg::Image::ConstSharedPtr img_msg)
{
  auto armors = detectArmors(img_msg);

  // pnp_solver_ only exists after the first CameraInfo message has arrived
  if (pnp_solver_ != nullptr) {
    armors_msg_.header = armor_marker_.header = text_marker_.header = img_msg->header;

    // Reset per-frame accumulators and marker ids
    armors_msg_.armors.clear();
    marker_array_.markers.clear();
    armor_marker_.id = 0;
    text_marker_.id = 0;

    auto_aim_interfaces::msg::Armor armor_msg;
    for (const auto & armor : armors) {
      cv::Mat rvec, tvec;
      bool success = pnp_solver_->solvePnP(armor, rvec, tvec);
      if (success) {
        // Fill basic info
        armor_msg.type = ARMOR_TYPE_STR[static_cast<int>(armor.type)];
        armor_msg.number = armor.number;

        // Fill pose: tvec is the armor position in the camera frame
        armor_msg.pose.position.x = tvec.at<double>(0);
        armor_msg.pose.position.y = tvec.at<double>(1);
        armor_msg.pose.position.z = tvec.at<double>(2);
        // rvec to 3x3 rotation matrix
        cv::Mat rotation_matrix;
        cv::Rodrigues(rvec, rotation_matrix);
        // rotation matrix to quaternion
        tf2::Matrix3x3 tf2_rotation_matrix(
          rotation_matrix.at<double>(0, 0), rotation_matrix.at<double>(0, 1),
          rotation_matrix.at<double>(0, 2), rotation_matrix.at<double>(1, 0),
          rotation_matrix.at<double>(1, 1), rotation_matrix.at<double>(1, 2),
          rotation_matrix.at<double>(2, 0), rotation_matrix.at<double>(2, 1),
          rotation_matrix.at<double>(2, 2));
        tf2::Quaternion tf2_q;
        tf2_rotation_matrix.getRotation(tf2_q);
        armor_msg.pose.orientation = tf2::toMsg(tf2_q);

        // Fill the distance to image center (the tracker uses it to pick
        // the initial target, see Tracker::init)
        armor_msg.distance_to_image_center = pnp_solver_->calculateDistanceToCenter(armor.center);

        // Fill the markers: a cube per armor, a text label per armor
        armor_marker_.id++;
        armor_marker_.scale.y = armor.type == ArmorType::SMALL ? 0.135 : 0.23;
        armor_marker_.pose = armor_msg.pose;
        text_marker_.id++;
        text_marker_.pose.position = armor_msg.pose.position;
        // Offset the label so it does not overlap the cube marker
        text_marker_.pose.position.y -= 0.1;
        text_marker_.text = armor.classfication_result;
        armors_msg_.armors.emplace_back(armor_msg);
        marker_array_.markers.emplace_back(armor_marker_);
        marker_array_.markers.emplace_back(text_marker_);
      } else {
        RCLCPP_WARN(this->get_logger(), "PnP failed!");
      }
    }

    // Publishing detected armors
    armors_pub_->publish(armors_msg_);

    // Publishing marker
    publishMarkers();
  }
}
// Declare all detection parameters (with integer ranges where applicable)
// and construct the Detector together with its number classifier.
std::unique_ptr<Detector> ArmorDetectorNode::initDetector()
{
  rcl_interfaces::msg::ParameterDescriptor param_desc;
  param_desc.integer_range.resize(1);
  param_desc.integer_range[0].step = 1;
  param_desc.integer_range[0].from_value = 0;
  param_desc.integer_range[0].to_value = 255;
  int binary_thres = declare_parameter("binary_thres", 160, param_desc);

  // param_desc is reused: the range is narrowed to {0, 1} for the color flag
  param_desc.description = "0-RED, 1-BLUE";
  param_desc.integer_range[0].from_value = 0;
  param_desc.integer_range[0].to_value = 1;
  auto detect_color = declare_parameter("detect_color", RED, param_desc);

  // Light filtering parameters (geometry of a single light bar)
  Detector::LightParams l_params = {
    .min_ratio = declare_parameter("light.min_ratio", 0.1),
    .max_ratio = declare_parameter("light.max_ratio", 0.4),
    .max_angle = declare_parameter("light.max_angle", 40.0)};

  // Armor matching parameters (geometry of a light-bar pair)
  Detector::ArmorParams a_params = {
    .min_light_ratio = declare_parameter("armor.min_light_ratio", 0.7),
    .min_small_center_distance = declare_parameter("armor.min_small_center_distance", 0.8),
    .max_small_center_distance = declare_parameter("armor.max_small_center_distance", 3.2),
    .min_large_center_distance = declare_parameter("armor.min_large_center_distance", 3.2),
    .max_large_center_distance = declare_parameter("armor.max_large_center_distance", 5.0),
    .max_angle = declare_parameter("armor.max_angle", 35.0)};

  auto detector = std::make_unique<Detector>(binary_thres, detect_color, l_params, a_params);

  // Init classifier: MLP model and label list live in the package share dir
  auto pkg_path = ament_index_cpp::get_package_share_directory("armor_detector");
  auto model_path = pkg_path + "/model/mlp.onnx";
  auto label_path = pkg_path + "/model/label.txt";
  double threshold = this->declare_parameter("classifier_threshold", 0.7);
  std::vector<std::string> ignore_classes =
    this->declare_parameter("ignore_classes", std::vector<std::string>{"negative"});
  detector->classifier =
    std::make_unique<NumberClassifier>(model_path, label_path, threshold, ignore_classes);

  return detector;
}
// Run the detector on a single frame and, when debugging is enabled, publish
// the intermediate images and per-candidate debug data.
// Returns the detected armors (image-space).
std::vector<Armor> ArmorDetectorNode::detectArmors(
  const sensor_msgs::msg::Image::ConstSharedPtr & img_msg)
{
  // Convert ROS img to cv::Mat
  auto img = cv_bridge::toCvShare(img_msg, "rgb8")->image;

  // Update params (these may be reconfigured at runtime)
  detector_->binary_thres = get_parameter("binary_thres").as_int();
  detector_->detect_color = get_parameter("detect_color").as_int();
  detector_->classifier->threshold = get_parameter("classifier_threshold").as_double();

  auto armors = detector_->detect(img);

  // Latency from image capture (header stamp) to end of detection, in ms
  auto final_time = this->now();
  auto latency = (final_time - img_msg->header.stamp).seconds() * 1000;
  RCLCPP_DEBUG_STREAM(this->get_logger(), "Latency: " << latency << "ms");

  // Publish debug info
  if (debug_) {
    binary_img_pub_.publish(
      cv_bridge::CvImage(img_msg->header, "mono8", detector_->binary_img).toImageMsg());

    // Sort lights and armors data by x coordinate
    std::sort(
      detector_->debug_lights.data.begin(), detector_->debug_lights.data.end(),
      [](const auto & l1, const auto & l2) { return l1.center_x < l2.center_x; });
    std::sort(
      detector_->debug_armors.data.begin(), detector_->debug_armors.data.end(),
      [](const auto & a1, const auto & a2) { return a1.center_x < a2.center_x; });
    lights_data_pub_->publish(detector_->debug_lights);
    armors_data_pub_->publish(detector_->debug_armors);

    if (!armors.empty()) {
      auto all_num_img = detector_->getAllNumbersImage();
      number_img_pub_.publish(
        *cv_bridge::CvImage(img_msg->header, "mono8", all_num_img).toImageMsg());
    }

    detector_->drawResults(img);
    // Draw camera center
    cv::circle(img, cam_center_, 5, cv::Scalar(255, 0, 0), 2);
    // Draw latency
    std::stringstream latency_ss;
    latency_ss << "Latency: " << std::fixed << std::setprecision(2) << latency << "ms";
    auto latency_s = latency_ss.str();
    cv::putText(
      img, latency_s, cv::Point(10, 30), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 255, 0), 2);
    result_img_pub_.publish(cv_bridge::CvImage(img_msg->header, "rgb8", img).toImageMsg());
  }

  return armors;
}
// Create the debug publishers; called at startup and whenever the "debug"
// parameter is switched on at runtime.
void ArmorDetectorNode::createDebugPublishers()
{
  lights_data_pub_ =
    this->create_publisher<auto_aim_interfaces::msg::DebugLights>("/detector/debug_lights", 10);
  armors_data_pub_ =
    this->create_publisher<auto_aim_interfaces::msg::DebugArmors>("/detector/debug_armors", 10);

  binary_img_pub_ = image_transport::create_publisher(this, "/detector/binary_img");
  number_img_pub_ = image_transport::create_publisher(this, "/detector/number_img");
  result_img_pub_ = image_transport::create_publisher(this, "/detector/result_img");
}
// Tear down the debug publishers when the "debug" parameter is switched off,
// so no debug topics stay advertised while debugging is disabled.
void ArmorDetectorNode::destroyDebugPublishers()
{
  lights_data_pub_.reset();
  armors_data_pub_.reset();

  binary_img_pub_.shutdown();
  number_img_pub_.shutdown();
  result_img_pub_.shutdown();
}
// Publish the accumulated marker array. When no armors were detected, a
// DELETE-action armor marker is appended so RViz clears stale markers.
// NOTE(review): when armors WERE detected this appends armor_marker_ a second
// time with the same id as the last one pushed in imageCallback — looks
// redundant; verify intended.
void ArmorDetectorNode::publishMarkers()
{
  using Marker = visualization_msgs::msg::Marker;
  armor_marker_.action = armors_msg_.armors.empty() ? Marker::DELETE : Marker::ADD;
  marker_array_.markers.emplace_back(armor_marker_);
  marker_pub_->publish(marker_array_);
}
} // namespace rm_auto_aim
#include "rclcpp_components/register_node_macro.hpp"
// Register the component with class_loader.
// This acts as a sort of entry point, allowing the component to be discoverable when its library
// is being loaded into a running process.
RCLCPP_COMPONENTS_REGISTER_NODE(rm_auto_aim::ArmorDetectorNode)

View File

@ -0,0 +1,147 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.
// OpenCV
#include <opencv2/core.hpp>
#include <opencv2/core/mat.hpp>
#include <opencv2/core/types.hpp>
#include <opencv2/dnn.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/opencv.hpp>
// STL
#include <algorithm>
#include <cstddef>
#include <fstream>
#include <iomanip>
#include <map>
#include <sstream>
#include <string>
#include <vector>
#include "armor_detector/armor.hpp"
#include "armor_detector/number_classifier.hpp"
namespace rm_auto_aim
{
// Load the ONNX MLP model and read the class label list (one label per line).
NumberClassifier::NumberClassifier(
  const std::string & model_path, const std::string & label_path, const double thre,
  const std::vector<std::string> & ignore_classes)
: threshold(thre), ignore_classes_(ignore_classes)
{
  net_ = cv::dnn::readNetFromONNX(model_path);

  std::ifstream label_file(label_path);
  for (std::string label; std::getline(label_file, label);) {
    class_names_.push_back(label);
  }
}
// Warp each armor's inner region to a fronto-parallel view, crop the number
// ROI and binarize it; the result is stored in armor.number_img
// (roi_size = 20 px wide x 28 px high, mono).
void NumberClassifier::extractNumbers(const cv::Mat & src, std::vector<Armor> & armors)
{
  // Light length in the warped image, in pixels
  const int light_length = 12;
  // Image size after warp
  const int warp_height = 28;
  const int small_armor_width = 32;
  const int large_armor_width = 54;
  // Number ROI size (width x height)
  const cv::Size roi_size(20, 28);

  for (auto & armor : armors) {
    // Warp perspective transform
    // Source quad: the four light endpoints in the original image,
    // ordered bottom-left, top-left, top-right, bottom-right
    cv::Point2f lights_vertices[4] = {
      armor.left_light.bottom, armor.left_light.top, armor.right_light.top,
      armor.right_light.bottom};

    // Vertically center the lights in the warped image
    const int top_light_y = (warp_height - light_length) / 2 - 1;
    const int bottom_light_y = top_light_y + light_length;
    // Target width depends on the armor type
    const int warp_width = armor.type == ArmorType::SMALL ? small_armor_width : large_armor_width;
    cv::Point2f target_vertices[4] = {
      cv::Point(0, bottom_light_y),
      cv::Point(0, top_light_y),
      cv::Point(warp_width - 1, top_light_y),
      cv::Point(warp_width - 1, bottom_light_y),
    };
    cv::Mat number_image;
    auto rotation_matrix = cv::getPerspectiveTransform(lights_vertices, target_vertices);
    cv::warpPerspective(src, number_image, rotation_matrix, cv::Size(warp_width, warp_height));

    // Get ROI: horizontally centered strip containing the number
    number_image =
      number_image(cv::Rect(cv::Point((warp_width - roi_size.width) / 2, 0), roi_size));

    // Binarize with Otsu so the threshold adapts to lighting
    cv::cvtColor(number_image, number_image, cv::COLOR_RGB2GRAY);
    cv::threshold(number_image, number_image, 0, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);

    armor.number_img = number_image;
  }
}
// Run the MLP on every armor's number image, store label/confidence on the
// armor, then erase armors that are below the confidence threshold, belong to
// an ignored class, or whose label contradicts the detected armor size.
void NumberClassifier::classify(std::vector<Armor> & armors)
{
  for (auto & armor : armors) {
    cv::Mat image = armor.number_img.clone();

    // Normalize
    // NOTE(review): number_img is CV_8UC1 (0/255 after Otsu in
    // extractNumbers), so this 8-bit division yields a 0/1 binary mat —
    // presumably what the MLP was trained on; confirm against the training
    // pipeline.
    image = image / 255.0;

    // Create blob from image
    cv::Mat blob;
    cv::dnn::blobFromImage(image, blob);

    // Set the input blob for the neural network
    net_.setInput(blob);
    // Forward pass the image blob through the model
    cv::Mat outputs = net_.forward();

    // Do softmax (subtract the max logit first for numerical stability)
    float max_prob = *std::max_element(outputs.begin<float>(), outputs.end<float>());
    cv::Mat softmax_prob;
    cv::exp(outputs - max_prob, softmax_prob);
    float sum = static_cast<float>(cv::sum(softmax_prob)[0]);
    softmax_prob /= sum;

    // Pick the highest-probability class
    double confidence;
    cv::Point class_id_point;
    minMaxLoc(softmax_prob.reshape(1, 1), nullptr, &confidence, nullptr, &class_id_point);
    int label_id = class_id_point.x;

    armor.confidence = confidence;
    armor.number = class_names_[label_id];

    std::stringstream result_ss;
    result_ss << armor.number << ": " << std::fixed << std::setprecision(1)
              << armor.confidence * 100.0 << "%";
    armor.classfication_result = result_ss.str();
  }

  // Filter out armors that fail any acceptance rule
  armors.erase(
    std::remove_if(
      armors.begin(), armors.end(),
      [this](const Armor & armor) {
        // Rule 1: confidence too low
        if (armor.confidence < threshold) {
          return true;
        }

        // Rule 2: label is explicitly ignored
        for (const auto & ignore_class : ignore_classes_) {
          if (armor.number == ignore_class) {
            return true;
          }
        }

        // Rule 3: label is impossible for the detected armor size
        bool mismatch_armor_type = false;
        if (armor.type == ArmorType::LARGE) {
          mismatch_armor_type =
            armor.number == "outpost" || armor.number == "2" || armor.number == "guard";
        } else if (armor.type == ArmorType::SMALL) {
          mismatch_armor_type = armor.number == "1" || armor.number == "base";
        }
        return mismatch_armor_type;
      }),
    armors.end());
}
} // namespace rm_auto_aim

View File

@ -0,0 +1,58 @@
// Copyright 2022 Chen Jun
#include "armor_detector/pnp_solver.hpp"
#include <opencv2/calib3d.hpp>
#include <vector>
namespace rm_auto_aim
{
// Build the solver from a ROS CameraInfo intrinsic matrix (row-major 3x3 `k`)
// and distortion coefficients; the data is cloned so the solver owns copies.
// NOTE(review): dist_coeffs is wrapped as a 1x5 Mat, so this assumes exactly
// 5 distortion coefficients (plumb_bob) — verify against the camera driver.
PnPSolver::PnPSolver(
  const std::array<double, 9> & camera_matrix, const std::vector<double> & dist_coeffs)
: camera_matrix_(cv::Mat(3, 3, CV_64F, const_cast<double *>(camera_matrix.data())).clone()),
  dist_coeffs_(cv::Mat(1, 5, CV_64F, const_cast<double *>(dist_coeffs.data())).clone())
{
  // Unit: m (the armor dimension constants are in mm, hence / 1000.0)
  constexpr double small_half_y = SMALL_ARMOR_WIDTH / 2.0 / 1000.0;
  constexpr double small_half_z = SMALL_ARMOR_HEIGHT / 2.0 / 1000.0;
  constexpr double large_half_y = LARGE_ARMOR_WIDTH / 2.0 / 1000.0;
  constexpr double large_half_z = LARGE_ARMOR_HEIGHT / 2.0 / 1000.0;

  // Start from bottom left in clockwise order
  // Model coordinate: x forward, y left, z up
  small_armor_points_.emplace_back(cv::Point3f(0, small_half_y, -small_half_z));
  small_armor_points_.emplace_back(cv::Point3f(0, small_half_y, small_half_z));
  small_armor_points_.emplace_back(cv::Point3f(0, -small_half_y, small_half_z));
  small_armor_points_.emplace_back(cv::Point3f(0, -small_half_y, -small_half_z));

  large_armor_points_.emplace_back(cv::Point3f(0, large_half_y, -large_half_z));
  large_armor_points_.emplace_back(cv::Point3f(0, large_half_y, large_half_z));
  large_armor_points_.emplace_back(cv::Point3f(0, -large_half_y, large_half_z));
  large_armor_points_.emplace_back(cv::Point3f(0, -large_half_y, -large_half_z));
}
bool PnPSolver::solvePnP(const Armor & armor, cv::Mat & rvec, cv::Mat & tvec)
{
std::vector<cv::Point2f> image_armor_points;
// Fill in image points
image_armor_points.emplace_back(armor.left_light.bottom);
image_armor_points.emplace_back(armor.left_light.top);
image_armor_points.emplace_back(armor.right_light.top);
image_armor_points.emplace_back(armor.right_light.bottom);
// Solve pnp
auto object_points = armor.type == ArmorType::SMALL ? small_armor_points_ : large_armor_points_;
return cv::solvePnP(
object_points, image_armor_points, camera_matrix_, dist_coeffs_, rvec, tvec, false,
cv::SOLVEPNP_IPPE);
}
// Euclidean pixel distance between an image point and the principal point.
float PnPSolver::calculateDistanceToCenter(const cv::Point2f & image_point)
{
  // Principal point (cx, cy) from the intrinsic matrix
  const auto cx = static_cast<float>(camera_matrix_.at<double>(0, 2));
  const auto cy = static_cast<float>(camera_matrix_.at<double>(1, 2));
  return static_cast<float>(cv::norm(image_point - cv::Point2f(cx, cy)));
}
} // namespace rm_auto_aim

View File

@ -0,0 +1,28 @@
// Copyright 2022 Chen Jun
#include <gtest/gtest.h>
#include <rclcpp/executors.hpp>
#include <rclcpp/node_options.hpp>
#include <rclcpp/utilities.hpp>
// STD
#include <memory>
#include "armor_detector/detector_node.hpp"
// Smoke test: the detector node can be constructed and destroyed without
// crashing (parameter declaration, publishers and subscriptions all succeed).
TEST(ArmorDetectorNodeTest, NodeStartupTest)
{
  rclcpp::NodeOptions options;
  auto node = std::make_shared<rm_auto_aim::ArmorDetectorNode>(options);
  node.reset();
}
// Test entry point: rclcpp must be initialized before any node is constructed
// and shut down after all tests have run.
int main(int argc, char ** argv)
{
  testing::InitGoogleTest(&argc, argv);
  rclcpp::init(argc, argv);
  auto result = RUN_ALL_TESTS();
  rclcpp::shutdown();
  return result;
}

View File

@ -0,0 +1,53 @@
// Copyright 2022 Chen Jun
#include <gtest/gtest.h>
#include <ament_index_cpp/get_package_share_directory.hpp>
#include <opencv2/core/mat.hpp>
// STL
#include <algorithm>
#include <cfloat>
#include <chrono>
#include <iostream>
#include <map>
#include <vector>
#include "armor_detector/number_classifier.hpp"
using hrc = std::chrono::high_resolution_clock;
// Benchmark the number classifier: run classify() on a fixed dummy input and
// report min/max/avg wall time over loop_num iterations (after a warm-up).
TEST(test_nc, benchmark)
{
  auto pkg_path = ament_index_cpp::get_package_share_directory("armor_detector");
  auto model_path = pkg_path + "/model/mlp.onnx";
  auto label_path = pkg_path + "/model/label.txt";
  rm_auto_aim::NumberClassifier nc(model_path, label_path, 0.5);

  // extractNumbers() produces number images of roi_size(20, 28), i.e.
  // 28 rows x 20 cols. cv::Mat takes (rows, cols), so the previous
  // cv::Mat(20, 28, ...) was transposed. Also zero-initialize the pixels so
  // every iteration classifies identical, deterministic data.
  auto test_mat = cv::Mat(28, 20, CV_8UC1, cv::Scalar(0));

  int loop_num = 200;
  int warm_up = 30;

  double time_min = DBL_MAX;
  double time_max = -DBL_MAX;
  double time_avg = 0;

  for (int i = 0; i < warm_up + loop_num; i++) {
    // classify() erases entries below the threshold or in ignored classes,
    // so rebuild the input every iteration to keep the workload constant
    // (otherwise later iterations would classify an empty vector).
    auto dummy_armors = std::vector<rm_auto_aim::Armor>(1);
    dummy_armors[0].number_img = test_mat;

    auto start = hrc::now();
    nc.classify(dummy_armors);
    auto end = hrc::now();
    double time = std::chrono::duration<double, std::milli>(end - start).count();
    if (i >= warm_up) {
      time_min = std::min(time_min, time);
      time_max = std::max(time_max, time);
      time_avg += time;
    }
  }
  time_avg /= loop_num;

  std::cout << "time_min: " << time_min << "ms" << std::endl;
  std::cout << "time_max: " << time_max << "ms" << std::endl;
  std::cout << "time_avg: " << time_avg << "ms" << std::endl;
}

View File

@ -0,0 +1,58 @@
cmake_minimum_required(VERSION 3.10)
project(armor_tracker)

## Use C++14
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

## By adding -Wall and -Werror, the compiler does not ignore warnings anymore,
## enforcing cleaner code.
add_definitions(-Wall -Werror)

## Export compile commands for clangd
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

#######################
## Find dependencies ##
#######################

find_package(ament_cmake_auto REQUIRED)
## Resolve every dependency listed in package.xml automatically
ament_auto_find_build_dependencies()

###########
## Build ##
###########

## Build all sources under src/ into a single shared component library
ament_auto_add_library(${PROJECT_NAME} SHARED
  DIRECTORY src
)

## Register the tracker node so it can run standalone or be composed
rclcpp_components_register_node(${PROJECT_NAME}
  PLUGIN rm_auto_aim::ArmorTrackerNode
  EXECUTABLE ${PROJECT_NAME}_node
)

#############
## Testing ##
#############

if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  ## clang-format is used instead; skip uncrustify/cpplint/copyright linters
  list(APPEND AMENT_LINT_AUTO_EXCLUDE
    ament_cmake_copyright
    ament_cmake_uncrustify
    ament_cmake_cpplint
  )
  ament_lint_auto_find_test_dependencies()

  # find_package(ament_cmake_gtest)
  # set(TEST_NAME test_kalman_filter)
  # ament_add_gtest(${TEST_NAME} test/${TEST_NAME}.cpp)
  # target_link_libraries(${TEST_NAME} ${PROJECT_NAME})
endif()

#############
## Install ##
#############

ament_auto_package()

View File

@ -0,0 +1,84 @@
# armor_tracker
- [ArmorTrackerNode](#armortrackernode)
- [Tracker](#tracker)
- [ExtendedKalmanFilter](#extendedkalmanfilter)
## ArmorTrackerNode
装甲板处理节点
订阅识别节点发布的装甲板三维位置及机器人的坐标转换信息将装甲板三维位置变换到指定惯性系一般是以云台中心为原点IMU 上电时的 Yaw 朝向为 X 轴的惯性系)下,然后将装甲板目标送入跟踪器中,输出跟踪机器人在指定惯性系下的状态
订阅:
- 已识别到的装甲板 `/detector/armors`
- 机器人的坐标转换信息 `/tf` `/tf_static`
发布:
- 最终锁定的目标 `/tracker/target`
参数:
- 跟踪器参数 tracker
- 两帧间目标可匹配的最大距离 max_match_distance
- `DETECTING` 状态进入 `TRACKING` 状态的阈值 tracking_threshold
- `TRACKING` 状态进入 `LOST` 状态的阈值 lost_threshold
## ExtendedKalmanFilter
$$ x_c = x_a + r * cos (\theta) $$
$$ y_c = y_a + r * sin (\theta) $$
$$ x = [x_c, y_c,z, yaw, v_{xc}, v_{yc},v_z, v_{yaw}, r]^T $$
参考 OpenCV 中的卡尔曼滤波器使用 Eigen 进行了实现
[卡尔曼滤波器](https://zh.wikipedia.org/wiki/%E5%8D%A1%E5%B0%94%E6%9B%BC%E6%BB%A4%E6%B3%A2)
![](docs/Kalman_filter_model.png)
考虑到自瞄任务中对于目标只有观测没有控制,所以输入-控制模型 $B$ 和控制器向量 $u$ 可忽略。
预测及更新的公式如下:
预测:
$$ x_{k|k-1} = F * x_{k-1|k-1} $$
$$ P_{k|k-1} = F * P_{k-1|k-1}* F^T + Q $$
更新:
$$ K = P_{k|k-1} * H^T * (H * P_{k|k-1} * H^T + R)^{-1} $$
$$ x_{k|k} = x_{k|k-1} + K * (z_k - H * x_{k|k-1}) $$
$$ P_{k|k} = (I - K * H) * P_{k|k-1} $$
## Tracker
参考 [SORT(Simple online and realtime tracking)](https://ieeexplore.ieee.org/abstract/document/7533003/) 中对于目标匹配的方法,使用卡尔曼滤波器对单目标在三维空间中进行跟踪
在此跟踪器中卡尔曼滤波器观测量为目标在指定惯性系中的位置xyz状态量为目标位置及速度xyz+vx vy vz
在对目标的运动模型建模为在指定惯性系中的匀速线性运动,即状态转移为 $x_{pre} = x_{post} + v_{post} * dt$
目标关联的判断依据为三维位置的 L2 欧式距离
跟踪器共有四个状态:
- `DETECTING` :短暂识别到目标,需要更多帧识别信息才能进入跟踪状态
- `TRACKING` :跟踪器正常跟踪目标中
- `TEMP_LOST` :跟踪器短暂丢失目标,通过卡尔曼滤波器预测目标
- `LOST` :跟踪器完全丢失目标
工作流程:
- init
跟踪器默认选择离相机光心最近的目标作为跟踪对象,选择目标后初始化卡尔曼滤波器,初始状态设为当前目标位置,速度设为 0
- update:
首先由卡尔曼滤波器得到目标在当前帧的预测位置,然后遍历当前帧中的目标位置与预测位置进行匹配,若当前帧不存在目标或所有目标位置与预测位置的偏差都过大则认为目标丢失,重置卡尔曼滤波器。
最后选取位置相差最小的目标作为最佳匹配项,更新卡尔曼滤波器,将更新后的状态作为跟踪器的结果输出

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

View File

@ -0,0 +1,74 @@
// Copyright 2022 Chen Jun
#ifndef ARMOR_PROCESSOR__KALMAN_FILTER_HPP_
#define ARMOR_PROCESSOR__KALMAN_FILTER_HPP_
#include <Eigen/Dense>
#include <functional>
namespace rm_auto_aim
{
// Generic extended Kalman filter driven by caller-supplied std::function
// callbacks for the process/observation models and their Jacobians, so the
// same filter works for any state/measurement layout.
class ExtendedKalmanFilter
{
public:
  ExtendedKalmanFilter() = default;

  using VecVecFunc = std::function<Eigen::VectorXd(const Eigen::VectorXd &)>;
  using VecMatFunc = std::function<Eigen::MatrixXd(const Eigen::VectorXd &)>;
  using VoidMatFunc = std::function<Eigen::MatrixXd()>;

  // f/h: process/observation functions; j_f/j_h: their Jacobians;
  // u_q/u_r: produce the process/measurement noise covariances;
  // P0: initial estimate covariance (its size fixes the state dimension).
  explicit ExtendedKalmanFilter(
    const VecVecFunc & f, const VecVecFunc & h, const VecMatFunc & j_f, const VecMatFunc & j_h,
    const VoidMatFunc & u_q, const VecMatFunc & u_r, const Eigen::MatrixXd & P0);

  // Set the initial state
  void setState(const Eigen::VectorXd & x0);

  // Compute a predicted state (also used as the estimate when no
  // measurement arrives before the next predict)
  Eigen::MatrixXd predict();

  // Update the estimated state based on measurement
  Eigen::MatrixXd update(const Eigen::VectorXd & z);

private:
  // Process nonlinear vector function
  VecVecFunc f;
  // Observation nonlinear vector function
  VecVecFunc h;
  // Jacobian of f()
  VecMatFunc jacobian_f;
  Eigen::MatrixXd F;
  // Jacobian of h()
  VecMatFunc jacobian_h;
  Eigen::MatrixXd H;
  // Process noise covariance matrix
  VoidMatFunc update_Q;
  Eigen::MatrixXd Q;
  // Measurement noise covariance matrix
  VecMatFunc update_R;
  Eigen::MatrixXd R;

  // Priori error estimate covariance matrix
  Eigen::MatrixXd P_pri;
  // Posteriori error estimate covariance matrix
  Eigen::MatrixXd P_post;

  // Kalman gain
  Eigen::MatrixXd K;

  // System dimensions
  int n;

  // N-size identity
  Eigen::MatrixXd I;

  // Priori state
  Eigen::VectorXd x_pri;
  // Posteriori state
  Eigen::VectorXd x_post;
};
} // namespace rm_auto_aim
#endif // ARMOR_PROCESSOR__KALMAN_FILTER_HPP_

View File

@ -0,0 +1,87 @@
// Copyright 2022 Chen Jun
#ifndef ARMOR_PROCESSOR__TRACKER_HPP_
#define ARMOR_PROCESSOR__TRACKER_HPP_
// Eigen
#include <Eigen/Eigen>
// ROS
#include <geometry_msgs/msg/point.hpp>
#include <geometry_msgs/msg/quaternion.hpp>
#include <geometry_msgs/msg/vector3.hpp>
// STD
#include <memory>
#include <string>
#include "armor_tracker/extended_kalman_filter.hpp"
#include "auto_aim_interfaces/msg/armors.hpp"
#include "auto_aim_interfaces/msg/target.hpp"
namespace rm_auto_aim
{
// Number of armors mounted on the tracked robot type
// (balanced infantry has 2, the outpost has 3, everything else 4)
enum class ArmorsNum { NORMAL_4 = 4, BALANCE_2 = 2, OUTPOST_3 = 3 };

// Single-target whole-robot tracker: associates detected armors with the EKF
// prediction and tracks the robot center state through armor jumps.
class Tracker
{
public:
  // max_match_distance: max position difference for association;
  // max_match_yaw_diff: max yaw difference before treating it as a jump
  Tracker(double max_match_distance, double max_match_yaw_diff);

  using Armors = auto_aim_interfaces::msg::Armors;
  using Armor = auto_aim_interfaces::msg::Armor;

  // Start tracking the armor closest to the image center
  void init(const Armors::SharedPtr & armors_msg);

  // Advance one frame: predict, associate, update the state machine
  void update(const Armors::SharedPtr & armors_msg);

  ExtendedKalmanFilter ekf;

  // Frame-count thresholds: DETECTING -> TRACKING and TEMP_LOST -> LOST
  int tracking_thres;
  int lost_thres;

  enum State {
    LOST,
    DETECTING,
    TRACKING,
    TEMP_LOST,
  } tracker_state;

  std::string tracked_id;
  Armor tracked_armor;
  ArmorsNum tracked_armors_num;

  // Debug info: last association position/yaw differences
  double info_position_diff;
  double info_yaw_diff;

  Eigen::VectorXd measurement;

  Eigen::VectorXd target_state;

  // To store another pair of armors message
  double dz, another_r;

private:
  void initEKF(const Armor & a);

  void updateArmorsNum(const Armor & a);

  void handleArmorJump(const Armor & a);

  double orientationToYaw(const geometry_msgs::msg::Quaternion & q);

  Eigen::Vector3d getArmorPositionFromState(const Eigen::VectorXd & x);

  double max_match_distance_;
  double max_match_yaw_diff_;

  // Consecutive-frame counters for the state machine
  int detect_count_;
  int lost_count_;

  // Last yaw seen, used to unwrap orientation (see orientationToYaw)
  double last_yaw_;
};
} // namespace rm_auto_aim
#endif // ARMOR_PROCESSOR__TRACKER_HPP_

View File

@ -0,0 +1,76 @@
// Copyright 2022 Chen Jun
#ifndef ARMOR_PROCESSOR__PROCESSOR_NODE_HPP_
#define ARMOR_PROCESSOR__PROCESSOR_NODE_HPP_
// ROS
#include <message_filters/subscriber.h>
#include <tf2_ros/buffer.h>
#include <tf2_ros/create_timer_ros.h>
#include <tf2_ros/message_filter.h>
#include <tf2_ros/transform_listener.h>
#include <rclcpp/rclcpp.hpp>
#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
#include <visualization_msgs/msg/marker_array.hpp>
// STD
#include <memory>
#include <string>
#include <vector>
#include "armor_tracker/tracker.hpp"
#include "auto_aim_interfaces/msg/armors.hpp"
#include "auto_aim_interfaces/msg/target.hpp"
#include "auto_aim_interfaces/msg/tracker_info.hpp"
namespace rm_auto_aim
{
using tf2_filter = tf2_ros::MessageFilter<auto_aim_interfaces::msg::Armors>;
// ROS 2 node that receives detected armors, filters them through tf2 into the
// target frame, feeds them to the Tracker, and publishes the tracked target,
// tracker debug info and RViz markers.
class ArmorTrackerNode : public rclcpp::Node
{
public:
  explicit ArmorTrackerNode(const rclcpp::NodeOptions & options);

private:
  void armorsCallback(const auto_aim_interfaces::msg::Armors::SharedPtr armors_ptr);

  void publishMarkers(const auto_aim_interfaces::msg::Target & target_msg);

  // Maximum allowable armor distance in the XOY plane
  double max_armor_distance_;

  // The time when the last message was received
  rclcpp::Time last_time_;
  // presumably the time step between consecutive armor messages [s] —
  // confirm in armorsCallback
  double dt_;

  // Armor tracker
  // EKF noise parameters (names suggest process-noise sigma^2 for xyz/yaw/r
  // and measurement-noise factors) — see armorsCallback for actual usage
  double s2qxyz_, s2qyaw_, s2qr_;
  double r_xyz_factor, r_yaw;
  double lost_time_thres_;
  std::unique_ptr<Tracker> tracker_;

  // Subscriber with tf2 message_filter
  std::string target_frame_;
  std::shared_ptr<tf2_ros::Buffer> tf2_buffer_;
  std::shared_ptr<tf2_ros::TransformListener> tf2_listener_;
  message_filters::Subscriber<auto_aim_interfaces::msg::Armors> armors_sub_;
  std::shared_ptr<tf2_filter> tf2_filter_;

  // Tracker info publisher
  rclcpp::Publisher<auto_aim_interfaces::msg::TrackerInfo>::SharedPtr info_pub_;

  // Publisher
  rclcpp::Publisher<auto_aim_interfaces::msg::Target>::SharedPtr target_pub_;

  // Visualization marker publisher
  visualization_msgs::msg::Marker position_marker_;
  visualization_msgs::msg::Marker linear_v_marker_;
  visualization_msgs::msg::Marker angular_v_marker_;
  visualization_msgs::msg::Marker armor_marker_;
  rclcpp::Publisher<visualization_msgs::msg::MarkerArray>::SharedPtr marker_pub_;
};
} // namespace rm_auto_aim
#endif // ARMOR_PROCESSOR__PROCESSOR_NODE_HPP_

View File

@ -0,0 +1,34 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>armor_tracker</name>
<version>0.1.0</version>
<description>Armor tracker for RoboMaster auto-aim: tracks detected armors and estimates the target robot state with an extended Kalman filter.</description>
<maintainer email="chen.junn@outlook.com">Chen Jun</maintainer>
<license>BSD</license>
<url type="website">https://github.com/chenjunnn/rm_auto_aim</url>
<url type="bugtracker">https://github.com/chenjunnn/rm_auto_aim/issues</url>
<author email="chen.junn@outlook.com">Chen Jun</author>
<!-- buildtool_depend: dependencies of the build process -->
<buildtool_depend>ament_cmake</buildtool_depend>
<!-- depend: build, export, and execution dependency -->
<depend>rclcpp</depend>
<depend>rclcpp_components</depend>
<depend>eigen</depend>
<depend>angles</depend>
<depend>geometry_msgs</depend>
<depend>visualization_msgs</depend>
<depend>message_filters</depend>
<depend>tf2_geometry_msgs</depend>
<depend>auto_aim_interfaces</depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<test_depend>ament_cmake_clang_format</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -0,0 +1,51 @@
// Copyright 2022 Chen Jun
#include "armor_tracker/extended_kalman_filter.hpp"
namespace rm_auto_aim
{
// Construct the EKF from its model callbacks.
// f/h: nonlinear process/observation functions; j_f/j_h: their Jacobians;
// u_q/u_r: produce the process/measurement noise covariances; P0: initial
// estimate covariance — its row count defines the state dimension n.
ExtendedKalmanFilter::ExtendedKalmanFilter(
  const VecVecFunc & f, const VecVecFunc & h, const VecMatFunc & j_f, const VecMatFunc & j_h,
  const VoidMatFunc & u_q, const VecMatFunc & u_r, const Eigen::MatrixXd & P0)
: f(f),
  h(h),
  jacobian_f(j_f),
  jacobian_h(j_h),
  update_Q(u_q),
  update_R(u_r),
  P_post(P0),
  // n is declared before I/x_pri/x_post in the header, so it is
  // initialized first and may be used to size them here
  n(P0.rows()),
  I(Eigen::MatrixXd::Identity(n, n)),
  x_pri(n),
  x_post(n)
{
}
void ExtendedKalmanFilter::setState(const Eigen::VectorXd & x0) { x_post = x0; }
// Predict step: propagate the state and covariance through the process model.
Eigen::MatrixXd ExtendedKalmanFilter::predict()
{
  // Linearize the process model around the current posterior estimate
  F = jacobian_f(x_post);
  Q = update_Q();

  // Propagate state and covariance
  x_pri = f(x_post);
  P_pri = F * P_post * F.transpose() + Q;

  // Handle the case when there will be no measurement before the next predict
  x_post = x_pri;
  P_post = P_pri;

  return x_pri;
}
// Update step: correct the prior estimate with measurement z.
// NOTE(review): uses x_pri/P_pri, so predict() must have been called first.
Eigen::MatrixXd ExtendedKalmanFilter::update(const Eigen::VectorXd & z)
{
  // Linearize the observation model around the prior state
  H = jacobian_h(x_pri);
  R = update_R(z);

  // Kalman gain
  K = P_pri * H.transpose() * (H * P_pri * H.transpose() + R).inverse();

  // Correct the prior with the measurement residual
  x_post = x_pri + K * (z - h(x_pri));
  P_post = (I - K * H) * P_pri;

  return x_post;
}
} // namespace rm_auto_aim

View File

@ -0,0 +1,239 @@
// Copyright 2022 Chen Jun
#include "armor_tracker/tracker.hpp"
#include <angles/angles.h>
#include <tf2/LinearMath/Matrix3x3.h>
#include <tf2/LinearMath/Quaternion.h>
#include <tf2/convert.h>
#include <rclcpp/logger.hpp>
#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
// STD
#include <cfloat>
#include <memory>
#include <string>
namespace rm_auto_aim
{
// Construct a tracker in the LOST state.
// max_match_distance: max position difference [m] for armor association;
// max_match_yaw_diff: max yaw difference [rad] before treating a detection
// as an armor jump.
// Fix: tracking_thres, lost_thres, detect_count_, lost_count_ and last_yaw_
// were previously left uninitialized (the header declares them without
// initializers), so update() read indeterminate counter values.
// Initializer order follows the member declaration order in tracker.hpp.
Tracker::Tracker(double max_match_distance, double max_match_yaw_diff)
: tracking_thres(0),
  lost_thres(0),
  tracker_state(LOST),
  tracked_id(std::string("")),
  measurement(Eigen::VectorXd::Zero(4)),
  target_state(Eigen::VectorXd::Zero(9)),
  max_match_distance_(max_match_distance),
  max_match_yaw_diff_(max_match_yaw_diff),
  detect_count_(0),
  lost_count_(0),
  last_yaw_(0)
{
}
// Start tracking: pick the armor closest to the image center as the target,
// initialize the EKF around it and enter the DETECTING state.
void Tracker::init(const Armors::SharedPtr & armors_msg)
{
  // Nothing to track
  if (armors_msg->armors.empty()) {
    return;
  }

  // Simply choose the armor that is closest to image center
  double best_distance = DBL_MAX;
  tracked_armor = armors_msg->armors[0];
  for (const auto & candidate : armors_msg->armors) {
    if (candidate.distance_to_image_center < best_distance) {
      best_distance = candidate.distance_to_image_center;
      tracked_armor = candidate;
    }
  }

  initEKF(tracked_armor);
  RCLCPP_DEBUG(rclcpp::get_logger("armor_tracker"), "Init EKF!");

  tracked_id = tracked_armor.number;
  tracker_state = DETECTING;

  updateArmorsNum(tracked_armor);
}
// Advance the tracker by one frame: EKF predict, associate the closest armor
// with the tracked id, EKF update (or handle an armor jump), clamp the radius
// estimate, then run the DETECTING/TRACKING/TEMP_LOST/LOST state machine.
void Tracker::update(const Armors::SharedPtr & armors_msg)
{
  // KF predict
  Eigen::VectorXd ekf_prediction = ekf.predict();
  RCLCPP_DEBUG(rclcpp::get_logger("armor_tracker"), "EKF predict");

  bool matched = false;
  // Use KF prediction as default target state if no matched armor is found
  target_state = ekf_prediction;

  if (!armors_msg->armors.empty()) {
    // Find the closest armor with the same id
    Armor same_id_armor;
    int same_id_armors_count = 0;
    auto predicted_position = getArmorPositionFromState(ekf_prediction);
    double min_position_diff = DBL_MAX;
    double yaw_diff = DBL_MAX;
    for (const auto & armor : armors_msg->armors) {
      // Only consider armors with the same id
      if (armor.number == tracked_id) {
        same_id_armor = armor;
        same_id_armors_count++;
        // Calculate the difference between the predicted position and the current armor position
        auto p = armor.pose.position;
        Eigen::Vector3d position_vec(p.x, p.y, p.z);
        double position_diff = (predicted_position - position_vec).norm();
        if (position_diff < min_position_diff) {
          // Find the closest armor (state index 6 is the predicted yaw)
          min_position_diff = position_diff;
          yaw_diff = abs(orientationToYaw(armor.pose.orientation) - ekf_prediction(6));
          tracked_armor = armor;
        }
      }
    }

    // Store tracker info (published for debugging)
    info_position_diff = min_position_diff;
    info_yaw_diff = yaw_diff;

    // Check if the distance and yaw difference of closest armor are within the threshold
    if (min_position_diff < max_match_distance_ && yaw_diff < max_match_yaw_diff_) {
      // Matched armor found
      matched = true;
      auto p = tracked_armor.pose.position;
      // Update EKF with the measurement [x, y, z, yaw]
      double measured_yaw = orientationToYaw(tracked_armor.pose.orientation);
      measurement = Eigen::Vector4d(p.x, p.y, p.z, measured_yaw);
      target_state = ekf.update(measurement);
      RCLCPP_DEBUG(rclcpp::get_logger("armor_tracker"), "EKF update");
    } else if (same_id_armors_count == 1 && yaw_diff > max_match_yaw_diff_) {
      // Matched armor not found, but there is only one armor with the same id
      // and yaw has jumped, take this case as the target is spinning and armor jumped
      handleArmorJump(same_id_armor);
    } else {
      // No matched armor found
      RCLCPP_WARN(rclcpp::get_logger("armor_tracker"), "No matched armor found!");
    }
  }

  // Prevent radius from spreading (state index 8 is the robot radius r,
  // clamped to a physically plausible range)
  if (target_state(8) < 0.12) {
    target_state(8) = 0.12;
    ekf.setState(target_state);
  } else if (target_state(8) > 0.4) {
    target_state(8) = 0.4;
    ekf.setState(target_state);
  }

  // Tracking state machine
  if (tracker_state == DETECTING) {
    // Require tracking_thres consecutive matches before the target is trusted
    if (matched) {
      detect_count_++;
      if (detect_count_ > tracking_thres) {
        detect_count_ = 0;
        tracker_state = TRACKING;
      }
    } else {
      detect_count_ = 0;
      tracker_state = LOST;
    }
  } else if (tracker_state == TRACKING) {
    if (!matched) {
      tracker_state = TEMP_LOST;
      lost_count_++;
    }
  } else if (tracker_state == TEMP_LOST) {
    // Keep predicting for up to lost_thres frames before declaring LOST
    if (!matched) {
      lost_count_++;
      if (lost_count_ > lost_thres) {
        lost_count_ = 0;
        tracker_state = LOST;
      }
    } else {
      tracker_state = TRACKING;
      lost_count_ = 0;
    }
  }
}
// Seed the EKF state from a single detected armor.
// The robot center is placed r meters behind the armor along the armor's
// yaw direction; all velocities start at zero.
void Tracker::initEKF(const Armor & a)
{
  // Reset the yaw-unwrapping reference before reading the first yaw.
  last_yaw_ = 0;
  const double yaw = orientationToYaw(a.pose.orientation);

  const double xa = a.pose.position.x;
  const double ya = a.pose.position.y;
  const double za = a.pose.position.z;

  // Initial guess for the robot radius (meters); also used as the second radius.
  const double r = 0.26;
  dz = 0;
  another_r = r;

  target_state = Eigen::VectorXd::Zero(9);
  // State layout: xc, v_xc, yc, v_yc, za, v_za, yaw, v_yaw, r
  target_state << xa + r * cos(yaw), 0, ya + r * sin(yaw), 0, za, 0, yaw, 0, r;
  ekf.setState(target_state);
}
// Decide how many armor plates the tracked target carries, based on the
// tracked id and the detected plate size:
//  - ids "3"/"4"/"5" with a "large" plate -> 2 plates (BALANCE_2)
//  - id "outpost"                         -> 3 plates (OUTPOST_3)
//  - everything else                      -> 4 plates (NORMAL_4)
void Tracker::updateArmorsNum(const Armor & armor)
{
  const bool balance_candidate =
    tracked_id == "3" || tracked_id == "4" || tracked_id == "5";

  if (armor.type == "large" && balance_candidate) {
    tracked_armors_num = ArmorsNum::BALANCE_2;
  } else if (tracked_id == "outpost") {
    tracked_armors_num = ArmorsNum::OUTPOST_3;
  } else {
    tracked_armors_num = ArmorsNum::NORMAL_4;
  }
}
// Handle the case where the target rotated far enough that the tracked plate
// switched to a neighbouring one ("armor jump"): adopt the new armor's yaw
// and, for 4-armor targets, swap to the alternate radius/height pair.
// If the resulting state still disagrees with the measured position by more
// than max_match_distance_, treat the EKF as diverged and re-seed the
// position states directly from the armor.
void Tracker::handleArmorJump(const Armor & current_armor)
{
double yaw = orientationToYaw(current_armor.pose.orientation);
target_state(6) = yaw;
updateArmorsNum(current_armor);
// Only 4 armors has 2 radius and height
if (tracked_armors_num == ArmorsNum::NORMAL_4) {
// Adjacent plate pairs sit at different heights/radii: remember the height
// offset (dz) BEFORE overwriting za, then swap in the other radius.
dz = target_state(4) - current_armor.pose.position.z;
target_state(4) = current_armor.pose.position.z;
std::swap(target_state(8), another_r);
}
RCLCPP_WARN(rclcpp::get_logger("armor_tracker"), "Armor jump!");
// If position difference is larger than max_match_distance_,
// take this case as the ekf diverged, reset the state
auto p = current_armor.pose.position;
Eigen::Vector3d current_p(p.x, p.y, p.z);
Eigen::Vector3d infer_p = getArmorPositionFromState(target_state);
if ((current_p - infer_p).norm() > max_match_distance_) {
// Re-derive the center from the armor position; zero the velocities.
double r = target_state(8);
target_state(0) = p.x + r * cos(yaw); // xc
target_state(1) = 0; // vxc
target_state(2) = p.y + r * sin(yaw); // yc
target_state(3) = 0; // vyc
target_state(4) = p.z; // za
target_state(5) = 0; // vza
RCLCPP_ERROR(rclcpp::get_logger("armor_tracker"), "Reset State!");
}
ekf.setState(target_state);
}
// Extract the yaw angle from a quaternion and unwrap it so consecutive
// calls produce a continuous angle (maps -pi..pi jumps onto -inf..inf).
// Side effect: updates last_yaw_, the unwrapping reference.
double Tracker::orientationToYaw(const geometry_msgs::msg::Quaternion & q)
{
  tf2::Quaternion tf_q;
  tf2::fromMsg(q, tf_q);

  double roll = 0.0;
  double pitch = 0.0;
  double raw_yaw = 0.0;
  tf2::Matrix3x3(tf_q).getRPY(roll, pitch, raw_yaw);

  // Accumulate the shortest angular step from the previous yaw so the
  // returned value never jumps across the +/-pi boundary.
  const double continuous_yaw =
    last_yaw_ + angles::shortest_angular_distance(last_yaw_, raw_yaw);
  last_yaw_ = continuous_yaw;
  return continuous_yaw;
}
// Project the robot-center state onto the tracked armor plate:
// the armor sits r meters from the center along the yaw direction,
// at height za. State layout: xc, v_xc, yc, v_yc, za, v_za, yaw, v_yaw, r.
Eigen::Vector3d Tracker::getArmorPositionFromState(const Eigen::VectorXd & x)
{
  const double yaw = x(6);
  const double r = x(8);
  return Eigen::Vector3d(x(0) - r * cos(yaw), x(2) - r * sin(yaw), x(4));
}
} // namespace rm_auto_aim

View File

@ -0,0 +1,336 @@
// Copyright 2022 Chen Jun
#include "armor_tracker/tracker_node.hpp"
// STD
#include <memory>
#include <vector>
namespace rm_auto_aim
{
// Tracker node: fuses per-frame armor detections into a single target
// estimate with an EKF over the robot-center state and publishes it.
//
//   state x       = [xc, v_xc, yc, v_yc, za, v_za, yaw, v_yaw, r]
//   measurement z = [xa, ya, za, yaw]   (armor position + armor yaw)
//
// Sets up parameters, the EKF model lambdas, the tf2 message filter on the
// detector topic, and all publishers (target, debug info, RViz markers).
ArmorTrackerNode::ArmorTrackerNode(const rclcpp::NodeOptions & options)
: Node("armor_tracker", options)
{
  RCLCPP_INFO(this->get_logger(), "Starting TrackerNode!");

  // Maximum allowable armor distance in the XOY plane
  max_armor_distance_ = this->declare_parameter("max_armor_distance", 10.0);

  // Tracker association thresholds and state-machine thresholds
  double max_match_distance = this->declare_parameter("tracker.max_match_distance", 0.15);
  double max_match_yaw_diff = this->declare_parameter("tracker.max_match_yaw_diff", 1.0);
  tracker_ = std::make_unique<Tracker>(max_match_distance, max_match_yaw_diff);
  tracker_->tracking_thres = this->declare_parameter("tracker.tracking_thres", 5);
  lost_time_thres_ = this->declare_parameter("tracker.lost_time_thres", 0.3);

  // EKF
  // xa = x_armor, xc = x_robot_center
  // state: xc, v_xc, yc, v_yc, za, v_za, yaw, v_yaw, r
  // measurement: xa, ya, za, yaw
  // f - Process function: constant-velocity model on xc, yc, za and yaw;
  // the radius r has no process dynamics.
  auto f = [this](const Eigen::VectorXd & x) {
    Eigen::VectorXd x_new = x;
    x_new(0) += x(1) * dt_;
    x_new(2) += x(3) * dt_;
    x_new(4) += x(5) * dt_;
    x_new(6) += x(7) * dt_;
    return x_new;
  };
  // J_f - Jacobian of process function (constant, apart from dt_)
  auto j_f = [this](const Eigen::VectorXd &) {
    Eigen::MatrixXd f(9, 9);
    // clang-format off
    f <<  1,   dt_, 0,   0,   0,   0,   0,   0,   0,
          0,   1,   0,   0,   0,   0,   0,   0,   0,
          0,   0,   1,   dt_, 0,   0,   0,   0,   0,
          0,   0,   0,   1,   0,   0,   0,   0,   0,
          0,   0,   0,   0,   1,   dt_, 0,   0,   0,
          0,   0,   0,   0,   0,   1,   0,   0,   0,
          0,   0,   0,   0,   0,   0,   1,   dt_, 0,
          0,   0,   0,   0,   0,   0,   0,   1,   0,
          0,   0,   0,   0,   0,   0,   0,   0,   1;
    // clang-format on
    return f;
  };
  // h - Observation function: predicts the armor pose from the center state
  // (armor lies r meters from the center along yaw, at height za).
  auto h = [](const Eigen::VectorXd & x) {
    Eigen::VectorXd z(4);
    double xc = x(0), yc = x(2), yaw = x(6), r = x(8);
    z(0) = xc - r * cos(yaw);  // xa
    z(1) = yc - r * sin(yaw);  // ya
    z(2) = x(4);               // za
    z(3) = x(6);               // yaw
    return z;
  };
  // J_h - Jacobian of observation function
  auto j_h = [](const Eigen::VectorXd & x) {
    Eigen::MatrixXd h(4, 9);
    double yaw = x(6), r = x(8);
    // clang-format off
    //    xc   v_xc yc   v_yc za   v_za yaw          v_yaw r
    h <<  1,   0,   0,   0,   0,   0,   r*sin(yaw),  0,    -cos(yaw),
          0,   0,   1,   0,   0,   0,   -r*cos(yaw), 0,    -sin(yaw),
          0,   0,   0,   0,   1,   0,   0,           0,    0,
          0,   0,   0,   0,   0,   0,   1,           0,    0;
    // clang-format on
    return h;
  };
  // update_Q - process noise covariance matrix, built per-step from dt_
  s2qxyz_ = declare_parameter("ekf.sigma2_q_xyz", 20.0);
  s2qyaw_ = declare_parameter("ekf.sigma2_q_yaw", 100.0);
  s2qr_ = declare_parameter("ekf.sigma2_q_r", 800.0);
  auto u_q = [this]() {
    Eigen::MatrixXd q(9, 9);
    double t = dt_, x = s2qxyz_, y = s2qyaw_, r = s2qr_;
    double q_x_x = pow(t, 4) / 4 * x, q_x_vx = pow(t, 3) / 2 * x, q_vx_vx = pow(t, 2) * x;
    // BUG FIX: the yaw position/velocity cross term previously used the xyz
    // noise (x) instead of the yaw noise (y), making the yaw block of Q
    // inconsistent with q_y_y and q_vy_vy. Use y for the whole yaw block.
    double q_y_y = pow(t, 4) / 4 * y, q_y_vy = pow(t, 3) / 2 * y, q_vy_vy = pow(t, 2) * y;
    double q_r = pow(t, 4) / 4 * r;
    // clang-format off
    //    xc      v_xc    yc      v_yc    za      v_za    yaw     v_yaw   r
    q <<  q_x_x,  q_x_vx, 0,      0,      0,      0,      0,      0,      0,
          q_x_vx, q_vx_vx,0,      0,      0,      0,      0,      0,      0,
          0,      0,      q_x_x,  q_x_vx, 0,      0,      0,      0,      0,
          0,      0,      q_x_vx, q_vx_vx,0,      0,      0,      0,      0,
          0,      0,      0,      0,      q_x_x,  q_x_vx, 0,      0,      0,
          0,      0,      0,      0,      q_x_vx, q_vx_vx,0,      0,      0,
          0,      0,      0,      0,      0,      0,      q_y_y,  q_y_vy, 0,
          0,      0,      0,      0,      0,      0,      q_y_vy, q_vy_vy,0,
          0,      0,      0,      0,      0,      0,      0,      0,      q_r;
    // clang-format on
    return q;
  };
  // update_R - measurement noise covariance matrix, scaled with the
  // magnitude of the measured position components
  r_xyz_factor = declare_parameter("ekf.r_xyz_factor", 0.05);
  r_yaw = declare_parameter("ekf.r_yaw", 0.02);
  auto u_r = [this](const Eigen::VectorXd & z) {
    Eigen::DiagonalMatrix<double, 4> r;
    double x = r_xyz_factor;
    r.diagonal() << abs(x * z[0]), abs(x * z[1]), abs(x * z[2]), r_yaw;
    return r;
  };
  // P - error estimate covariance matrix (identity initial guess)
  Eigen::DiagonalMatrix<double, 9> p0;
  p0.setIdentity();
  tracker_->ekf = ExtendedKalmanFilter{f, h, j_f, j_h, u_q, u_r, p0};

  // Subscriber with tf2 message_filter
  // tf2 relevant
  tf2_buffer_ = std::make_shared<tf2_ros::Buffer>(this->get_clock());
  // Create the timer interface before call to waitForTransform,
  // to avoid a tf2_ros::CreateTimerInterfaceException exception
  auto timer_interface = std::make_shared<tf2_ros::CreateTimerROS>(
    this->get_node_base_interface(), this->get_node_timers_interface());
  tf2_buffer_->setCreateTimerInterface(timer_interface);
  tf2_listener_ = std::make_shared<tf2_ros::TransformListener>(*tf2_buffer_);
  // subscriber and filter: armors are buffered until the transform to
  // target_frame_ is available, then armorsCallback fires
  armors_sub_.subscribe(this, "/detector/armors", rmw_qos_profile_sensor_data);
  target_frame_ = this->declare_parameter("target_frame", "odom");
  tf2_filter_ = std::make_shared<tf2_filter>(
    armors_sub_, *tf2_buffer_, target_frame_, 10, this->get_node_logging_interface(),
    this->get_node_clock_interface(), std::chrono::duration<int>(1));
  // Register a callback with tf2_ros::MessageFilter to be called when transforms are available
  tf2_filter_->registerCallback(&ArmorTrackerNode::armorsCallback, this);

  // Measurement publisher (for debug usage)
  info_pub_ = this->create_publisher<auto_aim_interfaces::msg::TrackerInfo>("/tracker/info", 10);
  // Publisher
  target_pub_ = this->create_publisher<auto_aim_interfaces::msg::Target>(
    "/tracker/target", rclcpp::SensorDataQoS());

  // Visualization Marker Publisher
  // See http://wiki.ros.org/rviz/DisplayTypes/Marker
  position_marker_.ns = "position";
  position_marker_.type = visualization_msgs::msg::Marker::SPHERE;
  position_marker_.scale.x = position_marker_.scale.y = position_marker_.scale.z = 0.1;
  position_marker_.color.a = 1.0;
  position_marker_.color.g = 1.0;
  linear_v_marker_.type = visualization_msgs::msg::Marker::ARROW;
  linear_v_marker_.ns = "linear_v";
  linear_v_marker_.scale.x = 0.03;
  linear_v_marker_.scale.y = 0.05;
  linear_v_marker_.color.a = 1.0;
  linear_v_marker_.color.r = 1.0;
  linear_v_marker_.color.g = 1.0;
  angular_v_marker_.type = visualization_msgs::msg::Marker::ARROW;
  angular_v_marker_.ns = "angular_v";
  angular_v_marker_.scale.x = 0.03;
  angular_v_marker_.scale.y = 0.05;
  angular_v_marker_.color.a = 1.0;
  angular_v_marker_.color.b = 1.0;
  angular_v_marker_.color.g = 1.0;
  armor_marker_.ns = "armors";
  armor_marker_.type = visualization_msgs::msg::Marker::CUBE;
  armor_marker_.scale.x = 0.03;
  armor_marker_.scale.z = 0.125;
  armor_marker_.color.a = 1.0;
  armor_marker_.color.r = 1.0;
  marker_pub_ = this->create_publisher<visualization_msgs::msg::MarkerArray>("/tracker/marker", 10);
}
// Per-message pipeline: transform detections into target_frame_, drop
// abnormal ones, run the tracker (init or update), then publish debug info,
// the target message and RViz markers.
void ArmorTrackerNode::armorsCallback(const auto_aim_interfaces::msg::Armors::SharedPtr armors_msg)
{
// Transform armor position from image frame to world coordinate
for (auto & armor : armors_msg->armors) {
geometry_msgs::msg::PoseStamped ps;
ps.header = armors_msg->header;
ps.pose = armor.pose;
try {
armor.pose = tf2_buffer_->transform(ps, target_frame_).pose;
} catch (const tf2::ExtrapolationException & ex) {
// Abort the whole message on a transform failure: a partially
// transformed armor list would mix coordinate frames.
RCLCPP_ERROR(get_logger(), "Error while transforming %s", ex.what());
return;
}
}
// Filter abnormal armors: plates more than 1.2 m above/below the odom
// origin or farther than max_armor_distance_ in the XOY plane.
armors_msg->armors.erase(
std::remove_if(
armors_msg->armors.begin(), armors_msg->armors.end(),
[this](const auto_aim_interfaces::msg::Armor & armor) {
return abs(armor.pose.position.z) > 1.2 ||
Eigen::Vector2d(armor.pose.position.x, armor.pose.position.y).norm() >
max_armor_distance_;
}),
armors_msg->armors.end());
// Init message
auto_aim_interfaces::msg::TrackerInfo info_msg;
auto_aim_interfaces::msg::Target target_msg;
rclcpp::Time time = armors_msg->header.stamp;
target_msg.header.stamp = time;
target_msg.header.frame_id = target_frame_;
// Update tracker
if (tracker_->tracker_state == Tracker::LOST) {
tracker_->init(armors_msg);
target_msg.tracking = false;
} else {
// dt_ is the gap between consecutive message stamps; it feeds the EKF
// process model and the lost-frame threshold.
dt_ = (time - last_time_).seconds();
tracker_->lost_thres = static_cast<int>(lost_time_thres_ / dt_);
tracker_->update(armors_msg);
// Publish Info
// NOTE(review): tracker_->measurement is only refreshed on a matched
// update; after a jump or a pure-prediction step this republishes the
// previous measurement — confirm that is intended for the debug topic.
info_msg.position_diff = tracker_->info_position_diff;
info_msg.yaw_diff = tracker_->info_yaw_diff;
info_msg.position.x = tracker_->measurement(0);
info_msg.position.y = tracker_->measurement(1);
info_msg.position.z = tracker_->measurement(2);
info_msg.yaw = tracker_->measurement(3);
info_pub_->publish(info_msg);
if (tracker_->tracker_state == Tracker::DETECTING) {
target_msg.tracking = false;
} else if (
tracker_->tracker_state == Tracker::TRACKING ||
tracker_->tracker_state == Tracker::TEMP_LOST) {
target_msg.tracking = true;
// Fill target message from the EKF state
// (layout: xc, v_xc, yc, v_yc, za, v_za, yaw, v_yaw, r)
const auto & state = tracker_->target_state;
target_msg.id = tracker_->tracked_id;
target_msg.armors_num = static_cast<int>(tracker_->tracked_armors_num);
target_msg.position.x = state(0);
target_msg.velocity.x = state(1);
target_msg.position.y = state(2);
target_msg.velocity.y = state(3);
target_msg.position.z = state(4);
target_msg.velocity.z = state(5);
target_msg.yaw = state(6);
target_msg.v_yaw = state(7);
target_msg.radius_1 = state(8);
target_msg.radius_2 = tracker_->another_r;
target_msg.dz = tracker_->dz;
}
}
last_time_ = time;
target_pub_->publish(target_msg);
publishMarkers(target_msg);
}
// Publish RViz markers visualizing the current target estimate: a sphere at
// the robot center, arrows for linear and angular velocity, and one cube per
// reconstructed armor plate. When not tracking, the markers are deleted.
void ArmorTrackerNode::publishMarkers(const auto_aim_interfaces::msg::Target & target_msg)
{
position_marker_.header = target_msg.header;
linear_v_marker_.header = target_msg.header;
angular_v_marker_.header = target_msg.header;
armor_marker_.header = target_msg.header;
visualization_msgs::msg::MarkerArray marker_array;
if (target_msg.tracking) {
double yaw = target_msg.yaw, r1 = target_msg.radius_1, r2 = target_msg.radius_2;
double xc = target_msg.position.x, yc = target_msg.position.y, za = target_msg.position.z;
double vx = target_msg.velocity.x, vy = target_msg.velocity.y, vz = target_msg.velocity.z;
double dz = target_msg.dz;
// Center sphere drawn halfway between the two armor-ring heights
position_marker_.action = visualization_msgs::msg::Marker::ADD;
position_marker_.pose.position.x = xc;
position_marker_.pose.position.y = yc;
position_marker_.pose.position.z = za + dz / 2;
// Linear velocity arrow: from the center to center + v
linear_v_marker_.action = visualization_msgs::msg::Marker::ADD;
linear_v_marker_.points.clear();
linear_v_marker_.points.emplace_back(position_marker_.pose.position);
geometry_msgs::msg::Point arrow_end = position_marker_.pose.position;
arrow_end.x += vx;
arrow_end.y += vy;
arrow_end.z += vz;
linear_v_marker_.points.emplace_back(arrow_end);
// Angular velocity shown as a vertical arrow scaled by v_yaw / pi
angular_v_marker_.action = visualization_msgs::msg::Marker::ADD;
angular_v_marker_.points.clear();
angular_v_marker_.points.emplace_back(position_marker_.pose.position);
arrow_end = position_marker_.pose.position;
arrow_end.z += target_msg.v_yaw / M_PI;
angular_v_marker_.points.emplace_back(arrow_end);
// One cube per armor plate, evenly spaced around the center
armor_marker_.action = visualization_msgs::msg::Marker::ADD;
armor_marker_.scale.y = tracker_->tracked_armor.type == "small" ? 0.135 : 0.23;
bool is_current_pair = true;
size_t a_n = target_msg.armors_num;
geometry_msgs::msg::Point p_a;
double r = 0;
for (size_t i = 0; i < a_n; i++) {
double tmp_yaw = yaw + i * (2 * M_PI / a_n);
// Only 4 armors has 2 radius and height
if (a_n == 4) {
// Alternate between the two radius/height pairs around the ring
r = is_current_pair ? r1 : r2;
p_a.z = za + (is_current_pair ? 0 : dz);
is_current_pair = !is_current_pair;
} else {
r = r1;
p_a.z = za;
}
p_a.x = xc - r * cos(tmp_yaw);
p_a.y = yc - r * sin(tmp_yaw);
armor_marker_.id = i;
armor_marker_.pose.position = p_a;
// Armor plates are mounted with a fixed pitch; the outpost's plates
// are tilted the opposite way.
tf2::Quaternion q;
q.setRPY(0, target_msg.id == "outpost" ? -0.26 : 0.26, tmp_yaw);
armor_marker_.pose.orientation = tf2::toMsg(q);
marker_array.markers.emplace_back(armor_marker_);
}
} else {
// Not tracking: delete all markers (one DELETE armor marker stands in
// for the whole "armors" namespace entry pushed below).
position_marker_.action = visualization_msgs::msg::Marker::DELETE;
linear_v_marker_.action = visualization_msgs::msg::Marker::DELETE;
angular_v_marker_.action = visualization_msgs::msg::Marker::DELETE;
armor_marker_.action = visualization_msgs::msg::Marker::DELETE;
marker_array.markers.emplace_back(armor_marker_);
}
marker_array.markers.emplace_back(position_marker_);
marker_array.markers.emplace_back(linear_v_marker_);
marker_array.markers.emplace_back(angular_v_marker_);
marker_pub_->publish(marker_array);
}
} // namespace rm_auto_aim
#include "rclcpp_components/register_node_macro.hpp"
// Register the component with class_loader.
// This acts as a sort of entry point, allowing the component to be discoverable when its library
// is being loaded into a running process.
RCLCPP_COMPONENTS_REGISTER_NODE(rm_auto_aim::ArmorTrackerNode)

View File

@ -0,0 +1,30 @@
cmake_minimum_required(VERSION 3.8)
project(auto_aim_interfaces)

if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
  add_compile_options(-Wall -Wextra -Wpedantic)
endif()

# find dependencies
find_package(ament_cmake REQUIRED)
find_package(rosidl_default_generators REQUIRED)
# BUG FIX: std_msgs is listed under DEPENDENCIES below (Armors.msg and
# Target.msg use std_msgs/Header) but was never found, which makes
# rosidl_generate_interfaces fail at configure time with
# "the passed dependency 'std_msgs' has not been found".
find_package(std_msgs REQUIRED)
find_package(geometry_msgs REQUIRED)

rosidl_generate_interfaces(${PROJECT_NAME}
  "msg/Armor.msg"
  "msg/Armors.msg"
  "msg/Target.msg"
  "msg/DebugLight.msg"
  "msg/DebugLights.msg"
  "msg/DebugArmor.msg"
  "msg/DebugArmors.msg"
  "msg/TrackerInfo.msg"
  DEPENDENCIES
  std_msgs
  geometry_msgs
)

ament_package()

View File

@ -0,0 +1,4 @@
string number
string type
float32 distance_to_image_center
geometry_msgs/Pose pose

View File

@ -0,0 +1,2 @@
std_msgs/Header header
Armor[] armors

View File

@ -0,0 +1,5 @@
int32 center_x
string type
float32 light_ratio
float32 center_distance
float32 angle

View File

@ -0,0 +1 @@
DebugArmor[] data

View File

@ -0,0 +1,4 @@
int32 center_x
bool is_light
float32 ratio
float32 angle

View File

@ -0,0 +1 @@
DebugLight[] data

View File

@ -0,0 +1,11 @@
std_msgs/Header header
bool tracking
string id
int32 armors_num
geometry_msgs/Point position
geometry_msgs/Vector3 velocity
float64 yaw
float64 v_yaw
float64 radius_1
float64 radius_2
float64 dz

View File

@ -0,0 +1,6 @@
# Difference between the current measurement and prediction
float64 position_diff
float64 yaw_diff
# Unfiltered position and yaw
geometry_msgs/Point position
float64 yaw

View File

@ -0,0 +1,21 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>auto_aim_interfaces</name>
<version>0.0.0</version>
<description>ROS 2 message definitions (armors, target, tracker debug info) for the RoboMaster auto-aim system</description>
<maintainer email="chen.junn@outlook.com">chenjun</maintainer>
<license>TODO: License declaration</license>
<buildtool_depend>ament_cmake</buildtool_depend>
<build_depend>rosidl_default_generators</build_depend>
<exec_depend>rosidl_default_runtime</exec_depend>
<member_of_group>rosidl_interface_packages</member_of_group>
<depend>geometry_msgs</depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Created with Vectornator (http://vectornator.io/) -->
<svg height="100%" stroke-miterlimit="10" style="fill-rule:nonzero;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;" version="1.1" viewBox="0 0 1024 1024" width="100%" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:vectornator="http://vectornator.io" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<linearGradient gradientTransform="matrix(1 0 0 1 6.39488e-14 157.46)" gradientUnits="userSpaceOnUse" id="LinearGradient" x1="375.972" x2="944.796" y1="453.158" y2="453.158">
<stop offset="0" stop-color="#ff8b00"/>
<stop offset="1" stop-color="#ff5900"/>
</linearGradient>
</defs>
<g id="图层-2" vectornator:layerName="图层 2">
<path d="M79.2038 285.449L524.109 285.848C524.109 285.848 651.69 302.769 654.969 439.05C658.247 575.332 521.965 594.533 521.965 594.533L292.315 596.568L242.362 778.529L101.844 777.892L181.788 485.967L512.496 484.668C512.496 484.668 542.064 478.698 541.78 438.895C541.496 399.092 503.683 395.965 503.683 395.965L340.206 395.68L406.734 471.875L248.235 472.328L79.2038 285.449Z" fill="#5a5655" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="图层-3" vectornator:layerName="图层 3">
<path d="M375.972 608.813L535.485 608.74L576.68 655.417L788.749 378.975L944.796 378.807L592.763 842.427" fill="url(#LinearGradient)" fill-rule="nonzero" opacity="1" stroke="#5a5655" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="0"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

@ -0,0 +1,12 @@
cmake_minimum_required(VERSION 3.8)
project(rm_auto_aim)
find_package(ament_cmake_auto REQUIRED)
if(BUILD_TESTING)
find_package(ament_lint_auto REQUIRED)
set(ament_cmake_copyright_FOUND TRUE)
ament_lint_auto_find_test_dependencies()
endif()
ament_auto_package()

View File

@ -0,0 +1,22 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>rm_auto_aim</name>
<version>0.0.0</version>
<description>Meta package grouping the armor_detector and armor_tracker packages of the RoboMaster auto-aim system</description>
<maintainer email="chen.junn@outlook.com">chenjun</maintainer>
<license>TODO: License declaration</license>
<buildtool_depend>ament_cmake</buildtool_depend>
<depend>auto_aim_interfaces</depend>
<depend>armor_detector</depend>
<depend>armor_tracker</depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

@ -1 +0,0 @@
Subproject commit 444da86d28d2506deaf840441b38f8963cd87c10

51
src/rm_gimbal_description/.gitignore vendored Normal file
View File

@ -0,0 +1,51 @@
devel/
logs/
build/
bin/
lib/
msg_gen/
srv_gen/
msg/*Action.msg
msg/*ActionFeedback.msg
msg/*ActionGoal.msg
msg/*ActionResult.msg
msg/*Feedback.msg
msg/*Goal.msg
msg/*Result.msg
msg/_*.py
build_isolated/
devel_isolated/
# Generated by dynamic reconfigure
*.cfgc
/cfg/cpp/
/cfg/*.py
# Ignore generated docs
*.dox
*.wikidoc
# eclipse stuff
.project
.cproject
# qcreator stuff
CMakeLists.txt.user
srv/_*.py
*.pcd
*.pyc
qtcreator-*
*.user
/planning/cfg
/planning/docs
/planning/src
*~
# Emacs
.#*
# Catkin custom files
CATKIN_IGNORE

View File

@ -0,0 +1,18 @@
cmake_minimum_required(VERSION 3.8)
project(rm_gimbal_description)
# find dependencies
find_package(ament_cmake REQUIRED)
# Install files
install(DIRECTORY
urdf
DESTINATION share/${PROJECT_NAME})
if(BUILD_TESTING)
find_package(ament_lint_auto REQUIRED)
set(ament_cmake_copyright_FOUND TRUE)
ament_lint_auto_find_test_dependencies()
endif()
ament_package()

View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,26 @@
# rm_gimbal_description
RoboMaster 视觉自瞄系统所需的 URDF
<img src="docs/rm_vision.svg" alt="rm_vision" width="200" height="200">
该项目为 [rm_vision](https://github.com/chenjunnn/rm_vision) 的子模块
## 坐标系定义
单位和方向请参考 https://www.ros.org/reps/rep-0103.html
odom: 以云台中心为原点的惯性系
yaw_joint: 表述云台的 yaw 轴与惯性系的旋转关系
pitch_joint: 表述云台的 pitch 轴与惯性系的旋转关系
camera_joint: 表述相机到惯性系的变换关系
camera_optical_joint: 表述以 z 轴为前方的相机坐标系转换为 x 轴为前方的相机坐标系的旋转关系
## 使用方法
修改 [urdf/rm_gimbal.urdf.xacro](urdf/rm_gimbal.urdf.xacro) 中的 `gimbal_camera_transfrom`
xyz 与 rpy 对应机器人云台上相机到云台中心的平移与旋转关系,可以由机械图纸测量得到,或在机器人上直接测量

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Created with Vectornator (http://vectornator.io/) -->
<svg height="100%" stroke-miterlimit="10" style="fill-rule:nonzero;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;" version="1.1" viewBox="0 0 1024 1024" width="100%" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:vectornator="http://vectornator.io" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<linearGradient gradientTransform="matrix(1 0 0 1 6.39488e-14 157.46)" gradientUnits="userSpaceOnUse" id="LinearGradient" x1="375.972" x2="944.796" y1="453.158" y2="453.158">
<stop offset="0" stop-color="#ff8b00"/>
<stop offset="1" stop-color="#ff5900"/>
</linearGradient>
</defs>
<g id="图层-2" vectornator:layerName="图层 2">
<path d="M79.2038 285.449L524.109 285.848C524.109 285.848 651.69 302.769 654.969 439.05C658.247 575.332 521.965 594.533 521.965 594.533L292.315 596.568L242.362 778.529L101.844 777.892L181.788 485.967L512.496 484.668C512.496 484.668 542.064 478.698 541.78 438.895C541.496 399.092 503.683 395.965 503.683 395.965L340.206 395.68L406.734 471.875L248.235 472.328L79.2038 285.449Z" fill="#5a5655" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="图层-3" vectornator:layerName="图层 3">
<path d="M375.972 608.813L535.485 608.74L576.68 655.417L788.749 378.975L944.796 378.807L592.763 842.427" fill="url(#LinearGradient)" fill-rule="nonzero" opacity="1" stroke="#5a5655" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="0"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

@ -0,0 +1,21 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>rm_gimbal_description</name>
<version>0.0.0</version>
<description>TODO: Package description</description>
<maintainer email="sink.chen@foxmail.com">chenjun</maintainer>
<license>TODO: License declaration</license>
<buildtool_depend>ament_cmake</buildtool_depend>
<exec_depend>launch_ros</exec_depend>
<exec_depend>xacro</exec_depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -0,0 +1,44 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Gimbal description for the RoboMaster auto-aim vision system.
     Frame semantics (see package README): odom is the inertial frame at the
     gimbal center; camera_optical_frame is the z-forward optical convention. -->
<robot name="rm_gimbal"
  xmlns:xacro="http://ros.org/wiki/xacro">

  <!-- Camera pose relative to the gimbal center; intended to be overridden
       at launch time (xyz:=... rpy:=...). -->
  <xacro:arg name="xyz" default="0.10 0 0.05" />
  <xacro:arg name="rpy" default="0 0 0" />

  <link name="odom" />

  <!-- Gimbal yaw rotation relative to the inertial frame (z axis). -->
  <link name="yaw_link" />
  <joint name="yaw_joint" type="continuous">
    <parent link="odom" />
    <child link="yaw_link" />
    <axis xyz="0 0 1" />
  </joint>

  <!-- Gimbal pitch rotation, stacked on the yaw link (y axis). -->
  <link name="pitch_link" />
  <joint name="pitch_joint" type="continuous">
    <parent link="yaw_link" />
    <child link="pitch_link" />
    <axis xyz="0 1 0" />
  </joint>

  <!-- Fixed camera mount; xyz/rpy come from the xacro args above. -->
  <link name="camera_link" />
  <joint name="camera_joint" type="fixed">
    <origin xyz="$(arg xyz)" rpy="$(arg rpy)" />
    <parent link="pitch_link" />
    <child link="camera_link" />
    <axis xyz="0 0 0" />
  </joint>

  <!-- Rotate the x-forward camera frame into the z-forward optical frame. -->
  <link name="camera_optical_frame" />
  <joint name="camera_optical_joint" type="fixed">
    <origin xyz="0 0 0" rpy="${-pi/2} 0 ${-pi/2}" />
    <parent link="camera_link" />
    <child link="camera_optical_frame" />
  </joint>
</robot>

@ -1 +0,0 @@
Subproject commit 60ec93a206d9705f60f8f8ffb5cffe41b5708ee3

33
src/rm_vision/.github/workflows/ci.yml vendored Normal file
View File

@ -0,0 +1,33 @@
# Continuous integration: build the rm_vision Docker image from ./Dockerfile
# and push it to Docker Hub on every push to main.
name: rm_vision CI
on:
  push:
    branches: [ main ]
jobs:
  docker-build-and-push:
    if: github.event_name == 'push'
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: chenjunnn
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./Dockerfile
          push: true
          # NOTE(review): "lastest" is a typo for "latest", but the README's
          # `docker pull`/`docker run` commands use this exact spelling —
          # fix both together or existing instructions break.
          tags: chenjunnn/rm_vision:lastest

41
src/rm_vision/Dockerfile Normal file
View File

@ -0,0 +1,41 @@
# Development/runtime image for the rm_vision stack, based on ROS 2 Humble.
FROM ros:humble-ros-base

# create workspace
RUN mkdir -p /ros_ws/src
WORKDIR /ros_ws/

# clone projects (shallow clones keep the image small)
RUN cd src && git clone https://github.com/chenjunnn/rm_auto_aim --depth=1 && \
    git clone https://github.com/chenjunnn/ros2_mindvision_camera --depth=1 && \
    git clone https://github.com/chenjunnn/ros2_hik_camera --depth=1 && \
    git clone https://github.com/chenjunnn/rm_gimbal_description --depth=1 && \
    git clone https://github.com/chenjunnn/rm_serial_driver --depth=1 && \
    git clone https://github.com/chenjunnn/rm_vision --depth=1

# install dependencies and some tools, then drop the apt cache to shrink the layer
RUN apt-get update && rosdep install --from-paths src --ignore-src -r -y && \
    apt-get install ros-humble-foxglove-bridge wget htop vim -y && \
    rm -rf /var/lib/apt/lists/*

# setup zsh (oh-my-zsh via zsh-in-docker) with autosuggestions and syntax
# highlighting, and make it the default shell
# NOTE(review): the trailing `rm -rf /var/lib/apt/lists/*` in this layer is a
# no-op — nothing in it runs apt.
RUN sh -c "$(wget -O- https://github.com/deluan/zsh-in-docker/releases/download/v1.1.2/zsh-in-docker.sh)" -- \
    -t jispwoso -p git \
    -p https://github.com/zsh-users/zsh-autosuggestions \
    -p https://github.com/zsh-users/zsh-syntax-highlighting && \
    chsh -s /bin/zsh && \
    rm -rf /var/lib/apt/lists/*

# build the workspace in Release mode
RUN . /opt/ros/humble/setup.sh && colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release

# setup .zshrc: source the workspace and enable ros2/colcon tab completion
RUN echo 'export TERM=xterm-256color\n\
source /ros_ws/install/setup.zsh\n\
eval "$(register-python-argcomplete3 ros2)"\n\
eval "$(register-python-argcomplete3 colcon)"\n'\
>> /root/.zshrc

# source entrypoint setup: make the default entrypoint source the workspace
RUN sed --in-place --expression \
  '$isource "/ros_ws/install/setup.bash"' \
  /ros_entrypoint.sh

21
src/rm_vision/LICENSE Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022 ChenJun
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

65
src/rm_vision/README.md Normal file
View File

@ -0,0 +1,65 @@
# rm_vision
<img src="docs/rm_vision.svg" alt="rm_vision" width="200" height="200">
## Overview
rm_vision 项目旨在为 RoboMaster 队伍提供一个规范、易用、鲁棒、高性能的视觉框架方案,为 RM 开源生态的建设添砖加瓦
如果本开源项目对于贵战队的视觉技术发展起到了实质性的帮助作用,请在机器人上贴上以下标签以助力该项目的推广,十分感激!
[<img src="docs/rm_vision_inside.svg" alt="rm_vision_inside" width="100" height="100">](docs/rm_vision_inside.svg)
QQ 交流群797203945
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
[![Build Status](https://github.com/chenjunnn/rm_vision/actions/workflows/ci.yml/badge.svg)](https://github.com/chenjunnn/rm_vision/actions/workflows/ci.yml)
## 包含项目
装甲板自动瞄准算法模块 https://github.com/chenjunnn/rm_auto_aim
MindVision 相机模块 https://github.com/chenjunnn/ros2_mindvision_camera
HikVision 相机模块 https://github.com/chenjunnn/ros2_hik_camera
机器人云台描述文件 https://github.com/chenjunnn/rm_gimbal_description
串口通讯模块 https://github.com/chenjunnn/rm_serial_driver
视觉算法仿真器 https://github.com/chenjunnn/rm_vision_simulator
## 通过 Docker 部署
拉取镜像
```
docker pull chenjunnn/rm_vision:lastest
```
构建开发容器
```
docker run -it --name rv_devel \
--privileged --network host \
-v /dev:/dev -v $HOME/.ros:/root/.ros -v ws:/ros_ws \
chenjunnn/rm_vision:lastest \
ros2 launch foxglove_bridge foxglove_bridge_launch.xml
```
构建运行容器
```
docker run -it --name rv_runtime \
--privileged --network host --restart always \
-v /dev:/dev -v $HOME/.ros:/root/.ros -v ws:/ros_ws \
chenjunnn/rm_vision:lastest \
ros2 launch rm_vision_bringup vision_bringup.launch.py
```
TBD
## 源码编译
TBD

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Created with Vectornator (http://vectornator.io/) -->
<svg height="100%" stroke-miterlimit="10" style="fill-rule:nonzero;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;" version="1.1" viewBox="0 0 1024 1024" width="100%" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:vectornator="http://vectornator.io" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<linearGradient gradientTransform="matrix(1 0 0 1 6.39488e-14 157.46)" gradientUnits="userSpaceOnUse" id="LinearGradient" x1="375.972" x2="944.796" y1="453.158" y2="453.158">
<stop offset="0" stop-color="#ff8b00"/>
<stop offset="1" stop-color="#ff5900"/>
</linearGradient>
</defs>
<g id="图层-2" vectornator:layerName="图层 2">
<path d="M79.2038 285.449L524.109 285.848C524.109 285.848 651.69 302.769 654.969 439.05C658.247 575.332 521.965 594.533 521.965 594.533L292.315 596.568L242.362 778.529L101.844 777.892L181.788 485.967L512.496 484.668C512.496 484.668 542.064 478.698 541.78 438.895C541.496 399.092 503.683 395.965 503.683 395.965L340.206 395.68L406.734 471.875L248.235 472.328L79.2038 285.449Z" fill="#5a5655" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="图层-3" vectornator:layerName="图层 3">
<path d="M375.972 608.813L535.485 608.74L576.68 655.417L788.749 378.975L944.796 378.807L592.763 842.427" fill="url(#LinearGradient)" fill-rule="nonzero" opacity="1" stroke="#5a5655" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="0"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.6 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 7.9 KiB

View File

@ -0,0 +1,16 @@
cmake_minimum_required(VERSION 3.8)
project(rm_vision_bringup)

# Launch/config-only package: nothing to compile, only files to install.
find_package(ament_cmake_auto REQUIRED)

if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  # Pretend the copyright linter already ran so ament_lint_auto skips it.
  set(ament_cmake_copyright_FOUND TRUE)
  ament_lint_auto_find_test_dependencies()
endif()

# Install launch/ and config/ into the package share directory.
ament_auto_package(
  INSTALL_TO_SHARE
  launch
  config
)

View File

@ -0,0 +1,26 @@
image_width: 1280
image_height: 1024
camera_name: narrow_stereo
camera_matrix:
rows: 3
cols: 3
data: [1560.46759, 0. , 627.94252,
0. , 1563.0218 , 519.00352,
0. , 0. , 1. ]
distortion_model: plumb_bob
distortion_coefficients:
rows: 1
cols: 5
data: [-0.059454, 0.136857, -0.000134, 0.001950, 0.000000]
rectification_matrix:
rows: 3
cols: 3
data: [1., 0., 0.,
0., 1., 0.,
0., 0., 1.]
projection_matrix:
rows: 3
cols: 4
data: [1550.59436, 0. , 629.28898, 0. ,
0. , 1554.92236, 518.36577, 0. ,
0. , 0. , 1. , 0. ]

View File

@ -0,0 +1,9 @@
camera: mv # hik
odom2camera:
xyz: "\"0.0 0.0 0.0025\"" # ""\"0.135 0.078 0.08\"" "\"0.107 -0.05 0.052\""
rpy: "\"0.0 0.0 0.0\"" # "\"0.0 0.157 0.0\""
detector_log_level: INFO
tracker_log_level: INFO
serial_log_level: INFO

View File

@ -0,0 +1,48 @@
/camera_node:
ros__parameters:
camera_info_url: package://rm_vision_bringup/config/camera_info.yaml
exposure_time: 4500
gain: 6.0
flip_image: true
/serial_driver:
ros__parameters:
timestamp_offset: 0.003
device_name: /dev/ttyACM0
baud_rate: 115200
flow_control: none
parity: none
stop_bits: "1"
/armor_detector:
ros__parameters:
debug: true
detect_color: 0
binary_thres: 80
light.min_ratio: 0.1
armor.min_light_ratio: 0.75
classifier_threshold: 0.8
ignore_classes: ["negative"]
/armor_tracker:
ros__parameters:
target_frame: odom
max_armor_distance: 10.0
ekf:
sigma2_q_xyz: 0.01 #0.01
sigma2_q_yaw: 5.0 #1.0
sigma2_q_r: 80.0
r_xyz_factor: 4e-4
r_yaw: 5e-3
tracker:
max_match_distance: 0.25
max_match_yaw_diff: 0.85
tracking_thres: 5
lost_time_thres: 1.0

View File

@ -0,0 +1,32 @@
import os

import yaml
from ament_index_python.packages import get_package_share_directory
from launch.substitutions import Command
from launch_ros.actions import Node

# Shared launch configuration for the rm_vision_bringup launch files.
# Importing this module loads config/launch_params.yaml and builds the
# launch actions that every bringup variant reuses.

# User-tunable launch parameters: camera choice, odom->camera transform,
# per-node log levels (see config/launch_params.yaml).
_params_path = os.path.join(
    get_package_share_directory('rm_vision_bringup'), 'config', 'launch_params.yaml')
with open(_params_path, 'r') as _params_file:
    # Use a context manager so the file handle is closed (the original
    # passed a bare open() to yaml.safe_load and leaked it).
    launch_params = yaml.safe_load(_params_file)

# Generate the URDF at launch time, substituting the odom->camera transform
# from the params file into the xacro arguments.
robot_description = Command(['xacro ', os.path.join(
    get_package_share_directory('rm_gimbal_description'), 'urdf', 'rm_gimbal.urdf.xacro'),
    ' xyz:=', launch_params['odom2camera']['xyz'], ' rpy:=', launch_params['odom2camera']['rpy']])

# Publishes the tf tree described by the URDF.
robot_state_publisher = Node(
    package='robot_state_publisher',
    executable='robot_state_publisher',
    parameters=[{'robot_description': robot_description,
                 'publish_frequency': 1000.0}]
)

# Single parameter file shared by all nodes.
node_params = os.path.join(
    get_package_share_directory('rm_vision_bringup'), 'config', 'node_params.yaml')

# Armor tracker node; its log level comes from launch_params.yaml.
tracker_node = Node(
    package='armor_tracker',
    executable='armor_tracker_node',
    output='both',
    emulate_tty=True,
    parameters=[node_params],
    ros_arguments=['--log-level', 'armor_tracker:=' + launch_params['tracker_log_level']],
)

View File

@ -0,0 +1,27 @@
import os
import sys

from ament_index_python.packages import get_package_share_directory

# Make the sibling `common` module (shared launch configuration) importable.
sys.path.append(os.path.join(get_package_share_directory('rm_vision_bringup'), 'launch'))


def generate_launch_description():
    """Launch the pipeline without hardware: robot_state_publisher plus the
    armor detector and tracker — no camera driver, no serial driver."""
    from common import launch_params, robot_state_publisher, node_params, tracker_node
    from launch_ros.actions import Node
    from launch import LaunchDescription

    detector_node = Node(
        package='armor_detector',
        executable='armor_detector_node',
        emulate_tty=True,
        output='both',
        parameters=[node_params],
        # Pass the log level via ros_arguments (launch_ros prepends the
        # '--ros-args' flag itself), matching how common.py configures
        # tracker_node; the original spelled out arguments=['--ros-args', ...].
        ros_arguments=['--log-level',
                       'armor_detector:=' + launch_params['detector_log_level']],
    )

    return LaunchDescription([
        robot_state_publisher,
        detector_node,
        tracker_node,
    ])

View File

@ -0,0 +1,82 @@
import os
import sys

from ament_index_python.packages import get_package_share_directory

# Make the sibling `common` module (shared launch configuration) importable.
sys.path.append(os.path.join(get_package_share_directory('rm_vision_bringup'), 'launch'))


def generate_launch_description():
    """Bring up the full vision pipeline: the selected camera driver and the
    armor detector composed into one container (intra-process transport),
    plus the serial driver and armor tracker started with a short delay.

    The camera driver is selected by the `camera` key of
    config/launch_params.yaml ('hik' or 'mv').
    """
    from common import node_params, launch_params, robot_state_publisher, tracker_node
    from launch_ros.descriptions import ComposableNode
    from launch_ros.actions import ComposableNodeContainer, Node
    from launch.actions import TimerAction, Shutdown
    from launch import LaunchDescription

    def get_camera_node(package, plugin):
        # Camera driver as a composable node so frames can reach the detector
        # through intra-process communication.
        return ComposableNode(
            package=package,
            plugin=plugin,
            name='camera_node',
            parameters=[node_params],
            extra_arguments=[{'use_intra_process_comms': True}]
        )

    def get_camera_detector_container(camera_node):
        # Compose camera + detector into a single process.
        return ComposableNodeContainer(
            name='camera_detector_container',
            namespace='',
            package='rclcpp_components',
            executable='component_container',
            composable_node_descriptions=[
                camera_node,
                ComposableNode(
                    package='armor_detector',
                    plugin='rm_auto_aim::ArmorDetectorNode',
                    name='armor_detector',
                    parameters=[node_params],
                    extra_arguments=[{'use_intra_process_comms': True}]
                )
            ],
            output='screen',
            emulate_tty=True,
            # launch_ros prepends '--ros-args' to ros_arguments itself; the
            # original passed it explicitly (redundant), common.py does not.
            ros_arguments=['--log-level',
                           'armor_detector:=' + launch_params['detector_log_level']],
            on_exit=Shutdown(),
        )

    hik_camera_node = get_camera_node('hik_camera', 'hik_camera::HikCameraNode')
    mv_camera_node = get_camera_node('mindvision_camera', 'mindvision_camera::MVCameraNode')
    if launch_params['camera'] == 'hik':
        cam_detector = get_camera_detector_container(hik_camera_node)
    elif launch_params['camera'] == 'mv':
        cam_detector = get_camera_detector_container(mv_camera_node)
    else:
        # Fail fast with a clear message; the original fell through and
        # crashed later with a NameError on `cam_detector`.
        raise ValueError(
            "Unknown camera type '{}' in launch_params.yaml "
            "(expected 'hik' or 'mv')".format(launch_params['camera']))

    serial_driver_node = Node(
        package='rm_serial_driver',
        executable='rm_serial_driver_node',
        name='serial_driver',
        output='both',
        emulate_tty=True,
        parameters=[node_params],
        on_exit=Shutdown(),
        ros_arguments=['--log-level',
                       'serial_driver:=' + launch_params['serial_log_level']],
    )

    # Give the camera/detector container a head start before opening the
    # serial port and starting the tracker.
    delay_serial_node = TimerAction(
        period=1.5,
        actions=[serial_driver_node],
    )
    delay_tracker_node = TimerAction(
        period=2.0,
        actions=[tracker_node],
    )

    return LaunchDescription([
        robot_state_publisher,
        cam_detector,
        delay_serial_node,
        delay_tracker_node,
    ])

View File

@ -0,0 +1,18 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>rm_vision_bringup</name>
<version>0.0.0</version>
<description>TODO: Package description</description>
<maintainer email="chen.junn@outlook.com">chenjun</maintainer>
<license>TODO: License declaration</license>
<buildtool_depend>ament_cmake</buildtool_depend>
<depend>rm_auto_aim</depend>
<depend>rm_serial_driver</depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

@ -1 +0,0 @@
Subproject commit ebe1f1a934efc0b1fcb75758c0a34fdb56dcd0f3

View File

@ -0,0 +1,18 @@
---
Language: Cpp
BasedOnStyle: Google
AccessModifierOffset: -2
AlignAfterOpenBracket: AlwaysBreak
BraceWrapping:
AfterClass: true
AfterFunction: true
AfterNamespace: true
AfterStruct: true
BreakBeforeBraces: Custom
ColumnLimit: 100
ConstructorInitializerIndentWidth: 0
ContinuationIndentWidth: 2
DerivePointerAlignment: false
PointerAlignment: Middle
ReflowComments: false

View File

@ -0,0 +1,55 @@
---
Checks: '-*,
performance-*,
-performance-unnecessary-value-param,
llvm-namespace-comment,
modernize-redundant-void-arg,
modernize-use-nullptr,
modernize-use-default,
modernize-use-override,
modernize-loop-convert,
modernize-make-shared,
modernize-make-unique,
misc-unused-parameters,
readability-named-parameter,
readability-redundant-smartptr-get,
readability-redundant-string-cstr,
readability-simplify-boolean-expr,
readability-container-size-empty,
readability-identifier-naming,
'
HeaderFilterRegex: ''
AnalyzeTemporaryDtors: false
CheckOptions:
- key: llvm-namespace-comment.ShortNamespaceLines
value: '10'
- key: llvm-namespace-comment.SpacesBeforeComments
value: '2'
- key: misc-unused-parameters.StrictMode
value: '1'
- key: readability-braces-around-statements.ShortStatementLines
value: '2'
# type names
- key: readability-identifier-naming.ClassCase
value: CamelCase
- key: readability-identifier-naming.EnumCase
value: CamelCase
- key: readability-identifier-naming.UnionCase
value: CamelCase
# method names
- key: readability-identifier-naming.MethodCase
value: camelBack
# variable names
- key: readability-identifier-naming.VariableCase
value: lower_case
- key: readability-identifier-naming.ClassMemberSuffix
value: '_'
# const static or global variables are UPPER_CASE
- key: readability-identifier-naming.EnumConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.StaticConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.ClassConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.GlobalVariableCase
value: UPPER_CASE

View File

@ -0,0 +1,67 @@
cmake_minimum_required(VERSION 3.8)
project(hik_camera)

## Use C++14
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

## By adding -Wall and -Werror, the compiler does not ignore warnings anymore,
## enforcing cleaner code.
add_definitions(-Wall -Werror)

## Export compile commands for clangd
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

find_package(ament_cmake_auto REQUIRED)
ament_auto_find_build_dependencies()

ament_auto_add_library(${PROJECT_NAME} SHARED
  src/hik_camera_node.cpp
)

## Vendored HikVision SDK headers
target_include_directories(${PROJECT_NAME} PUBLIC hikSDK/include)

## Pick the prebuilt SDK binaries matching the target architecture and install
## them next to the package libraries so the runtime linker can find them.
if(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64")
  target_link_directories(${PROJECT_NAME} PUBLIC hikSDK/lib/amd64)
  install(
    DIRECTORY hikSDK/lib/amd64/
    DESTINATION lib
  )
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64")
  target_link_directories(${PROJECT_NAME} PUBLIC hikSDK/lib/arm64)
  install(
    DIRECTORY hikSDK/lib/arm64/
    DESTINATION lib
  )
else()
  # Report the same variable the branches above actually test; the original
  # printed CMAKE_HOST_SYSTEM_PROCESSOR, which can differ when cross-compiling.
  # Also fixes the "Unsupport" typo in the message.
  message(FATAL_ERROR "Unsupported system architecture: ${CMAKE_SYSTEM_PROCESSOR}!")
endif()

target_link_libraries(${PROJECT_NAME}
  FormatConversion
  MediaProcess
  MvCameraControl
  MVRender
  MvUsb3vTL
)

rclcpp_components_register_node(${PROJECT_NAME}
  PLUGIN hik_camera::HikCameraNode
  EXECUTABLE ${PROJECT_NAME}_node
)

if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  list(APPEND AMENT_LINT_AUTO_EXCLUDE
    ament_cmake_copyright
    ament_cmake_cpplint
    ament_cmake_uncrustify
  )
  ament_lint_auto_find_test_dependencies()
endif()

ament_auto_package(
  INSTALL_TO_SHARE
  config
  launch
)

View File

@ -0,0 +1,14 @@
# ros2_hik_camera
A ROS 2 package for Hikvision USB3.0 industrial cameras
## Usage
```
ros2 launch hik_camera hik_camera.launch.py
```
## Params
- exposure_time
- gain

View File

@ -0,0 +1,26 @@
image_width: 1280
image_height: 1024
camera_name: narrow_stereo
camera_matrix:
rows: 3
cols: 3
data: [1241.71015, 0. , 623.06833,
0. , 1243.71255, 481.6889 ,
0. , 0. , 1. ]
distortion_model: plumb_bob
distortion_coefficients:
rows: 1
cols: 5
data: [-0.067569, 0.064478, -0.000970, -0.001436, 0.000000]
rectification_matrix:
rows: 3
cols: 3
data: [1., 0., 0.,
0., 1., 0.,
0., 0., 1.]
projection_matrix:
rows: 3
cols: 4
data: [1223.57971, 0. , 620.78488, 0. ,
0. , 1230.05701, 480.10211, 0. ,
0. , 0. , 1. , 0. ]

View File

@ -0,0 +1,6 @@
/hik_camera:
ros__parameters:
camera_name: narrow_stereo
exposure_time: 4500
gain: 6.0

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,104 @@
#ifndef _MV_ERROR_DEFINE_H_
#define _MV_ERROR_DEFINE_H_

/* Error-code definitions for the HikVision MvCameraControl camera SDK.
 * Codes are grouped by subsystem; each group occupies a fixed value range. */

/********************************************************************/
/// \~chinese
/// \name 正确码定义
/// @{
/// \~english
/// \name Definition of correct code
/// @{
#define MV_OK 0x00000000 ///< \~chinese 成功,无错误 \~english Succeeded, no error
/// @}

/********************************************************************/
/// \~chinese
/// \name 通用错误码定义:范围0x80000000-0x800000FF
/// @{
/// \~english
/// \name Definition of General error code
/// @{
#define MV_E_HANDLE 0x80000000 ///< \~chinese 错误或无效的句柄 \~english Error or invalid handle
#define MV_E_SUPPORT 0x80000001 ///< \~chinese 不支持的功能 \~english Not supported function
#define MV_E_BUFOVER 0x80000002 ///< \~chinese 缓存已满 \~english Buffer overflow
#define MV_E_CALLORDER 0x80000003 ///< \~chinese 函数调用顺序错误 \~english Function calling order error
#define MV_E_PARAMETER 0x80000004 ///< \~chinese 错误的参数 \~english Incorrect parameter
#define MV_E_RESOURCE 0x80000006 ///< \~chinese 资源申请失败 \~english Applying resource failed
#define MV_E_NODATA 0x80000007 ///< \~chinese 无数据 \~english No data
#define MV_E_PRECONDITION 0x80000008 ///< \~chinese 前置条件有误,或运行环境已发生变化 \~english Precondition error, or running environment changed
#define MV_E_VERSION 0x80000009 ///< \~chinese 版本不匹配 \~english Version mismatches
#define MV_E_NOENOUGH_BUF 0x8000000A ///< \~chinese 传入的内存空间不足 \~english Insufficient memory
#define MV_E_ABNORMAL_IMAGE 0x8000000B ///< \~chinese 异常图像,可能是丢包导致图像不完整 \~english Abnormal image, maybe incomplete image because of lost packet
#define MV_E_LOAD_LIBRARY 0x8000000C ///< \~chinese 动态导入DLL失败 \~english Load library failed
#define MV_E_NOOUTBUF 0x8000000D ///< \~chinese 没有可输出的缓存 \~english No available buffer
#define MV_E_UNKNOW 0x800000FF ///< \~chinese 未知的错误 \~english Unknown error
/// @}

/********************************************************************/
/// \~chinese
/// \name GenICam系列错误:范围0x80000100-0x800001FF
/// @{
/// \~english
/// \name GenICam Series Error Codes: Range from 0x80000100 to 0x800001FF
/// @{
#define MV_E_GC_GENERIC 0x80000100 ///< \~chinese 通用错误 \~english General error
#define MV_E_GC_ARGUMENT 0x80000101 ///< \~chinese 参数非法 \~english Illegal parameters
#define MV_E_GC_RANGE 0x80000102 ///< \~chinese 值超出范围 \~english The value is out of range
#define MV_E_GC_PROPERTY 0x80000103 ///< \~chinese 属性 \~english Property
#define MV_E_GC_RUNTIME 0x80000104 ///< \~chinese 运行环境有问题 \~english Running environment error
#define MV_E_GC_LOGICAL 0x80000105 ///< \~chinese 逻辑错误 \~english Logical error
#define MV_E_GC_ACCESS 0x80000106 ///< \~chinese 节点访问条件有误 \~english Node accessing condition error
#define MV_E_GC_TIMEOUT 0x80000107 ///< \~chinese 超时 \~english Timeout
#define MV_E_GC_DYNAMICCAST 0x80000108 ///< \~chinese 转换异常 \~english Transformation exception
#define MV_E_GC_UNKNOW 0x800001FF ///< \~chinese GenICam未知错误 \~english GenICam unknown error
/// @}

/********************************************************************/
/// \~chinese
/// \name GigE_STATUS对应的错误码:范围0x80000200-0x800002FF
/// @{
/// \~english
/// \name GigE_STATUS Error Codes: Range from 0x80000200 to 0x800002FF
/// @{
#define MV_E_NOT_IMPLEMENTED 0x80000200 ///< \~chinese 命令不被设备支持 \~english The command is not supported by device
#define MV_E_INVALID_ADDRESS 0x80000201 ///< \~chinese 访问的目标地址不存在 \~english The target address being accessed does not exist
#define MV_E_WRITE_PROTECT 0x80000202 ///< \~chinese 目标地址不可写 \~english The target address is not writable
#define MV_E_ACCESS_DENIED 0x80000203 ///< \~chinese 设备无访问权限 \~english No permission
#define MV_E_BUSY 0x80000204 ///< \~chinese 设备忙,或网络断开 \~english Device is busy, or network disconnected
#define MV_E_PACKET 0x80000205 ///< \~chinese 网络包数据错误 \~english Network data packet error
#define MV_E_NETER 0x80000206 ///< \~chinese 网络相关错误 \~english Network error
#define MV_E_IP_CONFLICT 0x80000221 ///< \~chinese 设备IP冲突 \~english Device IP conflict
/// @}

/********************************************************************/
/// \~chinese
/// \name USB_STATUS对应的错误码:范围0x80000300-0x800003FF
/// @{
/// \~english
/// \name USB_STATUS Error Codes: Range from 0x80000300 to 0x800003FF
/// @{
#define MV_E_USB_READ 0x80000300 ///< \~chinese 读usb出错 \~english Reading USB error
#define MV_E_USB_WRITE 0x80000301 ///< \~chinese 写usb出错 \~english Writing USB error
#define MV_E_USB_DEVICE 0x80000302 ///< \~chinese 设备异常 \~english Device exception
#define MV_E_USB_GENICAM 0x80000303 ///< \~chinese GenICam相关错误 \~english GenICam error
#define MV_E_USB_BANDWIDTH 0x80000304 ///< \~chinese 带宽不足 该错误码新增 \~english Insufficient bandwidth, this error code is newly added
#define MV_E_USB_DRIVER 0x80000305 ///< \~chinese 驱动不匹配或者未装驱动 \~english Driver mismatch or unmounted drive
#define MV_E_USB_UNKNOW 0x800003FF ///< \~chinese USB未知的错误 \~english USB unknown error
/// @}

/********************************************************************/
/// \~chinese
/// \name 升级时对应的错误码:范围0x80000400-0x800004FF
/// @{
/// \~english
/// \name Upgrade Error Codes: Range from 0x80000400 to 0x800004FF
/// @{
#define MV_E_UPG_FILE_MISMATCH 0x80000400 ///< \~chinese 升级固件不匹配 \~english Firmware mismatches
#define MV_E_UPG_LANGUSGE_MISMATCH 0x80000401 ///< \~chinese 升级固件语言不匹配 \~english Firmware language mismatches
#define MV_E_UPG_CONFLICT 0x80000402 ///< \~chinese 升级冲突(设备已经在升级了再次请求升级即返回此错误) \~english Upgrading conflicted (repeated upgrading requests during device upgrade)
#define MV_E_UPG_INNER_ERR 0x80000403 ///< \~chinese 升级时相机内部出现错误 \~english Camera internal error during upgrade
#define MV_E_UPG_UNKNOW 0x800004FF ///< \~chinese 升级时未知错误 \~english Unknown error during upgrade
/// @}

#endif //_MV_ERROR_DEFINE_H_

View File

@ -0,0 +1,93 @@
#ifndef _MV_ISP_ERROR_DEFINE_H_
#define _MV_ISP_ERROR_DEFINE_H_

/************************************************************************
 * Error codes of the ISP (image signal processing) algorithm library
 ************************************************************************/

// General
#define MV_ALG_OK 0x00000000 // Processing succeeded
#define MV_ALG_ERR 0x10000000 // Unspecified error

// Capability checks
#define MV_ALG_E_ABILITY_ARG 0x10000001 // Invalid parameter in the capability set

// Memory checks
#define MV_ALG_E_MEM_NULL 0x10000002 // Memory address is NULL
#define MV_ALG_E_MEM_ALIGN 0x10000003 // Memory alignment does not meet requirements
#define MV_ALG_E_MEM_LACK 0x10000004 // Insufficient memory
#define MV_ALG_E_MEM_SIZE_ALIGN 0x10000005 // Memory size does not meet alignment requirements
#define MV_ALG_E_MEM_ADDR_ALIGN 0x10000006 // Memory address does not meet alignment requirements

// Image checks
#define MV_ALG_E_IMG_FORMAT 0x10000007 // Incorrect or unsupported image format
#define MV_ALG_E_IMG_SIZE 0x10000008 // Image width/height incorrect or out of range
#define MV_ALG_E_IMG_STEP 0x10000009 // Image width/height does not match the step parameter
#define MV_ALG_E_IMG_DATA_NULL 0x1000000A // Image data buffer address is NULL

// Input/output parameter checks
#define MV_ALG_E_CFG_TYPE 0x1000000B // Incorrect parameter type for set/get
#define MV_ALG_E_CFG_SIZE 0x1000000C // Incorrect input/output struct size for set/get
#define MV_ALG_E_PRC_TYPE 0x1000000D // Incorrect processing type
#define MV_ALG_E_PRC_SIZE 0x1000000E // Incorrect input/output parameter size during processing
#define MV_ALG_E_FUNC_TYPE 0x1000000F // Incorrect sub-processing type
#define MV_ALG_E_FUNC_SIZE 0x10000010 // Incorrect input/output parameter size during sub-processing

// Runtime parameter checks
#define MV_ALG_E_PARAM_INDEX 0x10000011 // Incorrect index parameter
#define MV_ALG_E_PARAM_VALUE 0x10000012 // value parameter incorrect or out of range
#define MV_ALG_E_PARAM_NUM 0x10000013 // Incorrect param_num parameter

// API call checks
#define MV_ALG_E_NULL_PTR 0x10000014 // Function argument pointer is NULL
#define MV_ALG_E_OVER_MAX_MEM 0x10000015 // Exceeds the maximum allowed memory
#define MV_ALG_E_CALL_BACK 0x10000016 // Callback function error

// Library licensing / encryption checks
#define MV_ALG_E_ENCRYPT 0x10000017 // Encryption error
#define MV_ALG_E_EXPIRE 0x10000018 // Algorithm library usage period (license) error

// Basic error types returned by internal modules
#define MV_ALG_E_BAD_ARG 0x10000019 // Parameter out of range
#define MV_ALG_E_DATA_SIZE 0x1000001A // Incorrect data size
#define MV_ALG_E_STEP 0x1000001B // Incorrect data step

// CPU instruction-set support
#define MV_ALG_E_CPUID 0x1000001C // CPU does not support the instruction set used by the optimized code
#define MV_ALG_WARNING 0x1000001D // Warning
#define MV_ALG_E_TIME_OUT 0x1000001E // Algorithm library timeout
#define MV_ALG_E_LIB_VERSION 0x1000001F // Incorrect algorithm library version
#define MV_ALG_E_MODEL_VERSION 0x10000020 // Incorrect model version
#define MV_ALG_E_GPU_MEM_ALLOC 0x10000021 // GPU memory allocation error
#define MV_ALG_E_FILE_NON_EXIST 0x10000022 // File does not exist
#define MV_ALG_E_NONE_STRING 0x10000023 // String is empty
#define MV_ALG_E_IMAGE_CODEC 0x10000024 // Image codec error
#define MV_ALG_E_FILE_OPEN 0x10000025 // Failed to open file
#define MV_ALG_E_FILE_READ 0x10000026 // File read error
#define MV_ALG_E_FILE_WRITE 0x10000027 // File write error
#define MV_ALG_E_FILE_READ_SIZE 0x10000028 // Incorrect file read size
#define MV_ALG_E_FILE_TYPE 0x10000029 // Incorrect file type
#define MV_ALG_E_MODEL_TYPE 0x1000002A // Incorrect model type
#define MV_ALG_E_MALLOC_MEM 0x1000002B // Memory allocation error
#define MV_ALG_E_BIND_CORE_FAILED 0x1000002C // Failed to bind thread to CPU core

// Denoise-specific error codes
#define MV_ALG_E_DENOISE_NE_IMG_FORMAT 0x10402001 // Noise-profile image format error
#define MV_ALG_E_DENOISE_NE_FEATURE_TYPE 0x10402002 // Noise-feature type error
#define MV_ALG_E_DENOISE_NE_PROFILE_NUM 0x10402003 // Incorrect number of noise profiles
#define MV_ALG_E_DENOISE_NE_GAIN_NUM 0x10402004 // Incorrect number of noise-profile gains
#define MV_ALG_E_DENOISE_NE_GAIN_VAL 0x10402005 // Invalid gain value for the noise curve
#define MV_ALG_E_DENOISE_NE_BIN_NUM 0x10402006 // Incorrect number of bins in the noise curve
#define MV_ALG_E_DENOISE_NE_INIT_GAIN 0x10402007 // Incorrect initial gain for noise estimation
#define MV_ALG_E_DENOISE_NE_NOT_INIT 0x10402008 // Noise estimation not initialized
#define MV_ALG_E_DENOISE_COLOR_MODE 0x10402009 // Incorrect color-space mode
#define MV_ALG_E_DENOISE_ROI_NUM 0x1040200a // Incorrect number of image ROIs
#define MV_ALG_E_DENOISE_ROI_ORI_PT 0x1040200b // Incorrect image ROI origin
#define MV_ALG_E_DENOISE_ROI_SIZE 0x1040200c // Incorrect image ROI size
#define MV_ALG_E_DENOISE_GAIN_NOT_EXIST 0x1040200d // Input camera gain does not exist (gain count at limit)
#define MV_ALG_E_DENOISE_GAIN_BEYOND_RANGE 0x1040200e // Input camera gain out of range
#define MV_ALG_E_DENOISE_NP_BUF_SIZE 0x1040200f // Incorrect noise-profile buffer size

#endif //_MV_ISP_ERROR_DEFINE_H_

Some files were not shown because too many files have changed in this diff Show More