Compare commits

...

No commits in common. "master" and "main" have entirely different histories.
master ... main

134 changed files with 49 additions and 16388 deletions

47
.gitignore vendored Normal file
View File

@ -0,0 +1,47 @@
# ---> C++
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
*.lib
# Executables
*.exe
*.out
*.app
# ---> CMake
CMakeLists.txt.user
CMakeCache.txt
CMakeFiles
CMakeScripts
Testing
Makefile
cmake_install.cmake
install_manifest.txt
compile_commands.json
CTestTestfile.cmake
_deps

View File

@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>

View File

@ -1,100 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="AutoImportSettings">
<option name="autoReloadType" value="SELECTIVE" />
</component>
<component name="CMakeSettings">
<configurations>
<configuration PROFILE_NAME="Debug" ENABLED="true" CONFIG_NAME="Debug" />
</configurations>
</component>
<component name="ChangeListManager">
<list default="true" id="c7fa9fee-45cc-4763-9918-46f77b2144e2" name="Changes" comment="rm_vision HFUT备份" />
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="ClangdSettings">
<option name="formatViaClangd" value="false" />
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
<option name="ROOT_SYNC" value="SYNC" />
</component>
<component name="MarkdownSettingsMigration">
<option name="stateVersion" value="1" />
</component>
<component name="ProjectId" id="2ZWYnhxBCTbUCKX7fIErqOGmJhb" />
<component name="ProjectLevelVcsManager" settingsEditedManually="true" />
<component name="ProjectViewState">
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent"><![CDATA[{
"keyToString": {
"RunOnceActivity.ShowReadmeOnStart": "true",
"RunOnceActivity.cidr.known.project.marker": "true",
"WebServerToolWindowFactoryState": "false",
"cf.first.check.clang-format": "false",
"cidr.known.project.marker": "true",
"git-widget-placeholder": "master",
"last_opened_file_path": "D:/CLion/Project/src",
"node.js.detected.package.eslint": "true",
"node.js.detected.package.tslint": "true",
"node.js.selected.package.eslint": "(autodetect)",
"node.js.selected.package.tslint": "(autodetect)",
"settings.editor.selected.configurable": "vcs.Subversion.Network",
"vue.rearranger.settings.migration": "true"
}
}]]></component>
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
<component name="SvnConfiguration">
<configuration>C:\Users\SJQ\AppData\Roaming\Subversion</configuration>
</component>
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="c7fa9fee-45cc-4763-9918-46f77b2144e2" name="Changes" comment="" />
<created>1702539472285</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1702539472285</updated>
<workItem from="1702539474461" duration="496000" />
</task>
<task id="LOCAL-00001" summary="rm_vision HFUT备份">
<created>1702539519685</created>
<option name="number" value="00001" />
<option name="presentableId" value="LOCAL-00001" />
<option name="project" value="LOCAL" />
<updated>1702539519685</updated>
</task>
<option name="localTasksCounter" value="2" />
<servers />
</component>
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="3" />
</component>
<component name="Vcs.Log.Tabs.Properties">
<option name="TAB_STATES">
<map>
<entry key="MAIN">
<value>
<State />
</value>
</entry>
</map>
</option>
</component>
<component name="VcsManagerConfiguration">
<ignored-roots>
<path value="$PROJECT_DIR$/rm_auto_aim" />
<path value="$PROJECT_DIR$/rm_gimbal_description" />
<path value="$PROJECT_DIR$/rm_serial_driver" />
<path value="$PROJECT_DIR$/rm_vision" />
<path value="$PROJECT_DIR$/ros2_hik_camera" />
<path value="$PROJECT_DIR$/ros2_mindvision_camera" />
</ignored-roots>
<MESSAGE value="rm_vision HFUT备份" />
<option name="LAST_COMMIT_MESSAGE" value="rm_vision HFUT备份" />
</component>
</project>

2
README.md Normal file
View File

@ -0,0 +1,2 @@
# RM_Vision

View File

@ -1,18 +0,0 @@
---
Language: Cpp
BasedOnStyle: Google
AccessModifierOffset: -2
AlignAfterOpenBracket: AlwaysBreak
BraceWrapping:
AfterClass: true
AfterFunction: true
AfterNamespace: true
AfterStruct: true
BreakBeforeBraces: Custom
ColumnLimit: 100
ConstructorInitializerIndentWidth: 0
ContinuationIndentWidth: 2
DerivePointerAlignment: false
PointerAlignment: Middle
ReflowComments: false

View File

@ -1,55 +0,0 @@
---
Checks: '-*,
performance-*,
-performance-unnecessary-value-param,
llvm-namespace-comment,
modernize-redundant-void-arg,
modernize-use-nullptr,
modernize-use-default,
modernize-use-override,
modernize-loop-convert,
modernize-make-shared,
modernize-make-unique,
misc-unused-parameters,
readability-named-parameter,
readability-redundant-smartptr-get,
readability-redundant-string-cstr,
readability-simplify-boolean-expr,
readability-container-size-empty,
readability-identifier-naming,
'
HeaderFilterRegex: ''
AnalyzeTemporaryDtors: false
CheckOptions:
- key: llvm-namespace-comment.ShortNamespaceLines
value: '10'
- key: llvm-namespace-comment.SpacesBeforeComments
value: '2'
- key: misc-unused-parameters.StrictMode
value: '1'
- key: readability-braces-around-statements.ShortStatementLines
value: '2'
# type names
- key: readability-identifier-naming.ClassCase
value: CamelCase
- key: readability-identifier-naming.EnumCase
value: CamelCase
- key: readability-identifier-naming.UnionCase
value: CamelCase
# method names
- key: readability-identifier-naming.MethodCase
value: camelBack
# variable names
- key: readability-identifier-naming.VariableCase
value: lower_case
- key: readability-identifier-naming.ClassMemberSuffix
value: '_'
# const static or global variables are UPPER_CASE
- key: readability-identifier-naming.EnumConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.StaticConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.ClassConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.GlobalVariableCase
value: UPPER_CASE

View File

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022 ChenJun
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,61 +0,0 @@
# rm_auto_aim
## Overview
RoboMaster 装甲板自瞄算法模块
<img src="docs/rm_vision.svg" alt="rm_vision" width="200" height="200">
该项目为 [rm_vision](https://github.com/chenjunnn/rm_vision) 的子模块
若有帮助请Star这个项目感谢~
### License
The source code is released under a [MIT license](rm_auto_aim/LICENSE).
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
Author: Chen Jun
运行环境Ubuntu 22.04 / ROS2 Humble (未在其他环境下测试)
![Build Status](https://github.com/chenjunnn/rm_auto_aim/actions/workflows/ros_ci.yml/badge.svg)
## Building from Source
### Building
在 Ubuntu 22.04 环境下安装 [ROS2 Humble](https://docs.ros.org/en/humble/Installation/Ubuntu-Install-Debians.html)
创建 ROS 工作空间后 clone 项目,使用 rosdep 安装依赖后编译代码
cd ros_ws/src
git clone https://github.com/chenjunnn/rm_auto_aim.git
cd ..
rosdep install --from-paths src --ignore-src -r -y
colcon build --symlink-install --packages-up-to auto_aim_bringup
### Testing
Run the tests with
colcon test --packages-up-to auto_aim_bringup
## Packages
- [armor_detector](armor_detector)
订阅相机参数及图像流进行装甲板的识别并解算三维位置输出识别到的装甲板在输入frame下的三维位置 (一般是以相机光心为原点的相机坐标系)
- [armor_tracker](armor_tracker)
订阅识别节点发布的装甲板三维位置及机器人的坐标转换信息将装甲板三维位置变换到指定惯性系一般是以云台中心为原点IMU 上电时的 Yaw 朝向为 X 轴的惯性系)下,然后将装甲板目标送入跟踪器中,输出跟踪机器人在指定惯性系下的状态
- auto_aim_interfaces
定义了识别节点和处理节点的接口以及定义了用于 Debug 的信息
- auto_aim_bringup
包含启动识别节点和处理节点的默认参数文件及 launch 文件

View File

@ -1,68 +0,0 @@
cmake_minimum_required(VERSION 3.10)
project(armor_detector)

## Use C++14
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

## By adding -Wall and -Werror, the compiler does not ignore warnings anymore,
## enforcing cleaner code.
## Fix: add_definitions() is meant for preprocessor definitions (-D...);
## compiler flags belong in add_compile_options().
## NOTE(review): unconditional -Werror can break builds on newer compilers —
## consider guarding it behind a project option.
add_compile_options(-Wall -Werror)

## Export compile commands for clangd
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

#######################
## Find dependencies ##
#######################
find_package(ament_cmake_auto REQUIRED)
find_package(OpenCV REQUIRED)
ament_auto_find_build_dependencies()

###########
## Build ##
###########
ament_auto_add_library(${PROJECT_NAME} SHARED
  DIRECTORY src
)

## Plain (keyword-less) link signature kept on purpose: ament_auto_add_library()
## already links its dependencies with the plain signature, and CMake forbids
## mixing plain and PUBLIC/PRIVATE forms on the same target.
target_include_directories(${PROJECT_NAME} PUBLIC ${OpenCV_INCLUDE_DIRS})
target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS})

rclcpp_components_register_node(${PROJECT_NAME}
  PLUGIN rm_auto_aim::ArmorDetectorNode
  EXECUTABLE armor_detector_node
)

#############
## Testing ##
#############
if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  ## These linters conflict with the project's clang-format based style.
  list(APPEND AMENT_LINT_AUTO_EXCLUDE
    ament_cmake_copyright
    ament_cmake_uncrustify
    ament_cmake_cpplint
  )
  ament_lint_auto_find_test_dependencies()

  find_package(ament_cmake_gtest)
  ament_add_gtest(test_node_startup test/test_node_startup.cpp)
  target_link_libraries(test_node_startup ${PROJECT_NAME})

  ament_add_gtest(test_number_cls test/test_number_cls.cpp)
  target_link_libraries(test_number_cls ${PROJECT_NAME})
endif()

#############
## Install ##
#############
ament_auto_package(
  INSTALL_TO_SHARE
  model
)

View File

@ -1,104 +0,0 @@
# armor_detector
- [DetectorNode](#basedetectornode)
- [Detector](#detector)
- [NumberClassifier](#numberclassifier)
- [PnPSolver](#pnpsolver)
## 识别节点
订阅相机参数及图像流进行装甲板的识别并解算三维位置输出识别到的装甲板在输入frame下的三维位置 (一般是以相机光心为原点的相机坐标系)
### DetectorNode
装甲板识别节点
包含[Detector](#detector)
包含[PnPSolver](#pnpsolver)
订阅:
- 相机参数 `/camera_info`
- 彩色图像 `/image_raw`
发布:
- 识别目标 `/detector/armors`
静态参数:
- 筛选灯条的参数 `light`
- 长宽比范围 `min/max_ratio`
- 最大倾斜角度 `max_angle`
- 筛选灯条配对结果的参数 `armor`
- 两灯条的最小长度之比(短边/长边)`min_light_ratio `
- 装甲板两灯条中心的距离范围(大装甲板)`min/max_large_center_distance`
- 装甲板两灯条中心的距离范围(小装甲板)`min/max_small_center_distance`
- 装甲板的最大倾斜角度 `max_angle`
动态参数:
- 是否发布 debug 信息 `debug`
- 识别目标颜色 `detect_color`
- 二值化的最小阈值 `binary_thres`
- 数字分类器 `classifier`
- 置信度阈值 `threshold`
## Detector
装甲板识别器
### preprocessImage
预处理
| ![](docs/raw.png) | ![](docs/hsv_bin.png) | ![](docs/gray_bin.png) |
| :---------------: | :-------------------: | :--------------------: |
| 原图 | 通过颜色二值化 | 通过灰度二值化 |
由于一般工业相机的动态范围不够大,导致若要能够清晰分辨装甲板的数字,得到的相机图像中灯条中心就会过曝,灯条中心的像素点的值往往都是 R=B。根据颜色信息来进行二值化效果不佳因此此处选择了直接通过灰度图进行二值化将灯条的颜色判断放到后续处理中。
### findLights
寻找灯条
通过 findContours 得到轮廓,再通过 minAreaRect 获得最小外接矩形,对其进行长宽比和倾斜角度的判断,可以高效的筛除形状不满足的亮斑。
判断灯条颜色这里采用了对轮廓内的R/B值求和判断两和大小的方法`sum_r > sum_b` 则认为是红色灯条,反之则认为是蓝色灯条。
| ![](docs/red.png) | ![](docs/blue.png) |
| :---------------: | :----------------: |
| 提取出的红色灯条 | 提取出的蓝色灯条 |
### matchLights
配对灯条
根据 `detect_color` 选择对应颜色的灯条进行两两配对,首先筛除掉两条灯条中间包含另一个灯条的情况,然后根据两灯条的长度之比、两灯条中心的距离、配对出装甲板的倾斜角度来筛选掉条件不满足的结果,得到形状符合装甲板特征的灯条配对。
## NumberClassifier
数字分类器
### extractNumbers
提取数字
| ![](docs/num_raw.png) | ![](docs/num_warp.png) | ![](docs/num_roi.png) | ![](docs/num_bin.png) |
| :-------------------: | :--------------------: | :-------------------: | :-------------------: |
| 原图 | 透视变换 | 取ROI | 二值化 |
将每条灯条上下的角点拉伸到装甲板的上下边缘作为待变换点进行透视变换再对变换后的图像取ROI。考虑到数字图案实质上就是黑色背景+白色图案,所以此处使用了大津法进行二值化。
### Classify
分类
由于上一步对于数字的提取效果已经非常好数字图案的特征非常清晰明显装甲板的远近、旋转都不会使图案产生过多畸变且图案像素点少所以我们使用多层感知机MLP进行分类。
网络结构中定义了两个隐藏层和一个分类层,将二值化后的数字展平成 20x28=560 维的输入,送入网络进行分类。
网络结构:
![](docs/model.svg)
<!-- 效果图: -->
<!-- ![](docs/result.png) -->
## PnPSolver
PnP解算器
[Perspective-n-Point (PnP) pose computation](https://docs.opencv.org/4.x/d5/d1f/calib3d_solvePnP.html)
PnP解算器将 `cv::solvePnP()` 封装,接口中传入 `Armor` 类型的数据即可得到 `geometry_msgs::msg::Point` 类型的三维坐标。
考虑到装甲板的四个点在一个平面上在PnP解算方法上我们选择了 `cv::SOLVEPNP_IPPE` (Method is based on the paper of T. Collins and A. Bartoli. ["Infinitesimal Plane-Based Pose Estimation"](https://link.springer.com/article/10.1007/s11263-014-0725-5). This method requires coplanar object points.)

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 17 KiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 16 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 307 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 260 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 151 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 26 KiB

View File

@ -1,73 +0,0 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.

#ifndef ARMOR_DETECTOR__ARMOR_HPP_
#define ARMOR_DETECTOR__ARMOR_HPP_

#include <opencv2/core.hpp>

// STL
#include <algorithm>
#include <cmath>  // Fix: std::atan2 / std::abs are used below but <cmath> was missing.
#include <string>

namespace rm_auto_aim
{
// Color ids stored in Light::color and compared against detect_color.
const int RED = 0;
const int BLUE = 1;

enum class ArmorType { SMALL, LARGE, INVALID };
// Human-readable names, indexed by static_cast<int>(ArmorType).
const std::string ARMOR_TYPE_STR[3] = {"small", "large", "invalid"};

// A light bar: a cv::RotatedRect augmented with its color, its oriented
// top/bottom endpoints, and its tilt angle from vertical (degrees).
struct Light : public cv::RotatedRect
{
  Light() = default;
  // Fix: take the rect by const reference to avoid an unnecessary copy.
  explicit Light(const cv::RotatedRect & box) : cv::RotatedRect(box)
  {
    cv::Point2f p[4];
    box.points(p);
    // Sort corners top-to-bottom so p[0]/p[1] form the top edge.
    std::sort(p, p + 4, [](const cv::Point2f & a, const cv::Point2f & b) { return a.y < b.y; });
    top = (p[0] + p[1]) / 2;
    bottom = (p[2] + p[3]) / 2;

    length = cv::norm(top - bottom);
    width = cv::norm(p[0] - p[1]);

    // Tilt from the vertical axis, converted from radians to degrees.
    tilt_angle = std::atan2(std::abs(top.x - bottom.x), std::abs(top.y - bottom.y));
    tilt_angle = tilt_angle / CV_PI * 180;
  }

  // Fix: default-initialize members so a default-constructed Light does not
  // carry indeterminate values.
  int color = 0;
  cv::Point2f top, bottom;
  double length = 0;
  double width = 0;
  float tilt_angle = 0;
};

// A pair of lights forming an armor plate, plus its classification result.
struct Armor
{
  Armor() = default;
  Armor(const Light & l1, const Light & l2)
  {
    // Order the two lights left-to-right by image x coordinate.
    if (l1.center.x < l2.center.x) {
      left_light = l1, right_light = l2;
    } else {
      left_light = l2, right_light = l1;
    }
    center = (left_light.center + right_light.center) / 2;
  }

  // Light pairs part
  Light left_light, right_light;
  cv::Point2f center;
  ArmorType type = ArmorType::INVALID;

  // Number part
  cv::Mat number_img;
  std::string number;
  float confidence = 0;
  // NOTE(review): "classfication" is a typo, but the name is part of the
  // public interface (read by the detector node) — kept as-is.
  std::string classfication_result;
};

}  // namespace rm_auto_aim

#endif  // ARMOR_DETECTOR__ARMOR_HPP_

View File

@ -1,83 +0,0 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.

#ifndef ARMOR_DETECTOR__DETECTOR_HPP_
#define ARMOR_DETECTOR__DETECTOR_HPP_

// OpenCV
#include <opencv2/core.hpp>
#include <opencv2/core/types.hpp>

// STD
#include <cmath>
#include <memory>  // Fix: required for std::unique_ptr used below.
#include <string>
#include <vector>

#include "armor_detector/armor.hpp"
#include "armor_detector/number_classifier.hpp"
#include "auto_aim_interfaces/msg/debug_armors.hpp"
#include "auto_aim_interfaces/msg/debug_lights.hpp"

namespace rm_auto_aim
{
// Armor plate detector: binarizes a frame, finds light bars, pairs them into
// armor candidates, and classifies the number pattern on each candidate.
class Detector
{
public:
  // Constraints for accepting a rotated rect as a light bar.
  struct LightParams
  {
    // width / height
    double min_ratio;
    double max_ratio;
    // vertical angle
    double max_angle;
  };

  // Constraints for accepting a pair of lights as an armor plate.
  struct ArmorParams
  {
    double min_light_ratio;
    // light pairs distance
    double min_small_center_distance;
    double max_small_center_distance;
    double min_large_center_distance;
    double max_large_center_distance;
    // horizontal angle
    double max_angle;
  };

  Detector(const int & bin_thres, const int & color, const LightParams & l, const ArmorParams & a);

  // Run the full pipeline on one frame and return the detected armors.
  std::vector<Armor> detect(const cv::Mat & input);

  cv::Mat preprocessImage(const cv::Mat & input);
  std::vector<Light> findLights(const cv::Mat & rbg_img, const cv::Mat & binary_img);
  std::vector<Armor> matchLights(const std::vector<Light> & lights);

  // For debug usage
  cv::Mat getAllNumbersImage();
  void drawResults(cv::Mat & img);

  int binary_thres;
  int detect_color;
  LightParams l;
  ArmorParams a;

  std::unique_ptr<NumberClassifier> classifier;

  // Debug msgs
  cv::Mat binary_img;
  auto_aim_interfaces::msg::DebugLights debug_lights;
  auto_aim_interfaces::msg::DebugArmors debug_armors;

private:
  bool isLight(const Light & possible_light);
  bool containLight(
    const Light & light_1, const Light & light_2, const std::vector<Light> & lights);
  ArmorType isArmor(const Light & light_1, const Light & light_2);

  std::vector<Light> lights_;
  std::vector<Armor> armors_;
};

}  // namespace rm_auto_aim

#endif  // ARMOR_DETECTOR__DETECTOR_HPP_

View File

@ -1,81 +0,0 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.

#ifndef ARMOR_DETECTOR__DETECTOR_NODE_HPP_
#define ARMOR_DETECTOR__DETECTOR_NODE_HPP_

// ROS
#include <image_transport/image_transport.hpp>
#include <image_transport/publisher.hpp>
#include <image_transport/subscriber_filter.hpp>
#include <rclcpp/publisher.hpp>
#include <rclcpp/rclcpp.hpp>
#include <sensor_msgs/msg/camera_info.hpp>
#include <sensor_msgs/msg/image.hpp>
#include <visualization_msgs/msg/marker_array.hpp>

// STD
#include <memory>
#include <string>
#include <vector>

#include "armor_detector/detector.hpp"
#include "armor_detector/number_classifier.hpp"
#include "armor_detector/pnp_solver.hpp"
#include "auto_aim_interfaces/msg/armors.hpp"

namespace rm_auto_aim
{
// ROS 2 component node: subscribes to camera info and images, runs the armor
// detector on each frame, and publishes detected armors plus debug output.
class ArmorDetectorNode : public rclcpp::Node
{
public:
  // Fix: single-argument constructor marked explicit to prevent accidental
  // implicit conversion from NodeOptions.
  explicit ArmorDetectorNode(const rclcpp::NodeOptions & options);

private:
  // Per-frame entry point: detect armors in the image and publish results.
  void imageCallback(const sensor_msgs::msg::Image::ConstSharedPtr img_msg);

  std::unique_ptr<Detector> initDetector();
  std::vector<Armor> detectArmors(const sensor_msgs::msg::Image::ConstSharedPtr & img_msg);

  void createDebugPublishers();
  void destroyDebugPublishers();

  void publishMarkers();

  // Armor Detector
  std::unique_ptr<Detector> detector_;

  // Detected armors publisher
  auto_aim_interfaces::msg::Armors armors_msg_;
  rclcpp::Publisher<auto_aim_interfaces::msg::Armors>::SharedPtr armors_pub_;

  // Visualization marker publisher
  visualization_msgs::msg::Marker armor_marker_;
  visualization_msgs::msg::Marker text_marker_;
  visualization_msgs::msg::MarkerArray marker_array_;
  rclcpp::Publisher<visualization_msgs::msg::MarkerArray>::SharedPtr marker_pub_;

  // Camera info part
  rclcpp::Subscription<sensor_msgs::msg::CameraInfo>::SharedPtr cam_info_sub_;
  cv::Point2f cam_center_;
  std::shared_ptr<sensor_msgs::msg::CameraInfo> cam_info_;
  std::unique_ptr<PnPSolver> pnp_solver_;

  // Image subscrpition
  rclcpp::Subscription<sensor_msgs::msg::Image>::SharedPtr img_sub_;

  // Debug information
  bool debug_;
  std::shared_ptr<rclcpp::ParameterEventHandler> debug_param_sub_;
  std::shared_ptr<rclcpp::ParameterCallbackHandle> debug_cb_handle_;
  rclcpp::Publisher<auto_aim_interfaces::msg::DebugLights>::SharedPtr lights_data_pub_;
  rclcpp::Publisher<auto_aim_interfaces::msg::DebugArmors>::SharedPtr armors_data_pub_;
  image_transport::Publisher binary_img_pub_;
  image_transport::Publisher number_img_pub_;
  image_transport::Publisher result_img_pub_;
};

}  // namespace rm_auto_aim

#endif  // ARMOR_DETECTOR__DETECTOR_NODE_HPP_

View File

@ -1,40 +0,0 @@
// Copyright 2022 Chen Jun

#ifndef ARMOR_DETECTOR__NUMBER_CLASSIFIER_HPP_
#define ARMOR_DETECTOR__NUMBER_CLASSIFIER_HPP_

// OpenCV
#include <opencv2/opencv.hpp>

// STL
#include <cstddef>
#include <iostream>
#include <map>
#include <string>
#include <vector>

#include "armor_detector/armor.hpp"

namespace rm_auto_aim
{
// Classifier for the number pattern printed on an armor plate, backed by a
// cv::dnn network (per the package README, an MLP over 20x28 binarized images).
class NumberClassifier
{
public:
  // model_path: path of the network model loaded into net_.
  // label_path: path of the class-name list loaded into class_names_.
  // threshold: minimum confidence for a classification to be accepted.
  // ignore_classes: class names whose detections should be discarded.
  NumberClassifier(
    const std::string & model_path, const std::string & label_path, const double threshold,
    const std::vector<std::string> & ignore_classes = {});

  // Extract the number region of each armor from src into armor.number_img
  // (README: perspective-warp, ROI crop, then Otsu binarization).
  void extractNumbers(const cv::Mat & src, std::vector<Armor> & armors);

  // Classify each armor's number image, filling its number / confidence /
  // classfication_result fields.
  void classify(std::vector<Armor> & armors);

  // Confidence threshold; public so the node can update it dynamically.
  double threshold;

private:
  cv::dnn::Net net_;
  std::vector<std::string> class_names_;
  std::vector<std::string> ignore_classes_;
};
}  // namespace rm_auto_aim

#endif  // ARMOR_DETECTOR__NUMBER_CLASSIFIER_HPP_

View File

@ -1,48 +0,0 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.

#ifndef ARMOR_DETECTOR__PNP_SOLVER_HPP_
#define ARMOR_DETECTOR__PNP_SOLVER_HPP_

#include <geometry_msgs/msg/point.hpp>
#include <opencv2/core.hpp>

// STD
#include <array>
#include <vector>

#include "armor_detector/armor.hpp"

namespace rm_auto_aim
{
// Wraps cv::solvePnP() to compute an armor's 3D pose from its image corners,
// using the camera intrinsics and distortion coefficients given at construction.
class PnPSolver
{
public:
  // camera_matrix: 3x3 intrinsics in row-major order (e.g. CameraInfo::k).
  // distortion_coefficients: lens distortion vector (e.g. CameraInfo::d).
  PnPSolver(
    const std::array<double, 9> & camera_matrix,
    const std::vector<double> & distortion_coefficients);

  // Get 3d position
  // Outputs the rotation (rvec) and translation (tvec) of the armor; returns
  // whether the solve succeeded.
  bool solvePnP(const Armor & armor, cv::Mat & rvec, cv::Mat & tvec);

  // Calculate the distance between armor center and image center
  float calculateDistanceToCenter(const cv::Point2f & image_point);

private:
  cv::Mat camera_matrix_;
  cv::Mat dist_coeffs_;

  // Physical armor dimensions. Unit: mm
  static constexpr float SMALL_ARMOR_WIDTH = 135;
  static constexpr float SMALL_ARMOR_HEIGHT = 55;
  static constexpr float LARGE_ARMOR_WIDTH = 225;
  static constexpr float LARGE_ARMOR_HEIGHT = 55;

  // Four vertices of armor in 3d
  std::vector<cv::Point3f> small_armor_points_;
  std::vector<cv::Point3f> large_armor_points_;
};

}  // namespace rm_auto_aim

#endif  // ARMOR_DETECTOR__PNP_SOLVER_HPP_

View File

@ -1,9 +0,0 @@
1
2
3
4
5
outpost
guard
base
negative

View File

@ -1,37 +0,0 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>armor_detector</name>
<version>0.1.0</version>
<description>Armor plate detection package for the RoboMaster auto-aim system.</description>
<maintainer email="chen.junn@outlook.com">Chen Jun</maintainer>
<license>MIT</license>
<url type="website">https://github.com/chenjunnn/rm_auto_aim</url>
<url type="bugtracker">https://github.com/chenjunnn/rm_auto_aim/issues</url>
<author email="chen.junn@outlook.com">Chen Jun</author>
<!-- buildtool_depend: dependencies of the build process -->
<buildtool_depend>ament_cmake</buildtool_depend>
<!-- depend: build, export, and execution dependency -->
<depend>rclcpp</depend>
<depend>rclcpp_components</depend>
<depend>sensor_msgs</depend>
<depend>geometry_msgs</depend>
<depend>visualization_msgs</depend>
<depend>message_filters</depend>
<depend>cv_bridge</depend>
<depend>image_transport</depend>
<depend>image_transport_plugins</depend>
<depend>auto_aim_interfaces</depend>
<depend>vision_opencv</depend>
<depend>tf2_geometry_msgs</depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<test_depend>ament_cmake_clang_format</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -1,245 +0,0 @@
// Copyright (c) 2022 ChenJun
// Licensed under the MIT License.
// OpenCV
#include <opencv2/core.hpp>
#include <opencv2/core/base.hpp>
#include <opencv2/core/mat.hpp>
#include <opencv2/core/types.hpp>
#include <opencv2/imgproc.hpp>
// STD
#include <algorithm>
#include <cmath>
#include <vector>
#include "armor_detector/detector.hpp"
#include "auto_aim_interfaces/msg/debug_armor.hpp"
#include "auto_aim_interfaces/msg/debug_light.hpp"
namespace rm_auto_aim
{
// Construct the detector with the binarization threshold, the target color
// (RED or BLUE), and the light / armor filtering parameters.
Detector::Detector(
  const int & bin_thres, const int & color, const LightParams & l, const ArmorParams & a)
: binary_thres(bin_thres), detect_color(color), l(l), a(a)
{
}
// Run the full detection pipeline on one frame:
// binarize -> find light bars -> pair into armors -> classify numbers.
// Results are also cached in binary_img / lights_ / armors_ for debug output.
std::vector<Armor> Detector::detect(const cv::Mat & input)
{
  binary_img = preprocessImage(input);
  lights_ = findLights(input, binary_img);
  armors_ = matchLights(lights_);

  const bool has_candidates = !armors_.empty();
  if (has_candidates) {
    classifier->extractNumbers(input, armors_);
    classifier->classify(armors_);
  }

  return armors_;
}
// Convert the color frame to grayscale and apply a fixed binary threshold.
// Grayscale (rather than color) binarization is used because light-bar centers
// are typically overexposed (see package README).
cv::Mat Detector::preprocessImage(const cv::Mat & rgb_img)
{
  cv::Mat gray;
  cv::cvtColor(rgb_img, gray, cv::COLOR_RGB2GRAY);

  cv::Mat thresholded;
  cv::threshold(gray, thresholded, binary_thres, 255, cv::THRESH_BINARY);

  return thresholded;
}
// Find light bars in the frame.
// rbg_img: the original color image. Channel 0 is summed as "red" and
//          channel 2 as "blue" below, consistent with the RGB order implied
//          by COLOR_RGB2GRAY in preprocessImage() — TODO confirm against the
//          camera driver's actual pixel format.
// binary_img: binarized image produced by preprocessImage().
std::vector<Light> Detector::findLights(const cv::Mat & rbg_img, const cv::Mat & binary_img)
{
  using std::vector;
  vector<vector<cv::Point>> contours;
  vector<cv::Vec4i> hierarchy;
  // External contours only: each outer contour is a candidate light bar.
  cv::findContours(binary_img, contours, hierarchy, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

  vector<Light> lights;
  this->debug_lights.data.clear();

  for (const auto & contour : contours) {
    // Too few points for a stable minAreaRect fit; skip tiny specks.
    if (contour.size() < 5) continue;

    auto r_rect = cv::minAreaRect(contour);
    auto light = Light(r_rect);

    if (isLight(light)) {
      auto rect = light.boundingRect();
      if (  // Avoid assertion failed
        0 <= rect.x && 0 <= rect.width && rect.x + rect.width <= rbg_img.cols && 0 <= rect.y &&
        0 <= rect.height && rect.y + rect.height <= rbg_img.rows) {
        int sum_r = 0, sum_b = 0;
        auto roi = rbg_img(rect);
        // Iterate through the ROI, counting only pixels inside the contour.
        for (int i = 0; i < roi.rows; i++) {
          for (int j = 0; j < roi.cols; j++) {
            if (cv::pointPolygonTest(contour, cv::Point2f(j + rect.x, i + rect.y), false) >= 0) {
              // if point is inside contour
              sum_r += roi.at<cv::Vec3b>(i, j)[0];
              sum_b += roi.at<cv::Vec3b>(i, j)[2];
            }
          }
        }
        // Sum of red pixels > sum of blue pixels ?
        light.color = sum_r > sum_b ? RED : BLUE;
        lights.emplace_back(light);
      }
    }
  }

  return lights;
}
bool Detector::isLight(const Light & light)
{
// The ratio of light (short side / long side)
float ratio = light.width / light.length;
bool ratio_ok = l.min_ratio < ratio && ratio < l.max_ratio;
bool angle_ok = light.tilt_angle < l.max_angle;
bool is_light = ratio_ok && angle_ok;
// Fill in debug information
auto_aim_interfaces::msg::DebugLight light_data;
light_data.center_x = light.center.x;
light_data.ratio = ratio;
light_data.angle = light.tilt_angle;
light_data.is_light = is_light;
this->debug_lights.data.emplace_back(light_data);
return is_light;
}
// Pair up light bars of the target color into armor candidates.
std::vector<Armor> Detector::matchLights(const std::vector<Light> & lights)
{
  std::vector<Armor> armors;
  this->debug_armors.data.clear();

  // Examine every unordered pair of lights.
  for (auto first = lights.begin(); first != lights.end(); first++) {
    for (auto second = first + 1; second != lights.end(); second++) {
      // Both lights must match the color we are hunting for.
      if (first->color != detect_color || second->color != detect_color) continue;

      // Reject pairs that enclose a third light between them.
      if (containLight(*first, *second, lights)) {
        continue;
      }

      const auto armor_type = isArmor(*first, *second);
      if (armor_type != ArmorType::INVALID) {
        auto candidate = Armor(*first, *second);
        candidate.type = armor_type;
        armors.emplace_back(candidate);
      }
    }
  }

  return armors;
}
// Check if there is another light in the boundingRect formed by the 2 lights
bool Detector::containLight(
const Light & light_1, const Light & light_2, const std::vector<Light> & lights)
{
auto points = std::vector<cv::Point2f>{light_1.top, light_1.bottom, light_2.top, light_2.bottom};
auto bounding_rect = cv::boundingRect(points);
for (const auto & test_light : lights) {
if (test_light.center == light_1.center || test_light.center == light_2.center) continue;
if (
bounding_rect.contains(test_light.top) || bounding_rect.contains(test_light.bottom) ||
bounding_rect.contains(test_light.center)) {
return true;
}
}
return false;
}
// Decide whether two lights form a valid armor pair and, if so, whether the
// armor is SMALL or LARGE. Also records the measurements for debug output.
ArmorType Detector::isArmor(const Light & light_1, const Light & light_2)
{
  // Ratio of the length of 2 lights (short side / long side)
  float light_length_ratio = light_1.length < light_2.length ? light_1.length / light_2.length
                                                             : light_2.length / light_1.length;
  bool light_ratio_ok = light_length_ratio > a.min_light_ratio;

  // Distance between the center of 2 lights (unit : light length)
  float avg_light_length = (light_1.length + light_2.length) / 2;
  float center_distance = cv::norm(light_1.center - light_2.center) / avg_light_length;
  // The distance must fall into either the small-armor or large-armor range.
  bool center_distance_ok = (a.min_small_center_distance <= center_distance &&
                             center_distance < a.max_small_center_distance) ||
                            (a.min_large_center_distance <= center_distance &&
                             center_distance < a.max_large_center_distance);

  // Angle of light center connection
  cv::Point2f diff = light_1.center - light_2.center;
  float angle = std::abs(std::atan(diff.y / diff.x)) / CV_PI * 180;
  bool angle_ok = angle < a.max_angle;

  bool is_armor = light_ratio_ok && center_distance_ok && angle_ok;

  // Judge armor type
  ArmorType type;
  if (is_armor) {
    // Beyond the large-armor lower bound the pair is treated as LARGE.
    type = center_distance > a.min_large_center_distance ? ArmorType::LARGE : ArmorType::SMALL;
  } else {
    type = ArmorType::INVALID;
  }

  // Fill in debug information
  auto_aim_interfaces::msg::DebugArmor armor_data;
  armor_data.type = ARMOR_TYPE_STR[static_cast<int>(type)];
  armor_data.center_x = (light_1.center.x + light_2.center.x) / 2;
  armor_data.light_ratio = light_length_ratio;
  armor_data.center_distance = center_distance;
  armor_data.angle = angle;
  this->debug_armors.data.emplace_back(armor_data);

  return type;
}
// Stack the extracted number images of all current armors vertically into one
// image for debug visualization.
// Returns a single 20x28 black image when no armors were detected.
cv::Mat Detector::getAllNumbersImage()
{
  if (armors_.empty()) {
    // Fix: cv::Mat(size, type) leaves the pixel buffer uninitialized, which
    // shows up as garbage in the debug image stream; return zeros instead.
    return cv::Mat::zeros(cv::Size(20, 28), CV_8UC1);
  } else {
    std::vector<cv::Mat> number_imgs;
    number_imgs.reserve(armors_.size());
    for (auto & armor : armors_) {
      number_imgs.emplace_back(armor.number_img);
    }
    cv::Mat all_num_img;
    cv::vconcat(number_imgs, all_num_img);
    return all_num_img;
  }
}
// Overlay detection results (light endpoints, armor cross-lines, and the
// classification text) onto img for visualization.
void Detector::drawResults(cv::Mat & img)
{
  const auto white = cv::Scalar(255, 255, 255);
  const auto green = cv::Scalar(0, 255, 0);

  // Draw Lights
  for (const auto & light : lights_) {
    cv::circle(img, light.top, 3, white, 1);
    cv::circle(img, light.bottom, 3, white, 1);
    const auto line_color =
      light.color == RED ? cv::Scalar(255, 255, 0) : cv::Scalar(255, 0, 255);
    cv::line(img, light.top, light.bottom, line_color, 1);
  }

  // Draw armors
  for (const auto & armor : armors_) {
    cv::line(img, armor.left_light.top, armor.right_light.bottom, green, 2);
    cv::line(img, armor.left_light.bottom, armor.right_light.top, green, 2);
  }

  // Show numbers and confidence
  for (const auto & armor : armors_) {
    cv::putText(
      img, armor.classfication_result, armor.left_light.top, cv::FONT_HERSHEY_SIMPLEX, 0.8,
      cv::Scalar(0, 255, 255), 2);
  }
}
} // namespace rm_auto_aim

View File

@ -1,292 +0,0 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.
#include <cv_bridge/cv_bridge.h>
#include <rmw/qos_profiles.h>
#include <tf2/LinearMath/Matrix3x3.h>
#include <tf2/convert.h>
#include <ament_index_cpp/get_package_share_directory.hpp>
#include <image_transport/image_transport.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <rclcpp/duration.hpp>
#include <rclcpp/qos.hpp>
#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
// STD
#include <algorithm>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "armor_detector/armor.hpp"
#include "armor_detector/detector_node.hpp"
namespace rm_auto_aim
{
// Set up the detector, all publishers/subscriptions, and the debug machinery.
ArmorDetectorNode::ArmorDetectorNode(const rclcpp::NodeOptions & options)
: Node("armor_detector", options)
{
  RCLCPP_INFO(this->get_logger(), "Starting DetectorNode!");

  // Detector
  detector_ = initDetector();

  // Armors Publisher
  armors_pub_ = this->create_publisher<auto_aim_interfaces::msg::Armors>(
    "/detector/armors", rclcpp::SensorDataQoS());

  // Visualization Marker Publisher
  // See http://wiki.ros.org/rviz/DisplayTypes/Marker
  armor_marker_.ns = "armors";
  armor_marker_.action = visualization_msgs::msg::Marker::ADD;
  armor_marker_.type = visualization_msgs::msg::Marker::CUBE;
  armor_marker_.scale.x = 0.05;
  armor_marker_.scale.z = 0.125;
  armor_marker_.color.a = 1.0;
  armor_marker_.color.g = 0.5;
  armor_marker_.color.b = 1.0;
  // Short lifetime so stale markers disappear when detection stops.
  armor_marker_.lifetime = rclcpp::Duration::from_seconds(0.1);

  text_marker_.ns = "classification";
  text_marker_.action = visualization_msgs::msg::Marker::ADD;
  text_marker_.type = visualization_msgs::msg::Marker::TEXT_VIEW_FACING;
  text_marker_.scale.z = 0.1;
  text_marker_.color.a = 1.0;
  text_marker_.color.r = 1.0;
  text_marker_.color.g = 1.0;
  text_marker_.color.b = 1.0;
  text_marker_.lifetime = rclcpp::Duration::from_seconds(0.1);

  marker_pub_ =
    this->create_publisher<visualization_msgs::msg::MarkerArray>("/detector/marker", 10);

  // Debug Publishers
  debug_ = this->declare_parameter("debug", false);
  if (debug_) {
    createDebugPublishers();
  }

  // Debug param change moniter: toggles debug publishers at runtime when the
  // "debug" parameter changes.
  debug_param_sub_ = std::make_shared<rclcpp::ParameterEventHandler>(this);
  debug_cb_handle_ =
    debug_param_sub_->add_parameter_callback("debug", [this](const rclcpp::Parameter & p) {
      debug_ = p.as_bool();
      debug_ ? createDebugPublishers() : destroyDebugPublishers();
    });

  // One-shot camera-info subscription: capture intrinsics, build the PnP
  // solver, then unsubscribe (cam_info_sub_.reset()).
  cam_info_sub_ = this->create_subscription<sensor_msgs::msg::CameraInfo>(
    "/camera_info", rclcpp::SensorDataQoS(),
    [this](sensor_msgs::msg::CameraInfo::ConstSharedPtr camera_info) {
      cam_center_ = cv::Point2f(camera_info->k[2], camera_info->k[5]);
      cam_info_ = std::make_shared<sensor_msgs::msg::CameraInfo>(*camera_info);
      pnp_solver_ = std::make_unique<PnPSolver>(camera_info->k, camera_info->d);
      cam_info_sub_.reset();
    });

  img_sub_ = this->create_subscription<sensor_msgs::msg::Image>(
    "/image_raw", rclcpp::SensorDataQoS(),
    std::bind(&ArmorDetectorNode::imageCallback, this, std::placeholders::_1));
}
// Per-frame pipeline: detect armors in the image, solve PnP for each one,
// then publish the armor array and the RViz markers.
// Nothing is published until camera intrinsics have arrived (pnp_solver_ set).
void ArmorDetectorNode::imageCallback(const sensor_msgs::msg::Image::ConstSharedPtr img_msg)
{
  auto armors = detectArmors(img_msg);

  if (pnp_solver_ != nullptr) {
    armors_msg_.header = armor_marker_.header = text_marker_.header = img_msg->header;

    armors_msg_.armors.clear();
    marker_array_.markers.clear();
    armor_marker_.id = 0;
    text_marker_.id = 0;

    auto_aim_interfaces::msg::Armor armor_msg;
    for (const auto & armor : armors) {
      cv::Mat rvec, tvec;
      bool success = pnp_solver_->solvePnP(armor, rvec, tvec);
      if (success) {
        // Fill basic info
        armor_msg.type = ARMOR_TYPE_STR[static_cast<int>(armor.type)];
        armor_msg.number = armor.number;

        // Fill pose: tvec is the armor position in the camera frame.
        armor_msg.pose.position.x = tvec.at<double>(0);
        armor_msg.pose.position.y = tvec.at<double>(1);
        armor_msg.pose.position.z = tvec.at<double>(2);
        // rvec to 3x3 rotation matrix
        cv::Mat rotation_matrix;
        cv::Rodrigues(rvec, rotation_matrix);
        // rotation matrix to quaternion (via tf2, row-major copy)
        tf2::Matrix3x3 tf2_rotation_matrix(
          rotation_matrix.at<double>(0, 0), rotation_matrix.at<double>(0, 1),
          rotation_matrix.at<double>(0, 2), rotation_matrix.at<double>(1, 0),
          rotation_matrix.at<double>(1, 1), rotation_matrix.at<double>(1, 2),
          rotation_matrix.at<double>(2, 0), rotation_matrix.at<double>(2, 1),
          rotation_matrix.at<double>(2, 2));
        tf2::Quaternion tf2_q;
        tf2_rotation_matrix.getRotation(tf2_q);
        armor_msg.pose.orientation = tf2::toMsg(tf2_q);

        // Fill the distance to image center (used by the tracker to pick a target)
        armor_msg.distance_to_image_center = pnp_solver_->calculateDistanceToCenter(armor.center);

        // Fill the markers: cube for the armor plate, floating text for the label.
        armor_marker_.id++;
        armor_marker_.scale.y = armor.type == ArmorType::SMALL ? 0.135 : 0.23;
        armor_marker_.pose = armor_msg.pose;
        text_marker_.id++;
        text_marker_.pose.position = armor_msg.pose.position;
        text_marker_.pose.position.y -= 0.1;
        text_marker_.text = armor.classfication_result;
        armors_msg_.armors.emplace_back(armor_msg);
        marker_array_.markers.emplace_back(armor_marker_);
        marker_array_.markers.emplace_back(text_marker_);
      } else {
        RCLCPP_WARN(this->get_logger(), "PnP failed!");
      }
    }

    // Publishing detected armors
    armors_pub_->publish(armors_msg_);

    // Publishing marker
    publishMarkers();
  }
}
// Declare all detection parameters (with ranges/defaults) and build the
// Detector plus its number classifier from the package's ONNX model.
std::unique_ptr<Detector> ArmorDetectorNode::initDetector()
{
  rcl_interfaces::msg::ParameterDescriptor param_desc;
  param_desc.integer_range.resize(1);
  param_desc.integer_range[0].step = 1;
  param_desc.integer_range[0].from_value = 0;
  param_desc.integer_range[0].to_value = 255;
  int binary_thres = declare_parameter("binary_thres", 160, param_desc);

  // param_desc is reused: narrow the range to {0, 1} for the color selector.
  param_desc.description = "0-RED, 1-BLUE";
  param_desc.integer_range[0].from_value = 0;
  param_desc.integer_range[0].to_value = 1;
  auto detect_color = declare_parameter("detect_color", RED, param_desc);

  // Light-filtering thresholds (ratio = width/length, angle in degrees).
  Detector::LightParams l_params = {
    .min_ratio = declare_parameter("light.min_ratio", 0.1),
    .max_ratio = declare_parameter("light.max_ratio", 0.4),
    .max_angle = declare_parameter("light.max_angle", 40.0)};

  // Armor-pairing thresholds (center distances are in units of light length).
  Detector::ArmorParams a_params = {
    .min_light_ratio = declare_parameter("armor.min_light_ratio", 0.7),
    .min_small_center_distance = declare_parameter("armor.min_small_center_distance", 0.8),
    .max_small_center_distance = declare_parameter("armor.max_small_center_distance", 3.2),
    .min_large_center_distance = declare_parameter("armor.min_large_center_distance", 3.2),
    .max_large_center_distance = declare_parameter("armor.max_large_center_distance", 5.0),
    .max_angle = declare_parameter("armor.max_angle", 35.0)};

  auto detector = std::make_unique<Detector>(binary_thres, detect_color, l_params, a_params);

  // Init classifier: model and labels ship in this package's share directory.
  auto pkg_path = ament_index_cpp::get_package_share_directory("armor_detector");
  auto model_path = pkg_path + "/model/mlp.onnx";
  auto label_path = pkg_path + "/model/label.txt";
  double threshold = this->declare_parameter("classifier_threshold", 0.7);
  std::vector<std::string> ignore_classes =
    this->declare_parameter("ignore_classes", std::vector<std::string>{"negative"});
  detector->classifier =
    std::make_unique<NumberClassifier>(model_path, label_path, threshold, ignore_classes);

  return detector;
}
// Run the detector on one image message. Re-reads the tunable parameters
// each frame, and, when debug is enabled, publishes the intermediate
// images and sorted light/armor debug data.
std::vector<Armor> ArmorDetectorNode::detectArmors(
  const sensor_msgs::msg::Image::ConstSharedPtr & img_msg)
{
  // Convert ROS img to cv::Mat (shared, no copy; "rgb8" expected upstream)
  auto img = cv_bridge::toCvShare(img_msg, "rgb8")->image;

  // Update params (allows live tuning via ros2 param set)
  detector_->binary_thres = get_parameter("binary_thres").as_int();
  detector_->detect_color = get_parameter("detect_color").as_int();
  detector_->classifier->threshold = get_parameter("classifier_threshold").as_double();

  auto armors = detector_->detect(img);

  // End-to-end latency from image stamp to detection completion, in ms.
  auto final_time = this->now();
  auto latency = (final_time - img_msg->header.stamp).seconds() * 1000;
  RCLCPP_DEBUG_STREAM(this->get_logger(), "Latency: " << latency << "ms");

  // Publish debug info
  if (debug_) {
    binary_img_pub_.publish(
      cv_bridge::CvImage(img_msg->header, "mono8", detector_->binary_img).toImageMsg());

    // Sort lights and armors data by x coordinate so plots line up with the image
    std::sort(
      detector_->debug_lights.data.begin(), detector_->debug_lights.data.end(),
      [](const auto & l1, const auto & l2) { return l1.center_x < l2.center_x; });
    std::sort(
      detector_->debug_armors.data.begin(), detector_->debug_armors.data.end(),
      [](const auto & a1, const auto & a2) { return a1.center_x < a2.center_x; });

    lights_data_pub_->publish(detector_->debug_lights);
    armors_data_pub_->publish(detector_->debug_armors);

    if (!armors.empty()) {
      auto all_num_img = detector_->getAllNumbersImage();
      number_img_pub_.publish(
        *cv_bridge::CvImage(img_msg->header, "mono8", all_num_img).toImageMsg());
    }

    // Annotated result image: detections, camera center, latency text.
    detector_->drawResults(img);
    // Draw camera center
    cv::circle(img, cam_center_, 5, cv::Scalar(255, 0, 0), 2);
    // Draw latency
    std::stringstream latency_ss;
    latency_ss << "Latency: " << std::fixed << std::setprecision(2) << latency << "ms";
    auto latency_s = latency_ss.str();
    cv::putText(
      img, latency_s, cv::Point(10, 30), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 255, 0), 2);
    result_img_pub_.publish(cv_bridge::CvImage(img_msg->header, "rgb8", img).toImageMsg());
  }

  return armors;
}
// Create all debug-only publishers; called at startup and whenever the
// "debug" parameter flips to true.
void ArmorDetectorNode::createDebugPublishers()
{
  // Structured per-light / per-armor diagnostic data.
  lights_data_pub_ =
    this->create_publisher<auto_aim_interfaces::msg::DebugLights>("/detector/debug_lights", 10);
  armors_data_pub_ =
    this->create_publisher<auto_aim_interfaces::msg::DebugArmors>("/detector/debug_armors", 10);

  // Intermediate and annotated image streams.
  binary_img_pub_ = image_transport::create_publisher(this, "/detector/binary_img");
  number_img_pub_ = image_transport::create_publisher(this, "/detector/number_img");
  result_img_pub_ = image_transport::create_publisher(this, "/detector/result_img");
}
// Tear down the debug publishers when the "debug" parameter flips to false.
void ArmorDetectorNode::destroyDebugPublishers()
{
  // rclcpp publishers are released by resetting the shared pointers;
  // image_transport publishers expose an explicit shutdown() instead.
  lights_data_pub_.reset();
  armors_data_pub_.reset();

  binary_img_pub_.shutdown();
  number_img_pub_.shutdown();
  result_img_pub_.shutdown();
}
void ArmorDetectorNode::publishMarkers()
{
using Marker = visualization_msgs::msg::Marker;
armor_marker_.action = armors_msg_.armors.empty() ? Marker::DELETE : Marker::ADD;
marker_array_.markers.emplace_back(armor_marker_);
marker_pub_->publish(marker_array_);
}
} // namespace rm_auto_aim
#include "rclcpp_components/register_node_macro.hpp"
// Register the component with class_loader.
// This acts as a sort of entry point, allowing the component to be discoverable when its library
// is being loaded into a running process.
RCLCPP_COMPONENTS_REGISTER_NODE(rm_auto_aim::ArmorDetectorNode)

View File

@ -1,147 +0,0 @@
// Copyright 2022 Chen Jun
// Licensed under the MIT License.
// OpenCV
#include <opencv2/core.hpp>
#include <opencv2/core/mat.hpp>
#include <opencv2/core/types.hpp>
#include <opencv2/dnn.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/opencv.hpp>
// STL
#include <algorithm>
#include <cstddef>
#include <fstream>
#include <iomanip>
#include <map>
#include <sstream>
#include <string>
#include <vector>
#include "armor_detector/armor.hpp"
#include "armor_detector/number_classifier.hpp"
namespace rm_auto_aim
{
// Load the ONNX MLP and the newline-separated class-name file.
// `thre` is the minimum confidence kept by classify(); `ignore_classes`
// lists labels that are always discarded.
NumberClassifier::NumberClassifier(
  const std::string & model_path, const std::string & label_path, const double thre,
  const std::vector<std::string> & ignore_classes)
: threshold(thre), ignore_classes_(ignore_classes)
{
  net_ = cv::dnn::readNetFromONNX(model_path);

  // One class name per line, in the order the network outputs them.
  std::ifstream label_file(label_path);
  for (std::string line; std::getline(label_file, line);) {
    class_names_.emplace_back(line);
  }
}
// For each armor, warp the region between its two lights to a canonical
// fronto-parallel patch, crop the central number ROI, and binarize it.
// The result is stored in armor.number_img (20x28 mono, Otsu-binarized).
void NumberClassifier::extractNumbers(const cv::Mat & src, std::vector<Armor> & armors)
{
  // Light length in image (target length of each light after warping, px)
  const int light_length = 12;
  // Image size after warp
  const int warp_height = 28;
  const int small_armor_width = 32;
  const int large_armor_width = 54;
  // Number ROI size (width x height), centered horizontally in the warp
  const cv::Size roi_size(20, 28);

  for (auto & armor : armors) {
    // Warp perspective transform: source quad is the four light endpoints,
    // ordered bottom-left, top-left, top-right, bottom-right.
    cv::Point2f lights_vertices[4] = {
      armor.left_light.bottom, armor.left_light.top, armor.right_light.top,
      armor.right_light.bottom};

    // Destination quad: lights vertically centered in the warped image.
    const int top_light_y = (warp_height - light_length) / 2 - 1;
    const int bottom_light_y = top_light_y + light_length;
    const int warp_width = armor.type == ArmorType::SMALL ? small_armor_width : large_armor_width;
    cv::Point2f target_vertices[4] = {
      cv::Point(0, bottom_light_y),
      cv::Point(0, top_light_y),
      cv::Point(warp_width - 1, top_light_y),
      cv::Point(warp_width - 1, bottom_light_y),
    };
    cv::Mat number_image;
    auto rotation_matrix = cv::getPerspectiveTransform(lights_vertices, target_vertices);
    cv::warpPerspective(src, number_image, rotation_matrix, cv::Size(warp_width, warp_height));

    // Get ROI: horizontally centered crop of roi_size
    number_image =
      number_image(cv::Rect(cv::Point((warp_width - roi_size.width) / 2, 0), roi_size));

    // Binarize (Otsu picks the threshold automatically)
    cv::cvtColor(number_image, number_image, cv::COLOR_RGB2GRAY);
    cv::threshold(number_image, number_image, 0, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);

    armor.number_img = number_image;
  }
}
// Run the MLP on each armor's number image, store label/confidence, then
// erase armors that are below threshold, in an ignored class, or whose
// label is inconsistent with the detected plate size.
void NumberClassifier::classify(std::vector<Armor> & armors)
{
  for (auto & armor : armors) {
    cv::Mat image = armor.number_img.clone();

    // Normalize: integer division of the CV_8U mat maps pixels to 0/1.
    image = image / 255.0;

    // Create blob from image
    cv::Mat blob;
    cv::dnn::blobFromImage(image, blob);

    // Set the input blob for the neural network
    net_.setInput(blob);
    // Forward pass the image blob through the model
    cv::Mat outputs = net_.forward();

    // Do softmax (shift by the max logit for numerical stability)
    float max_prob = *std::max_element(outputs.begin<float>(), outputs.end<float>());
    cv::Mat softmax_prob;
    cv::exp(outputs - max_prob, softmax_prob);
    float sum = static_cast<float>(cv::sum(softmax_prob)[0]);
    softmax_prob /= sum;

    double confidence;
    cv::Point class_id_point;
    minMaxLoc(softmax_prob.reshape(1, 1), nullptr, &confidence, nullptr, &class_id_point);
    int label_id = class_id_point.x;

    armor.confidence = confidence;
    armor.number = class_names_[label_id];

    std::stringstream result_ss;
    result_ss << armor.number << ": " << std::fixed << std::setprecision(1)
              << armor.confidence * 100.0 << "%";
    armor.classfication_result = result_ss.str();
  }

  // Drop armors failing any acceptance rule below.
  armors.erase(
    std::remove_if(
      armors.begin(), armors.end(),
      [this](const Armor & armor) {
        if (armor.confidence < threshold) {
          return true;
        }

        for (const auto & ignore_class : ignore_classes_) {
          if (armor.number == ignore_class) {
            return true;
          }
        }

        // Plate-size sanity check: e.g. outpost/guard/2 are never large plates,
        // 1 (hero) and base are never small plates.
        bool mismatch_armor_type = false;
        if (armor.type == ArmorType::LARGE) {
          mismatch_armor_type =
            armor.number == "outpost" || armor.number == "2" || armor.number == "guard";
        } else if (armor.type == ArmorType::SMALL) {
          mismatch_armor_type = armor.number == "1" || armor.number == "base";
        }
        return mismatch_armor_type;
      }),
    armors.end());
}
} // namespace rm_auto_aim

View File

@ -1,58 +0,0 @@
// Copyright 2022 Chen Jun
#include "armor_detector/pnp_solver.hpp"
#include <opencv2/calib3d.hpp>
#include <vector>
namespace rm_auto_aim
{
// Build the solver from camera intrinsics.
//
// camera_matrix: row-major 3x3 intrinsic matrix (sensor_msgs CameraInfo::k).
// dist_coeffs:   distortion coefficients (sensor_msgs CameraInfo::d).
//
// Fix: the distortion Mat is sized from dist_coeffs.size() instead of a
// hard-coded 1x5 — the old code read past the vector for 4-coefficient
// models and truncated 8/12/14-coefficient ones (all sizes cv::solvePnP
// accepts). Both Mats are .clone()d so they own their storage after the
// constructor arguments go out of scope.
PnPSolver::PnPSolver(
  const std::array<double, 9> & camera_matrix, const std::vector<double> & dist_coeffs)
: camera_matrix_(cv::Mat(3, 3, CV_64F, const_cast<double *>(camera_matrix.data())).clone()),
  dist_coeffs_(cv::Mat(1, static_cast<int>(dist_coeffs.size()), CV_64F,
                       const_cast<double *>(dist_coeffs.data()))
                 .clone())
{
  // Unit: m
  constexpr double small_half_y = SMALL_ARMOR_WIDTH / 2.0 / 1000.0;
  constexpr double small_half_z = SMALL_ARMOR_HEIGHT / 2.0 / 1000.0;
  constexpr double large_half_y = LARGE_ARMOR_WIDTH / 2.0 / 1000.0;
  constexpr double large_half_z = LARGE_ARMOR_HEIGHT / 2.0 / 1000.0;

  // Start from bottom left in clockwise order
  // Model coordinate: x forward, y left, z up
  small_armor_points_.emplace_back(cv::Point3f(0, small_half_y, -small_half_z));
  small_armor_points_.emplace_back(cv::Point3f(0, small_half_y, small_half_z));
  small_armor_points_.emplace_back(cv::Point3f(0, -small_half_y, small_half_z));
  small_armor_points_.emplace_back(cv::Point3f(0, -small_half_y, -small_half_z));

  large_armor_points_.emplace_back(cv::Point3f(0, large_half_y, -large_half_z));
  large_armor_points_.emplace_back(cv::Point3f(0, large_half_y, large_half_z));
  large_armor_points_.emplace_back(cv::Point3f(0, -large_half_y, large_half_z));
  large_armor_points_.emplace_back(cv::Point3f(0, -large_half_y, -large_half_z));
}
bool PnPSolver::solvePnP(const Armor & armor, cv::Mat & rvec, cv::Mat & tvec)
{
std::vector<cv::Point2f> image_armor_points;
// Fill in image points
image_armor_points.emplace_back(armor.left_light.bottom);
image_armor_points.emplace_back(armor.left_light.top);
image_armor_points.emplace_back(armor.right_light.top);
image_armor_points.emplace_back(armor.right_light.bottom);
// Solve pnp
auto object_points = armor.type == ArmorType::SMALL ? small_armor_points_ : large_armor_points_;
return cv::solvePnP(
object_points, image_armor_points, camera_matrix_, dist_coeffs_, rvec, tvec, false,
cv::SOLVEPNP_IPPE);
}
// Euclidean pixel distance from `image_point` to the principal point (cx, cy).
float PnPSolver::calculateDistanceToCenter(const cv::Point2f & image_point)
{
  const auto cx = static_cast<float>(camera_matrix_.at<double>(0, 2));
  const auto cy = static_cast<float>(camera_matrix_.at<double>(1, 2));
  const cv::Point2f principal_point(cx, cy);
  return static_cast<float>(cv::norm(image_point - principal_point));
}
} // namespace rm_auto_aim

View File

@ -1,28 +0,0 @@
// Copyright 2022 Chen Jun
#include <gtest/gtest.h>
#include <rclcpp/executors.hpp>
#include <rclcpp/node_options.hpp>
#include <rclcpp/utilities.hpp>
// STD
#include <memory>
#include "armor_detector/detector_node.hpp"
// Smoke test: the detector node must construct and destruct cleanly with
// default options (no parameters overridden, no messages exchanged).
TEST(ArmorDetectorNodeTest, NodeStartupTest)
{
  rclcpp::NodeOptions options;
  auto node = std::make_shared<rm_auto_aim::ArmorDetectorNode>(options);
  node.reset();
}
// Test entry point: ROS must be initialized before node-constructing tests
// run, and shut down before the process exits.
int main(int argc, char ** argv)
{
  testing::InitGoogleTest(&argc, argv);
  rclcpp::init(argc, argv);

  const int result = RUN_ALL_TESTS();

  rclcpp::shutdown();
  return result;
}

View File

@ -1,53 +0,0 @@
// Copyright 2022 Chen Jun
#include <gtest/gtest.h>
#include <ament_index_cpp/get_package_share_directory.hpp>
#include <opencv2/core/mat.hpp>
// STL
#include <algorithm>
#include <cfloat>
#include <chrono>
#include <iostream>
#include <map>
#include <vector>
#include "armor_detector/number_classifier.hpp"
using hrc = std::chrono::high_resolution_clock;
// Latency benchmark for NumberClassifier::classify (timing only, no
// correctness assertions).
//
// Fix: classify() erases armors whose confidence is below threshold or whose
// label is ignored, so reusing one vector meant every iteration after the
// first could be timing an empty input. Each iteration now classifies a
// fresh copy of the dummy armor (the copy is tiny relative to the forward
// pass being measured).
TEST(test_nc, benchmark)
{
  auto pkg_path = ament_index_cpp::get_package_share_directory("armor_detector");
  auto model_path = pkg_path + "/model/mlp.onnx";
  auto label_path = pkg_path + "/model/label.txt";
  rm_auto_aim::NumberClassifier nc(model_path, label_path, 0.5);

  auto dummy_armors = std::vector<rm_auto_aim::Armor>(1);
  // NOTE(review): cv::Mat(20, 28, ...) is 20 rows x 28 cols, whereas
  // extractNumbers() produces 28x20 number images — harmless for a pure
  // timing run, but worth confirming against the MLP's expected input.
  auto test_mat = cv::Mat(20, 28, CV_8UC1);
  dummy_armors[0].number_img = test_mat;

  int loop_num = 200;
  int warm_up = 30;  // iterations excluded from the statistics

  double time_min = DBL_MAX;
  double time_max = -DBL_MAX;
  double time_avg = 0;

  for (int i = 0; i < warm_up + loop_num; i++) {
    // Fresh copy each round: classify() may erase elements from its argument.
    auto armors = dummy_armors;
    auto start = hrc::now();
    nc.classify(armors);
    auto end = hrc::now();
    double time = std::chrono::duration<double, std::milli>(end - start).count();
    if (i >= warm_up) {
      time_min = std::min(time_min, time);
      time_max = std::max(time_max, time);
      time_avg += time;
    }
  }
  time_avg /= loop_num;

  std::cout << "time_min: " << time_min << "ms" << std::endl;
  std::cout << "time_max: " << time_max << "ms" << std::endl;
  std::cout << "time_avg: " << time_avg << "ms" << std::endl;
}

View File

@ -1,58 +0,0 @@
cmake_minimum_required(VERSION 3.10)
project(armor_tracker)

## Use C++14
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

## By adding -Wall and -Werror, the compiler does not ignore warnings anymore,
## enforcing cleaner code.
## -Wall/-Werror are compiler *options*, not preprocessor definitions, so the
## correct command is add_compile_options (add_definitions is meant for -D
## macros and only forwarded these flags by accident).
add_compile_options(-Wall -Werror)

## Export compile commands for clangd
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

#######################
## Find dependencies ##
#######################

find_package(ament_cmake_auto REQUIRED)
ament_auto_find_build_dependencies()

###########
## Build ##
###########

ament_auto_add_library(${PROJECT_NAME} SHARED
  DIRECTORY src
)

rclcpp_components_register_node(${PROJECT_NAME}
  PLUGIN rm_auto_aim::ArmorTrackerNode
  EXECUTABLE ${PROJECT_NAME}_node
)

#############
## Testing ##
#############

if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  # Skip the linters whose style conflicts with this project's clang-format.
  list(APPEND AMENT_LINT_AUTO_EXCLUDE
    ament_cmake_copyright
    ament_cmake_uncrustify
    ament_cmake_cpplint
  )
  ament_lint_auto_find_test_dependencies()

  # find_package(ament_cmake_gtest)
  # set(TEST_NAME test_kalman_filter)
  # ament_add_gtest(${TEST_NAME} test/${TEST_NAME}.cpp)
  # target_link_libraries(${TEST_NAME} ${PROJECT_NAME})
endif()

#############
## Install ##
#############

ament_auto_package()

View File

@ -1,84 +0,0 @@
# armor_tracker
- [ArmorTrackerNode](#armortrackernode)
- [Tracker](#tracker)
- [ExtendedKalmanFilter](#extendedkalmanfilter)
## ArmorTrackerNode
装甲板处理节点
订阅识别节点发布的装甲板三维位置及机器人的坐标转换信息将装甲板三维位置变换到指定惯性系一般是以云台中心为原点IMU 上电时的 Yaw 朝向为 X 轴的惯性系)下,然后将装甲板目标送入跟踪器中,输出跟踪机器人在指定惯性系下的状态
订阅:
- 已识别到的装甲板 `/detector/armors`
- 机器人的坐标转换信息 `/tf` `/tf_static`
发布:
- 最终锁定的目标 `/tracker/target`
参数:
- 跟踪器参数 tracker
- 两帧间目标可匹配的最大距离 max_match_distance
- `DETECTING` 状态进入 `TRACKING` 状态的阈值 tracking_threshold
- `TRACKING` 状态进入 `LOST` 状态的阈值 lost_threshold
## ExtendedKalmanFilter
$$ x_c = x_a + r * cos (\theta) $$
$$ y_c = y_a + r * sin (\theta) $$
$$ x = [x_c, y_c,z, yaw, v_{xc}, v_{yc},v_z, v_{yaw}, r]^T $$
参考 OpenCV 中的卡尔曼滤波器使用 Eigen 进行了实现
[卡尔曼滤波器](https://zh.wikipedia.org/wiki/%E5%8D%A1%E5%B0%94%E6%9B%BC%E6%BB%A4%E6%B3%A2)
![](docs/Kalman_filter_model.png)
考虑到自瞄任务中对于目标只有观测没有控制,所以输入-控制模型 $B$ 和控制器向量 $u$ 可忽略。
预测及更新的公式如下:
预测:
$$ x_{k|k-1} = F * x_{k-1|k-1} $$
$$ P_{k|k-1} = F * P_{k-1|k-1}* F^T + Q $$
更新:
$$ K = P_{k|k-1} * H^T * (H * P_{k|k-1} * H^T + R)^{-1} $$
$$ x_{k|k} = x_{k|k-1} + K * (z_k - H * x_{k|k-1}) $$
$$ P_{k|k} = (I - K * H) * P_{k|k-1} $$
## Tracker
参考 [SORT(Simple online and realtime tracking)](https://ieeexplore.ieee.org/abstract/document/7533003/) 中对于目标匹配的方法,使用卡尔曼滤波器对单目标在三维空间中进行跟踪
在此跟踪器中卡尔曼滤波器观测量为目标在指定惯性系中的位置xyz状态量为目标位置及速度xyz+vx vy vz
在对目标的运动模型建模为在指定惯性系中的匀速线性运动,即状态转移为 $x_{pre} = x_{post} + v_{post} * dt$
目标关联的判断依据为三维位置的 L2 欧式距离
跟踪器共有四个状态:
- `DETECTING` :短暂识别到目标,需要更多帧识别信息才能进入跟踪状态
- `TRACKING` :跟踪器正常跟踪目标中
- `TEMP_LOST` :跟踪器短暂丢失目标,通过卡尔曼滤波器预测目标
- `LOST` :跟踪器完全丢失目标
工作流程:
- init
跟踪器默认选择离相机光心最近的目标作为跟踪对象,选择目标后初始化卡尔曼滤波器,初始状态设为当前目标位置,速度设为 0
- update:
首先由卡尔曼滤波器得到目标在当前帧的预测位置,然后遍历当前帧中的目标位置与预测位置进行匹配,若当前帧不存在目标或所有目标位置与预测位置的偏差都过大则认为目标丢失,重置卡尔曼滤波器。
最后选取位置相差最小的目标作为最佳匹配项,更新卡尔曼滤波器,将更新后的状态作为跟踪器的结果输出

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

View File

@ -1,74 +0,0 @@
// Copyright 2022 Chen Jun
#ifndef ARMOR_PROCESSOR__KALMAN_FILTER_HPP_
#define ARMOR_PROCESSOR__KALMAN_FILTER_HPP_
#include <Eigen/Dense>
#include <functional>
namespace rm_auto_aim
{
// Model-agnostic extended Kalman filter: the process/observation functions,
// their Jacobians, and the (possibly state/measurement dependent) noise
// covariances are injected as std::function objects.
// Usage contract: call predict() once per cycle before update(); update()
// linearizes around the prior produced by the preceding predict().
class ExtendedKalmanFilter
{
public:
  ExtendedKalmanFilter() = default;

  using VecVecFunc = std::function<Eigen::VectorXd(const Eigen::VectorXd &)>;
  using VecMatFunc = std::function<Eigen::MatrixXd(const Eigen::VectorXd &)>;
  using VoidMatFunc = std::function<Eigen::MatrixXd()>;

  // P0 also fixes the state dimension n.
  explicit ExtendedKalmanFilter(
    const VecVecFunc & f, const VecVecFunc & h, const VecMatFunc & j_f, const VecMatFunc & j_h,
    const VoidMatFunc & u_q, const VecMatFunc & u_r, const Eigen::MatrixXd & P0);

  // Set the initial state
  void setState(const Eigen::VectorXd & x0);

  // Compute a predicted state
  Eigen::MatrixXd predict();

  // Update the estimated state based on measurement
  Eigen::MatrixXd update(const Eigen::VectorXd & z);

private:
  // Process nonlinear vector function
  VecVecFunc f;
  // Observation nonlinear vector function
  VecVecFunc h;
  // Jacobian of f()
  VecMatFunc jacobian_f;
  Eigen::MatrixXd F;
  // Jacobian of h()
  VecMatFunc jacobian_h;
  Eigen::MatrixXd H;
  // Process noise covariance matrix (recomputed each predict)
  VoidMatFunc update_Q;
  Eigen::MatrixXd Q;
  // Measurement noise covariance matrix (recomputed from z each update)
  VecMatFunc update_R;
  Eigen::MatrixXd R;
  // Priori error estimate covariance matrix
  Eigen::MatrixXd P_pri;
  // Posteriori error estimate covariance matrix
  Eigen::MatrixXd P_post;
  // Kalman gain
  Eigen::MatrixXd K;
  // System dimensions
  int n;
  // N-size identity
  Eigen::MatrixXd I;
  // Priori state
  Eigen::VectorXd x_pri;
  // Posteriori state
  Eigen::VectorXd x_post;
};
} // namespace rm_auto_aim
#endif // ARMOR_PROCESSOR__KALMAN_FILTER_HPP_

View File

@ -1,87 +0,0 @@
// Copyright 2022 Chen Jun
#ifndef ARMOR_PROCESSOR__TRACKER_HPP_
#define ARMOR_PROCESSOR__TRACKER_HPP_
// Eigen
#include <Eigen/Eigen>
// ROS
#include <geometry_msgs/msg/point.hpp>
#include <geometry_msgs/msg/quaternion.hpp>
#include <geometry_msgs/msg/vector3.hpp>
// STD
#include <memory>
#include <string>
#include "armor_tracker/extended_kalman_filter.hpp"
#include "auto_aim_interfaces/msg/armors.hpp"
#include "auto_aim_interfaces/msg/target.hpp"
namespace rm_auto_aim
{
enum class ArmorsNum { NORMAL_4 = 4, BALANCE_2 = 2, OUTPOST_3 = 3 };
// Single-target armor tracker: matches incoming armors to the EKF prediction
// by id and position, and drives a LOST / DETECTING / TRACKING / TEMP_LOST
// state machine.
class Tracker
{
public:
  Tracker(double max_match_distance, double max_match_yaw_diff);

  using Armors = auto_aim_interfaces::msg::Armors;
  using Armor = auto_aim_interfaces::msg::Armor;

  // Pick a target from the first armors message and initialize the EKF.
  void init(const Armors::SharedPtr & armors_msg);

  // One predict/match/update cycle plus the state-machine transition.
  void update(const Armors::SharedPtr & armors_msg);

  ExtendedKalmanFilter ekf;

  // Frame-count thresholds for DETECTING->TRACKING and TEMP_LOST->LOST;
  // set by the owning node after construction.
  int tracking_thres;
  int lost_thres;

  enum State {
    LOST,
    DETECTING,
    TRACKING,
    TEMP_LOST,
  } tracker_state;

  std::string tracked_id;
  Armor tracked_armor;
  ArmorsNum tracked_armors_num;

  // Diagnostics of the last match attempt (position / yaw residuals).
  double info_position_diff;
  double info_yaw_diff;

  Eigen::VectorXd measurement;
  Eigen::VectorXd target_state;

  // To store another pair of armors message
  double dz, another_r;

private:
  void initEKF(const Armor & a);

  void updateArmorsNum(const Armor & a);

  void handleArmorJump(const Armor & a);

  double orientationToYaw(const geometry_msgs::msg::Quaternion & q);

  Eigen::Vector3d getArmorPositionFromState(const Eigen::VectorXd & x);

  double max_match_distance_;
  double max_match_yaw_diff_;

  int detect_count_;
  int lost_count_;

  double last_yaw_;
};
} // namespace rm_auto_aim
#endif // ARMOR_PROCESSOR__TRACKER_HPP_

View File

@ -1,76 +0,0 @@
// Copyright 2022 Chen Jun
#ifndef ARMOR_PROCESSOR__PROCESSOR_NODE_HPP_
#define ARMOR_PROCESSOR__PROCESSOR_NODE_HPP_
// ROS
#include <message_filters/subscriber.h>
#include <tf2_ros/buffer.h>
#include <tf2_ros/create_timer_ros.h>
#include <tf2_ros/message_filter.h>
#include <tf2_ros/transform_listener.h>
#include <rclcpp/rclcpp.hpp>
#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
#include <visualization_msgs/msg/marker_array.hpp>
// STD
#include <memory>
#include <string>
#include <vector>
#include "armor_tracker/tracker.hpp"
#include "auto_aim_interfaces/msg/armors.hpp"
#include "auto_aim_interfaces/msg/target.hpp"
#include "auto_aim_interfaces/msg/tracker_info.hpp"
namespace rm_auto_aim
{
using tf2_filter = tf2_ros::MessageFilter<auto_aim_interfaces::msg::Armors>;
// Tracking node: transforms detected armors into the target frame via tf2,
// feeds them to the Tracker, and publishes the tracked target plus
// visualization markers.
class ArmorTrackerNode : public rclcpp::Node
{
public:
  explicit ArmorTrackerNode(const rclcpp::NodeOptions & options);

private:
  void armorsCallback(const auto_aim_interfaces::msg::Armors::SharedPtr armors_ptr);

  void publishMarkers(const auto_aim_interfaces::msg::Target & target_msg);

  // Maximum allowable armor distance in the XOY plane
  double max_armor_distance_;

  // The time when the last message was received
  rclcpp::Time last_time_;
  double dt_;

  // Armor tracker
  // EKF noise parameters: process variances (s2q*) and measurement factors.
  double s2qxyz_, s2qyaw_, s2qr_;
  double r_xyz_factor, r_yaw;
  double lost_time_thres_;
  std::unique_ptr<Tracker> tracker_;

  // Subscriber with tf2 message_filter: armors are only delivered once the
  // transform into target_frame_ is available.
  std::string target_frame_;
  std::shared_ptr<tf2_ros::Buffer> tf2_buffer_;
  std::shared_ptr<tf2_ros::TransformListener> tf2_listener_;
  message_filters::Subscriber<auto_aim_interfaces::msg::Armors> armors_sub_;
  std::shared_ptr<tf2_filter> tf2_filter_;

  // Tracker info publisher (residual diagnostics)
  rclcpp::Publisher<auto_aim_interfaces::msg::TrackerInfo>::SharedPtr info_pub_;

  // Publisher
  rclcpp::Publisher<auto_aim_interfaces::msg::Target>::SharedPtr target_pub_;

  // Visualization marker publisher
  visualization_msgs::msg::Marker position_marker_;
  visualization_msgs::msg::Marker linear_v_marker_;
  visualization_msgs::msg::Marker angular_v_marker_;
  visualization_msgs::msg::Marker armor_marker_;
  rclcpp::Publisher<visualization_msgs::msg::MarkerArray>::SharedPtr marker_pub_;
};
} // namespace rm_auto_aim
#endif // ARMOR_PROCESSOR__PROCESSOR_NODE_HPP_

View File

@ -1,34 +0,0 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>armor_tracker</name>
<version>0.1.0</version>
<description>Armor tracker module of the RoboMaster auto-aim system: tracks detected armors with an extended Kalman filter.</description>
<maintainer email="chen.junn@outlook.com">Chen Jun</maintainer>
<license>BSD</license>
<url type="website">https://github.com/chenjunnn/rm_auto_aim</url>
<url type="bugtracker">https://github.com/chenjunnn/rm_auto_aim/issues</url>
<author email="chen.junn@outlook.com">Chen Jun</author>
<!-- buildtool_depend: dependencies of the build process -->
<buildtool_depend>ament_cmake</buildtool_depend>
<!-- depend: build, export, and execution dependency -->
<depend>rclcpp</depend>
<depend>rclcpp_components</depend>
<depend>eigen</depend>
<depend>angles</depend>
<depend>geometry_msgs</depend>
<depend>visualization_msgs</depend>
<depend>message_filters</depend>
<depend>tf2_geometry_msgs</depend>
<depend>auto_aim_interfaces</depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<test_depend>ament_cmake_clang_format</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -1,51 +0,0 @@
// Copyright 2022 Chen Jun
#include "armor_tracker/extended_kalman_filter.hpp"
namespace rm_auto_aim
{
// All model functions are caller-supplied; P0 seeds the posterior covariance
// and fixes the state dimension. Members initialize in declaration order, so
// n is computed from P0 before I, x_pri and x_post are sized from it.
ExtendedKalmanFilter::ExtendedKalmanFilter(
  const VecVecFunc & f, const VecVecFunc & h, const VecMatFunc & j_f, const VecMatFunc & j_h,
  const VoidMatFunc & u_q, const VecMatFunc & u_r, const Eigen::MatrixXd & P0)
: f(f),
  h(h),
  jacobian_f(j_f),
  jacobian_h(j_h),
  update_Q(u_q),
  update_R(u_r),
  P_post(P0),
  n(P0.rows()),
  I(Eigen::MatrixXd::Identity(n, n)),
  x_pri(n),
  x_post(n)
{
}
void ExtendedKalmanFilter::setState(const Eigen::VectorXd & x0) { x_post = x0; }
// Propagate the state one step: x_pri = f(x_post), P_pri = F P F^T + Q.
// Returns the prior state estimate.
Eigen::MatrixXd ExtendedKalmanFilter::predict()
{
  // Linearize around the latest posterior and refresh the process noise.
  F = jacobian_f(x_post);
  Q = update_Q();

  x_pri = f(x_post);
  P_pri = F * P_post * F.transpose() + Q;

  // handle the case when there will be no measurement before the next predict
  x_post = x_pri;
  P_post = P_pri;

  return x_pri;
}
// Fuse measurement z into the prior produced by the preceding predict().
// Returns the posterior state estimate.
Eigen::MatrixXd ExtendedKalmanFilter::update(const Eigen::VectorXd & z)
{
  // Linearize the observation model around the prior; R may depend on z.
  H = jacobian_h(x_pri);
  R = update_R(z);

  // Kalman gain, innovation correction, covariance update.
  K = P_pri * H.transpose() * (H * P_pri * H.transpose() + R).inverse();
  x_post = x_pri + K * (z - h(x_pri));
  P_post = (I - K * H) * P_pri;

  return x_post;
}
} // namespace rm_auto_aim

View File

@ -1,239 +0,0 @@
// Copyright 2022 Chen Jun
#include "armor_tracker/tracker.hpp"
#include <angles/angles.h>
#include <tf2/LinearMath/Matrix3x3.h>
#include <tf2/LinearMath/Quaternion.h>
#include <tf2/convert.h>
#include <rclcpp/logger.hpp>
#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
// STD
#include <cfloat>
#include <memory>
#include <string>
namespace rm_auto_aim
{
// Construct an idle (LOST) tracker.
//
// max_match_distance: max position residual (m) for an armor to match the
//                     EKF prediction.
// max_match_yaw_diff: max yaw residual (rad) before an armor-jump is assumed.
//
// Fix: detect_count_, lost_count_, last_yaw_, the public thresholds and the
// dz/another_r bookkeeping were previously left uninitialized; update()
// increments detect_count_ before any assignment, which read an
// indeterminate value. All scalar members are now zero-initialized
// (initializer order follows the declaration order in tracker.hpp).
Tracker::Tracker(double max_match_distance, double max_match_yaw_diff)
: tracking_thres(0),
  lost_thres(0),
  tracker_state(LOST),
  tracked_id(std::string("")),
  info_position_diff(0.0),
  info_yaw_diff(0.0),
  measurement(Eigen::VectorXd::Zero(4)),
  target_state(Eigen::VectorXd::Zero(9)),
  dz(0.0),
  another_r(0.0),
  max_match_distance_(max_match_distance),
  max_match_yaw_diff_(max_match_yaw_diff),
  detect_count_(0),
  lost_count_(0),
  last_yaw_(0.0)
{
}
// Start tracking: choose a target from the message, seed the EKF, and enter
// the DETECTING state. No-op when the message contains no armors.
void Tracker::init(const Armors::SharedPtr & armors_msg)
{
  if (armors_msg->armors.empty()) {
    return;
  }

  // Simply choose the armor that is closest to image center
  double min_distance = DBL_MAX;
  tracked_armor = armors_msg->armors[0];
  for (const auto & armor : armors_msg->armors) {
    if (armor.distance_to_image_center < min_distance) {
      min_distance = armor.distance_to_image_center;
      tracked_armor = armor;
    }
  }

  initEKF(tracked_armor);
  RCLCPP_DEBUG(rclcpp::get_logger("armor_tracker"), "Init EKF!");

  tracked_id = tracked_armor.number;
  tracker_state = DETECTING;

  updateArmorsNum(tracked_armor);
}
// One tracking cycle: EKF predict, match incoming armors against the
// prediction, EKF update (or armor-jump handling), radius clamping, and the
// DETECTING / TRACKING / TEMP_LOST / LOST state machine.
void Tracker::update(const Armors::SharedPtr & armors_msg)
{
  // KF predict
  Eigen::VectorXd ekf_prediction = ekf.predict();
  RCLCPP_DEBUG(rclcpp::get_logger("armor_tracker"), "EKF predict");

  bool matched = false;
  // Use KF prediction as default target state if no matched armor is found
  target_state = ekf_prediction;

  if (!armors_msg->armors.empty()) {
    // Find the closest armor with the same id
    Armor same_id_armor;
    int same_id_armors_count = 0;
    auto predicted_position = getArmorPositionFromState(ekf_prediction);
    double min_position_diff = DBL_MAX;
    double yaw_diff = DBL_MAX;
    for (const auto & armor : armors_msg->armors) {
      // Only consider armors with the same id
      if (armor.number == tracked_id) {
        same_id_armor = armor;
        same_id_armors_count++;
        // Calculate the difference between the predicted position and the current armor position
        auto p = armor.pose.position;
        Eigen::Vector3d position_vec(p.x, p.y, p.z);
        double position_diff = (predicted_position - position_vec).norm();
        if (position_diff < min_position_diff) {
          // Find the closest armor
          min_position_diff = position_diff;
          // NOTE(review): unqualified `abs` may resolve to the C integer
          // overload depending on included headers, truncating the residual;
          // also the difference is not wrapped to [-pi, pi] — confirm whether
          // std::abs + angles::shortest_angular_distance is intended here.
          yaw_diff = abs(orientationToYaw(armor.pose.orientation) - ekf_prediction(6));
          tracked_armor = armor;
        }
      }
    }

    // Store tracker info
    info_position_diff = min_position_diff;
    info_yaw_diff = yaw_diff;

    // Check if the distance and yaw difference of closest armor are within the threshold
    if (min_position_diff < max_match_distance_ && yaw_diff < max_match_yaw_diff_) {
      // Matched armor found
      matched = true;
      auto p = tracked_armor.pose.position;
      // Update EKF
      double measured_yaw = orientationToYaw(tracked_armor.pose.orientation);
      measurement = Eigen::Vector4d(p.x, p.y, p.z, measured_yaw);
      target_state = ekf.update(measurement);
      RCLCPP_DEBUG(rclcpp::get_logger("armor_tracker"), "EKF update");
    } else if (same_id_armors_count == 1 && yaw_diff > max_match_yaw_diff_) {
      // Matched armor not found, but there is only one armor with the same id
      // and yaw has jumped, take this case as the target is spinning and armor jumped
      handleArmorJump(same_id_armor);
    } else {
      // No matched armor found
      RCLCPP_WARN(rclcpp::get_logger("armor_tracker"), "No matched armor found!");
    }
  }

  // Prevent radius from spreading (state index 8 = rotation radius r,
  // clamped to a physically plausible [0.12, 0.4] m)
  if (target_state(8) < 0.12) {
    target_state(8) = 0.12;
    ekf.setState(target_state);
  } else if (target_state(8) > 0.4) {
    target_state(8) = 0.4;
    ekf.setState(target_state);
  }

  // Tracking state machine
  if (tracker_state == DETECTING) {
    // Need tracking_thres consecutive matches to confirm the target.
    if (matched) {
      detect_count_++;
      if (detect_count_ > tracking_thres) {
        detect_count_ = 0;
        tracker_state = TRACKING;
      }
    } else {
      detect_count_ = 0;
      tracker_state = LOST;
    }
  } else if (tracker_state == TRACKING) {
    if (!matched) {
      tracker_state = TEMP_LOST;
      lost_count_++;
    }
  } else if (tracker_state == TEMP_LOST) {
    // Coast on the prediction for up to lost_thres frames before giving up.
    if (!matched) {
      lost_count_++;
      if (lost_count_ > lost_thres) {
        lost_count_ = 0;
        tracker_state = LOST;
      }
    } else {
      tracker_state = TRACKING;
      lost_count_ = 0;
    }
  }
}
void Tracker::initEKF(const Armor & a)
{
  // Initialize the 9-dim EKF state (xc, v_xc, yc, v_yc, za, v_za, yaw, v_yaw, r)
  // from the first observed armor of the newly tracked target.
  double xa = a.pose.position.x;
  double ya = a.pose.position.y;
  double za = a.pose.position.z;
  last_yaw_ = 0;  // reset yaw unwrapping before converting the first orientation
  double yaw = orientationToYaw(a.pose.orientation);

  // Place the initial robot center r = 0.26 m behind the armor along its yaw.
  // NOTE(review): an earlier comment here said 0.2 m; the code uses 0.26.
  target_state = Eigen::VectorXd::Zero(9);
  double r = 0.26;
  double xc = xa + r * cos(yaw);
  double yc = ya + r * sin(yaw);
  // No height offset yet; the second radius starts equal to the first.
  dz = 0, another_r = r;
  target_state << xc, 0, yc, 0, za, 0, yaw, 0, r;
  ekf.setState(target_state);
}
void Tracker::updateArmorsNum(const Armor & armor)
{
  // Classify how many armor plates the tracked robot carries, based on the
  // tracked id and the observed plate size.
  const bool is_balanced_infantry =
    armor.type == "large" &&
    (tracked_id == "3" || tracked_id == "4" || tracked_id == "5");

  if (is_balanced_infantry) {
    tracked_armors_num = ArmorsNum::BALANCE_2;  // balanced standard: 2 plates
  } else if (tracked_id == "outpost") {
    tracked_armors_num = ArmorsNum::OUTPOST_3;  // outpost: 3 plates
  } else {
    tracked_armors_num = ArmorsNum::NORMAL_4;   // everything else: 4 plates
  }
}
void Tracker::handleArmorJump(const Armor & current_armor)
{
  // Invoked when the single same-id armor's yaw jumped past the matching
  // threshold: the target is assumed to be spinning and a different plate is
  // now facing the camera, so retarget the filter onto that plate.
  double yaw = orientationToYaw(current_armor.pose.orientation);
  target_state(6) = yaw;
  updateArmorsNum(current_armor);
  // Only 4 armors has 2 radius and height: remember the height offset of the
  // previous plate and swap in the other radius.
  if (tracked_armors_num == ArmorsNum::NORMAL_4) {
    dz = target_state(4) - current_armor.pose.position.z;
    target_state(4) = current_armor.pose.position.z;
    std::swap(target_state(8), another_r);
  }
  RCLCPP_WARN(rclcpp::get_logger("armor_tracker"), "Armor jump!");

  // If position difference is larger than max_match_distance_,
  // take this case as the ekf diverged, reset the state
  auto p = current_armor.pose.position;
  Eigen::Vector3d current_p(p.x, p.y, p.z);
  Eigen::Vector3d infer_p = getArmorPositionFromState(target_state);
  if ((current_p - infer_p).norm() > max_match_distance_) {
    double r = target_state(8);
    // Rebuild the center from the measured armor, zeroing all velocities.
    target_state(0) = p.x + r * cos(yaw);  // xc
    target_state(1) = 0;                   // vxc
    target_state(2) = p.y + r * sin(yaw);  // yc
    target_state(3) = 0;                   // vyc
    target_state(4) = p.z;                 // za
    target_state(5) = 0;                   // vza
    RCLCPP_ERROR(rclcpp::get_logger("armor_tracker"), "Reset State!");
  }
  ekf.setState(target_state);
}
double Tracker::orientationToYaw(const geometry_msgs::msg::Quaternion & q)
{
  // Convert the armor orientation quaternion into an unwrapped yaw angle.
  tf2::Quaternion tf_q;
  tf2::fromMsg(q, tf_q);
  double roll = 0.0;
  double pitch = 0.0;
  double yaw = 0.0;
  tf2::Matrix3x3(tf_q).getRPY(roll, pitch, yaw);
  // Unwrap: step from the previous yaw by the shortest angular distance so
  // the returned value is continuous (-inf..inf) instead of wrapping at +/-pi.
  const double step = angles::shortest_angular_distance(last_yaw_, yaw);
  yaw = last_yaw_ + step;
  last_yaw_ = yaw;
  return yaw;
}
Eigen::Vector3d Tracker::getArmorPositionFromState(const Eigen::VectorXd & x)
{
  // Project the robot-center state onto the tracked armor position.
  // State layout used here: x(0)=xc, x(2)=yc, x(4)=za, x(6)=yaw, x(8)=r.
  const double center_x = x(0);
  const double center_y = x(2);
  const double armor_z = x(4);
  const double yaw = x(6);
  const double radius = x(8);
  // The armor sits at distance r from the center, opposite the yaw direction.
  return Eigen::Vector3d(
    center_x - radius * std::cos(yaw), center_y - radius * std::sin(yaw), armor_z);
}
} // namespace rm_auto_aim

View File

@ -1,336 +0,0 @@
// Copyright 2022 Chen Jun
#include "armor_tracker/tracker_node.hpp"
// STD
#include <memory>
#include <vector>
namespace rm_auto_aim
{
ArmorTrackerNode::ArmorTrackerNode(const rclcpp::NodeOptions & options)
: Node("armor_tracker", options)
{
  // Construct the tracker node: declare parameters, build the EKF model
  // (process/observation functions, Jacobians, noise models), wire up the
  // tf2 message filter subscription and create all publishers/markers.
  RCLCPP_INFO(this->get_logger(), "Starting TrackerNode!");

  // Maximum allowable armor distance in the XOY plane
  max_armor_distance_ = this->declare_parameter("max_armor_distance", 10.0);

  // Tracker matching thresholds and state-machine counters
  double max_match_distance = this->declare_parameter("tracker.max_match_distance", 0.15);
  double max_match_yaw_diff = this->declare_parameter("tracker.max_match_yaw_diff", 1.0);
  tracker_ = std::make_unique<Tracker>(max_match_distance, max_match_yaw_diff);
  tracker_->tracking_thres = this->declare_parameter("tracker.tracking_thres", 5);
  lost_time_thres_ = this->declare_parameter("tracker.lost_time_thres", 0.3);

  // EKF
  // xa = x_armor, xc = x_robot_center
  // state: xc, v_xc, yc, v_yc, za, v_za, yaw, v_yaw, r
  // measurement: xa, ya, za, yaw
  // f - Process function (constant velocity on xc, yc, za, yaw; r constant)
  auto f = [this](const Eigen::VectorXd & x) {
    Eigen::VectorXd x_new = x;
    x_new(0) += x(1) * dt_;
    x_new(2) += x(3) * dt_;
    x_new(4) += x(5) * dt_;
    x_new(6) += x(7) * dt_;
    return x_new;
  };
  // J_f - Jacobian of process function
  auto j_f = [this](const Eigen::VectorXd &) {
    Eigen::MatrixXd f(9, 9);
    // clang-format off
    f << 1,   dt_, 0,   0,   0,   0,   0,   0,   0,
         0,   1,   0,   0,   0,   0,   0,   0,   0,
         0,   0,   1,   dt_, 0,   0,   0,   0,   0,
         0,   0,   0,   1,   0,   0,   0,   0,   0,
         0,   0,   0,   0,   1,   dt_, 0,   0,   0,
         0,   0,   0,   0,   0,   1,   0,   0,   0,
         0,   0,   0,   0,   0,   0,   1,   dt_, 0,
         0,   0,   0,   0,   0,   0,   0,   1,   0,
         0,   0,   0,   0,   0,   0,   0,   0,   1;
    // clang-format on
    return f;
  };
  // h - Observation function: armor position from center state
  auto h = [](const Eigen::VectorXd & x) {
    Eigen::VectorXd z(4);
    double xc = x(0), yc = x(2), yaw = x(6), r = x(8);
    z(0) = xc - r * cos(yaw);  // xa
    z(1) = yc - r * sin(yaw);  // ya
    z(2) = x(4);               // za
    z(3) = x(6);               // yaw
    return z;
  };
  // J_h - Jacobian of observation function
  auto j_h = [](const Eigen::VectorXd & x) {
    Eigen::MatrixXd h(4, 9);
    double yaw = x(6), r = x(8);
    // clang-format off
    //    xc   v_xc yc   v_yc za   v_za yaw         v_yaw r
    h << 1,   0,   0,   0,   0,   0,   r*sin(yaw), 0,   -cos(yaw),
         0,   0,   1,   0,   0,   0,   -r*cos(yaw),0,   -sin(yaw),
         0,   0,   0,   0,   1,   0,   0,          0,   0,
         0,   0,   0,   0,   0,   0,   1,          0,   0;
    // clang-format on
    return h;
  };
  // update_Q - process noise covariance matrix
  s2qxyz_ = declare_parameter("ekf.sigma2_q_xyz", 20.0);
  s2qyaw_ = declare_parameter("ekf.sigma2_q_yaw", 100.0);
  s2qr_ = declare_parameter("ekf.sigma2_q_r", 800.0);
  auto u_q = [this]() {
    Eigen::MatrixXd q(9, 9);
    double t = dt_, x = s2qxyz_, y = s2qyaw_, r = s2qr_;
    double q_x_x = pow(t, 4) / 4 * x, q_x_vx = pow(t, 3) / 2 * x, q_vx_vx = pow(t, 2) * x;
    // BUGFIX: the yaw cross term previously used the xyz noise (x) instead of
    // the yaw noise (y), making the yaw block of Q inconsistent with q_y_y
    // and q_vy_vy (and potentially not positive semi-definite).
    double q_y_y = pow(t, 4) / 4 * y, q_y_vy = pow(t, 3) / 2 * y, q_vy_vy = pow(t, 2) * y;
    double q_r = pow(t, 4) / 4 * r;
    // clang-format off
    //    xc      v_xc    yc      v_yc    za      v_za    yaw     v_yaw   r
    q << q_x_x,  q_x_vx, 0,      0,      0,      0,      0,      0,      0,
         q_x_vx, q_vx_vx,0,      0,      0,      0,      0,      0,      0,
         0,      0,      q_x_x,  q_x_vx, 0,      0,      0,      0,      0,
         0,      0,      q_x_vx, q_vx_vx,0,      0,      0,      0,      0,
         0,      0,      0,      0,      q_x_x,  q_x_vx, 0,      0,      0,
         0,      0,      0,      0,      q_x_vx, q_vx_vx,0,      0,      0,
         0,      0,      0,      0,      0,      0,      q_y_y,  q_y_vy, 0,
         0,      0,      0,      0,      0,      0,      q_y_vy, q_vy_vy,0,
         0,      0,      0,      0,      0,      0,      0,      0,      q_r;
    // clang-format on
    return q;
  };
  // update_R - measurement noise covariance matrix; xyz noise scales with the
  // measured coordinate magnitude, yaw noise is constant.
  r_xyz_factor = declare_parameter("ekf.r_xyz_factor", 0.05);
  r_yaw = declare_parameter("ekf.r_yaw", 0.02);
  auto u_r = [this](const Eigen::VectorXd & z) {
    Eigen::DiagonalMatrix<double, 4> r;
    double x = r_xyz_factor;
    r.diagonal() << abs(x * z[0]), abs(x * z[1]), abs(x * z[2]), r_yaw;
    return r;
  };
  // P - error estimate covariance matrix
  Eigen::DiagonalMatrix<double, 9> p0;
  p0.setIdentity();
  tracker_->ekf = ExtendedKalmanFilter{f, h, j_f, j_h, u_q, u_r, p0};

  // Subscriber with tf2 message_filter
  // tf2 relevant
  tf2_buffer_ = std::make_shared<tf2_ros::Buffer>(this->get_clock());
  // Create the timer interface before call to waitForTransform,
  // to avoid a tf2_ros::CreateTimerInterfaceException exception
  auto timer_interface = std::make_shared<tf2_ros::CreateTimerROS>(
    this->get_node_base_interface(), this->get_node_timers_interface());
  tf2_buffer_->setCreateTimerInterface(timer_interface);
  tf2_listener_ = std::make_shared<tf2_ros::TransformListener>(*tf2_buffer_);
  // subscriber and filter: armors are delivered only once the transform to
  // target_frame_ is available
  armors_sub_.subscribe(this, "/detector/armors", rmw_qos_profile_sensor_data);
  target_frame_ = this->declare_parameter("target_frame", "odom");
  tf2_filter_ = std::make_shared<tf2_filter>(
    armors_sub_, *tf2_buffer_, target_frame_, 10, this->get_node_logging_interface(),
    this->get_node_clock_interface(), std::chrono::duration<int>(1));
  // Register a callback with tf2_ros::MessageFilter to be called when transforms are available
  tf2_filter_->registerCallback(&ArmorTrackerNode::armorsCallback, this);

  // Measurement publisher (for debug usage)
  info_pub_ = this->create_publisher<auto_aim_interfaces::msg::TrackerInfo>("/tracker/info", 10);
  // Publisher
  target_pub_ = this->create_publisher<auto_aim_interfaces::msg::Target>(
    "/tracker/target", rclcpp::SensorDataQoS());

  // Visualization Marker Publisher
  // See http://wiki.ros.org/rviz/DisplayTypes/Marker
  position_marker_.ns = "position";
  position_marker_.type = visualization_msgs::msg::Marker::SPHERE;
  position_marker_.scale.x = position_marker_.scale.y = position_marker_.scale.z = 0.1;
  position_marker_.color.a = 1.0;
  position_marker_.color.g = 1.0;
  linear_v_marker_.type = visualization_msgs::msg::Marker::ARROW;
  linear_v_marker_.ns = "linear_v";
  linear_v_marker_.scale.x = 0.03;
  linear_v_marker_.scale.y = 0.05;
  linear_v_marker_.color.a = 1.0;
  linear_v_marker_.color.r = 1.0;
  linear_v_marker_.color.g = 1.0;
  angular_v_marker_.type = visualization_msgs::msg::Marker::ARROW;
  angular_v_marker_.ns = "angular_v";
  angular_v_marker_.scale.x = 0.03;
  angular_v_marker_.scale.y = 0.05;
  angular_v_marker_.color.a = 1.0;
  angular_v_marker_.color.b = 1.0;
  angular_v_marker_.color.g = 1.0;
  armor_marker_.ns = "armors";
  armor_marker_.type = visualization_msgs::msg::Marker::CUBE;
  armor_marker_.scale.x = 0.03;
  armor_marker_.scale.z = 0.125;
  armor_marker_.color.a = 1.0;
  armor_marker_.color.r = 1.0;
  marker_pub_ = this->create_publisher<visualization_msgs::msg::MarkerArray>("/tracker/marker", 10);
}
void ArmorTrackerNode::armorsCallback(const auto_aim_interfaces::msg::Armors::SharedPtr armors_msg)
{
  // Per-frame pipeline: transform detections into target_frame_, drop
  // out-of-range armors, step the tracker, then publish info/target/markers.

  // Transform armor position from image frame to world coordinate
  for (auto & armor : armors_msg->armors) {
    geometry_msgs::msg::PoseStamped ps;
    ps.header = armors_msg->header;
    ps.pose = armor.pose;
    try {
      armor.pose = tf2_buffer_->transform(ps, target_frame_).pose;
    } catch (const tf2::ExtrapolationException & ex) {
      // Transform not available for this stamp; skip the whole frame.
      RCLCPP_ERROR(get_logger(), "Error while transforming %s", ex.what());
      return;
    }
  }

  // Filter abnormal armors: reject detections that are too high (> 1.2 m)
  // or farther than max_armor_distance_ in the XOY plane.
  armors_msg->armors.erase(
    std::remove_if(
      armors_msg->armors.begin(), armors_msg->armors.end(),
      [this](const auto_aim_interfaces::msg::Armor & armor) {
        return abs(armor.pose.position.z) > 1.2 ||
               Eigen::Vector2d(armor.pose.position.x, armor.pose.position.y).norm() >
                 max_armor_distance_;
      }),
    armors_msg->armors.end());

  // Init message
  auto_aim_interfaces::msg::TrackerInfo info_msg;
  auto_aim_interfaces::msg::Target target_msg;
  rclcpp::Time time = armors_msg->header.stamp;
  target_msg.header.stamp = time;
  target_msg.header.frame_id = target_frame_;

  // Update tracker
  if (tracker_->tracker_state == Tracker::LOST) {
    tracker_->init(armors_msg);
    target_msg.tracking = false;
  } else {
    // dt since the previous frame drives the EKF process model and converts
    // the lost-time threshold (seconds) into a frame count.
    dt_ = (time - last_time_).seconds();
    tracker_->lost_thres = static_cast<int>(lost_time_thres_ / dt_);
    tracker_->update(armors_msg);

    // Publish Info (raw measurement and association diffs, for debugging)
    info_msg.position_diff = tracker_->info_position_diff;
    info_msg.yaw_diff = tracker_->info_yaw_diff;
    info_msg.position.x = tracker_->measurement(0);
    info_msg.position.y = tracker_->measurement(1);
    info_msg.position.z = tracker_->measurement(2);
    info_msg.yaw = tracker_->measurement(3);
    info_pub_->publish(info_msg);

    if (tracker_->tracker_state == Tracker::DETECTING) {
      target_msg.tracking = false;
    } else if (
      tracker_->tracker_state == Tracker::TRACKING ||
      tracker_->tracker_state == Tracker::TEMP_LOST) {
      target_msg.tracking = true;
      // Fill target message from the filtered state
      // (xc, v_xc, yc, v_yc, za, v_za, yaw, v_yaw, r)
      const auto & state = tracker_->target_state;
      target_msg.id = tracker_->tracked_id;
      target_msg.armors_num = static_cast<int>(tracker_->tracked_armors_num);
      target_msg.position.x = state(0);
      target_msg.velocity.x = state(1);
      target_msg.position.y = state(2);
      target_msg.velocity.y = state(3);
      target_msg.position.z = state(4);
      target_msg.velocity.z = state(5);
      target_msg.yaw = state(6);
      target_msg.v_yaw = state(7);
      target_msg.radius_1 = state(8);
      target_msg.radius_2 = tracker_->another_r;
      target_msg.dz = tracker_->dz;
    }
  }

  last_time_ = time;
  target_pub_->publish(target_msg);
  publishMarkers(target_msg);
}
void ArmorTrackerNode::publishMarkers(const auto_aim_interfaces::msg::Target & target_msg)
{
  // Publish RViz markers for the tracked target: center sphere, linear and
  // angular velocity arrows, and one cube per reconstructed armor plate.
  position_marker_.header = target_msg.header;
  linear_v_marker_.header = target_msg.header;
  angular_v_marker_.header = target_msg.header;
  armor_marker_.header = target_msg.header;

  visualization_msgs::msg::MarkerArray marker_array;
  if (target_msg.tracking) {
    double yaw = target_msg.yaw, r1 = target_msg.radius_1, r2 = target_msg.radius_2;
    double xc = target_msg.position.x, yc = target_msg.position.y, za = target_msg.position.z;
    double vx = target_msg.velocity.x, vy = target_msg.velocity.y, vz = target_msg.velocity.z;
    double dz = target_msg.dz;

    // Center sphere, drawn halfway between the two armor-ring heights.
    position_marker_.action = visualization_msgs::msg::Marker::ADD;
    position_marker_.pose.position.x = xc;
    position_marker_.pose.position.y = yc;
    position_marker_.pose.position.z = za + dz / 2;

    // Linear velocity arrow from the center.
    linear_v_marker_.action = visualization_msgs::msg::Marker::ADD;
    linear_v_marker_.points.clear();
    linear_v_marker_.points.emplace_back(position_marker_.pose.position);
    geometry_msgs::msg::Point arrow_end = position_marker_.pose.position;
    arrow_end.x += vx;
    arrow_end.y += vy;
    arrow_end.z += vz;
    linear_v_marker_.points.emplace_back(arrow_end);

    // Angular velocity shown as a vertical arrow scaled by v_yaw / pi.
    angular_v_marker_.action = visualization_msgs::msg::Marker::ADD;
    angular_v_marker_.points.clear();
    angular_v_marker_.points.emplace_back(position_marker_.pose.position);
    arrow_end = position_marker_.pose.position;
    arrow_end.z += target_msg.v_yaw / M_PI;
    angular_v_marker_.points.emplace_back(arrow_end);

    // One cube per armor, placed around the center at equal yaw spacing.
    armor_marker_.action = visualization_msgs::msg::Marker::ADD;
    armor_marker_.scale.y = tracker_->tracked_armor.type == "small" ? 0.135 : 0.23;
    bool is_current_pair = true;
    size_t a_n = target_msg.armors_num;
    geometry_msgs::msg::Point p_a;
    double r = 0;
    for (size_t i = 0; i < a_n; i++) {
      double tmp_yaw = yaw + i * (2 * M_PI / a_n);
      // Only 4 armors has 2 radius and height: alternate radius/height
      // between adjacent plates.
      if (a_n == 4) {
        r = is_current_pair ? r1 : r2;
        p_a.z = za + (is_current_pair ? 0 : dz);
        is_current_pair = !is_current_pair;
      } else {
        r = r1;
        p_a.z = za;
      }
      p_a.x = xc - r * cos(tmp_yaw);
      p_a.y = yc - r * sin(tmp_yaw);
      armor_marker_.id = i;
      armor_marker_.pose.position = p_a;
      tf2::Quaternion q;
      // Armor plates are pitched 15 deg (0.26 rad); outpost plates tilt the other way.
      q.setRPY(0, target_msg.id == "outpost" ? -0.26 : 0.26, tmp_yaw);
      armor_marker_.pose.orientation = tf2::toMsg(q);
      marker_array.markers.emplace_back(armor_marker_);
    }
  } else {
    // Not tracking: delete the previously published markers.
    position_marker_.action = visualization_msgs::msg::Marker::DELETE;
    linear_v_marker_.action = visualization_msgs::msg::Marker::DELETE;
    angular_v_marker_.action = visualization_msgs::msg::Marker::DELETE;
    armor_marker_.action = visualization_msgs::msg::Marker::DELETE;
    marker_array.markers.emplace_back(armor_marker_);
  }

  marker_array.markers.emplace_back(position_marker_);
  marker_array.markers.emplace_back(linear_v_marker_);
  marker_array.markers.emplace_back(angular_v_marker_);
  marker_pub_->publish(marker_array);
}
} // namespace rm_auto_aim
#include "rclcpp_components/register_node_macro.hpp"
// Register the component with class_loader.
// This acts as a sort of entry point, allowing the component to be discoverable when its library
// is being loaded into a running process.
RCLCPP_COMPONENTS_REGISTER_NODE(rm_auto_aim::ArmorTrackerNode)

View File

@ -1,30 +0,0 @@
cmake_minimum_required(VERSION 3.8)
project(auto_aim_interfaces)

if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
  add_compile_options(-Wall -Wextra -Wpedantic)
endif()

# find dependencies
find_package(ament_cmake REQUIRED)
find_package(rosidl_default_generators REQUIRED)
# BUGFIX: std_msgs is listed under DEPENDENCIES below (Armors.msg embeds
# std_msgs/Header) but was never found; rosidl requires every dependency
# package to be found explicitly rather than arriving transitively.
find_package(std_msgs REQUIRED)
find_package(geometry_msgs REQUIRED)

rosidl_generate_interfaces(${PROJECT_NAME}
  "msg/Armor.msg"
  "msg/Armors.msg"
  "msg/Target.msg"
  "msg/DebugLight.msg"
  "msg/DebugLights.msg"
  "msg/DebugArmor.msg"
  "msg/DebugArmors.msg"
  "msg/TrackerInfo.msg"
  DEPENDENCIES
    std_msgs
    geometry_msgs
)

ament_package()

View File

@ -1,4 +0,0 @@
string number
string type
float32 distance_to_image_center
geometry_msgs/Pose pose

View File

@ -1,2 +0,0 @@
std_msgs/Header header
Armor[] armors

View File

@ -1,5 +0,0 @@
int32 center_x
string type
float32 light_ratio
float32 center_distance
float32 angle

View File

@ -1 +0,0 @@
DebugArmor[] data

View File

@ -1,4 +0,0 @@
int32 center_x
bool is_light
float32 ratio
float32 angle

View File

@ -1 +0,0 @@
DebugLight[] data

View File

@ -1,11 +0,0 @@
std_msgs/Header header
bool tracking
string id
int32 armors_num
geometry_msgs/Point position
geometry_msgs/Vector3 velocity
float64 yaw
float64 v_yaw
float64 radius_1
float64 radius_2
float64 dz

View File

@ -1,6 +0,0 @@
# Difference between the current measurement and prediction
float64 position_diff
float64 yaw_diff
# Unfiltered position and yaw
geometry_msgs/Point position
float64 yaw

View File

@ -1,21 +0,0 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
  <name>auto_aim_interfaces</name>
  <version>0.0.0</version>
  <description>TODO: Package description</description>
  <maintainer email="chen.junn@outlook.com">chenjun</maintainer>
  <license>TODO: License declaration</license>

  <buildtool_depend>ament_cmake</buildtool_depend>

  <build_depend>rosidl_default_generators</build_depend>
  <exec_depend>rosidl_default_runtime</exec_depend>
  <member_of_group>rosidl_interface_packages</member_of_group>

  <!-- BUGFIX: Armors.msg embeds std_msgs/Header, so std_msgs must be a
       declared dependency instead of relying on geometry_msgs pulling it
       in transitively. -->
  <depend>std_msgs</depend>
  <depend>geometry_msgs</depend>

  <export>
    <build_type>ament_cmake</build_type>
  </export>
</package>

View File

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Created with Vectornator (http://vectornator.io/) -->
<svg height="100%" stroke-miterlimit="10" style="fill-rule:nonzero;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;" version="1.1" viewBox="0 0 1024 1024" width="100%" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:vectornator="http://vectornator.io" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<linearGradient gradientTransform="matrix(1 0 0 1 6.39488e-14 157.46)" gradientUnits="userSpaceOnUse" id="LinearGradient" x1="375.972" x2="944.796" y1="453.158" y2="453.158">
<stop offset="0" stop-color="#ff8b00"/>
<stop offset="1" stop-color="#ff5900"/>
</linearGradient>
</defs>
<g id="图层-2" vectornator:layerName="图层 2">
<path d="M79.2038 285.449L524.109 285.848C524.109 285.848 651.69 302.769 654.969 439.05C658.247 575.332 521.965 594.533 521.965 594.533L292.315 596.568L242.362 778.529L101.844 777.892L181.788 485.967L512.496 484.668C512.496 484.668 542.064 478.698 541.78 438.895C541.496 399.092 503.683 395.965 503.683 395.965L340.206 395.68L406.734 471.875L248.235 472.328L79.2038 285.449Z" fill="#5a5655" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="图层-3" vectornator:layerName="图层 3">
<path d="M375.972 608.813L535.485 608.74L576.68 655.417L788.749 378.975L944.796 378.807L592.763 842.427" fill="url(#LinearGradient)" fill-rule="nonzero" opacity="1" stroke="#5a5655" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="0"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 1.6 KiB

View File

@ -1,12 +0,0 @@
# Aggregator package for the rm_auto_aim stack: builds nothing of its own,
# only runs the ament linters when testing is enabled.
cmake_minimum_required(VERSION 3.8)
project(rm_auto_aim)

find_package(ament_cmake_auto REQUIRED)

if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  # Skip the copyright linter for this metapackage.
  set(ament_cmake_copyright_FOUND TRUE)
  ament_lint_auto_find_test_dependencies()
endif()

ament_auto_package()

View File

@ -1,22 +0,0 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>rm_auto_aim</name>
<version>0.0.0</version>
<description>TODO: Package description</description>
<maintainer email="chen.junn@outlook.com">chenjun</maintainer>
<license>TODO: License declaration</license>
<buildtool_depend>ament_cmake</buildtool_depend>
<depend>auto_aim_interfaces</depend>
<depend>armor_detector</depend>
<depend>armor_tracker</depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -1,18 +0,0 @@
# Description-only package: installs the gimbal URDF/xacro files, no compiled code.
cmake_minimum_required(VERSION 3.8)
project(rm_gimbal_description)

# find dependencies
find_package(ament_cmake REQUIRED)

# Install files: ship the urdf directory into the package's share folder
# so launch files can locate it via the package share path.
install(DIRECTORY
  urdf
  DESTINATION share/${PROJECT_NAME})

if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  # Skip the copyright linter for this package.
  set(ament_cmake_copyright_FOUND TRUE)
  ament_lint_auto_find_test_dependencies()
endif()

ament_package()

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,26 +0,0 @@
# rm_gimbal_description
RoboMaster 视觉自瞄系统所需的 URDF
<img src="docs/rm_vision.svg" alt="rm_vision" width="200" height="200">
该项目为 [rm_vision](https://github.com/chenjunnn/rm_vision) 的子模块
## 坐标系定义
单位和方向请参考 https://www.ros.org/reps/rep-0103.html
odom: 以云台中心为原点的惯性系
yaw_joint: 表述云台的 yaw 轴与惯性系的旋转关系
pitch_joint: 表述云台的 pitch 轴与惯性系的旋转关系
camera_joint: 表述相机到惯性系的变换关系
camera_optical_joint: 表述以 z 轴为前方的相机坐标系转换为 x 轴为前方的相机坐标系的旋转关系
## 使用方法
修改 [urdf/rm_gimbal.urdf.xacro](urdf/rm_gimbal.urdf.xacro) 中的 `gimbal_camera_transfrom`
xyz 与 rpy 对应机器人云台上相机到云台中心的平移与旋转关系,可以由机械图纸测量得到,或在机器人上直接测量

View File

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Created with Vectornator (http://vectornator.io/) -->
<svg height="100%" stroke-miterlimit="10" style="fill-rule:nonzero;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;" version="1.1" viewBox="0 0 1024 1024" width="100%" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:vectornator="http://vectornator.io" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<linearGradient gradientTransform="matrix(1 0 0 1 6.39488e-14 157.46)" gradientUnits="userSpaceOnUse" id="LinearGradient" x1="375.972" x2="944.796" y1="453.158" y2="453.158">
<stop offset="0" stop-color="#ff8b00"/>
<stop offset="1" stop-color="#ff5900"/>
</linearGradient>
</defs>
<g id="图层-2" vectornator:layerName="图层 2">
<path d="M79.2038 285.449L524.109 285.848C524.109 285.848 651.69 302.769 654.969 439.05C658.247 575.332 521.965 594.533 521.965 594.533L292.315 596.568L242.362 778.529L101.844 777.892L181.788 485.967L512.496 484.668C512.496 484.668 542.064 478.698 541.78 438.895C541.496 399.092 503.683 395.965 503.683 395.965L340.206 395.68L406.734 471.875L248.235 472.328L79.2038 285.449Z" fill="#5a5655" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="图层-3" vectornator:layerName="图层 3">
<path d="M375.972 608.813L535.485 608.74L576.68 655.417L788.749 378.975L944.796 378.807L592.763 842.427" fill="url(#LinearGradient)" fill-rule="nonzero" opacity="1" stroke="#5a5655" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="0"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 1.6 KiB

View File

@ -1,21 +0,0 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>rm_gimbal_description</name>
<version>0.0.0</version>
<description>TODO: Package description</description>
<maintainer email="sink.chen@foxmail.com">chenjun</maintainer>
<license>TODO: License declaration</license>
<buildtool_depend>ament_cmake</buildtool_depend>
<exec_depend>launch_ros</exec_depend>
<exec_depend>xacro</exec_depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -1,44 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Gimbal URDF used by the RoboMaster vision auto-aim stack.
     Frame/unit conventions follow REP-0103 (see the package README).
     odom is the inertial frame at the gimbal center; yaw_joint and
     pitch_joint express the gimbal attitude relative to it. -->
<robot name="rm_gimbal"
  xmlns:xacro="http://ros.org/wiki/xacro">
  <!-- Camera-to-gimbal-center translation/rotation; measured from the
       mechanical drawings or directly on the robot. Overridable at launch. -->
  <xacro:arg name="xyz" default="0.10 0 0.05" />
  <xacro:arg name="rpy" default="0 0 0" />

  <!-- Inertial frame at the gimbal center. -->
  <link name="odom" />

  <link name="yaw_link" />
  <joint name="yaw_joint" type="continuous">
    <parent link="odom" />
    <child link="yaw_link" />
    <axis xyz="0 0 1" />
  </joint>

  <link name="pitch_link" />
  <joint name="pitch_joint" type="continuous">
    <parent link="yaw_link" />
    <child link="pitch_link" />
    <axis xyz="0 1 0" />
  </joint>

  <!-- Fixed camera mount, parameterized by the xyz/rpy args above. -->
  <link name="camera_link" />
  <joint name="camera_joint" type="fixed">
    <origin xyz="$(arg xyz)" rpy="$(arg rpy)" />
    <parent link="pitch_link" />
    <child link="camera_link" />
    <axis xyz="0 0 0" />
  </joint>

  <!-- Rotates the x-forward camera frame into the z-forward optical
       frame convention (per the README's camera_optical_joint note). -->
  <link name="camera_optical_frame" />
  <joint name="camera_optical_joint" type="fixed">
    <origin xyz="0 0 0" rpy="${-pi/2} 0 ${-pi/2}" />
    <parent link="camera_link" />
    <child link="camera_optical_frame" />
  </joint>
</robot>

View File

@ -1,18 +0,0 @@
---
# clang-format configuration: Google style with project overrides
# (Allman-ish braces for classes/functions/namespaces/structs, 100-column
# limit, middle pointer alignment, comment reflow disabled).
Language: Cpp
BasedOnStyle: Google
AccessModifierOffset: -2
AlignAfterOpenBracket: AlwaysBreak
BraceWrapping:
  AfterClass: true
  AfterFunction: true
  AfterNamespace: true
  AfterStruct: true
BreakBeforeBraces: Custom
ColumnLimit: 100
ConstructorInitializerIndentWidth: 0
ContinuationIndentWidth: 2
DerivePointerAlignment: false
PointerAlignment: Middle
ReflowComments: false

View File

@ -1,57 +0,0 @@
cmake_minimum_required(VERSION 3.10)
project(rm_serial_driver)

## Use C++14
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

## Treat all warnings as errors to enforce cleaner code.
## add_compile_options is the correct command for compiler flags;
## add_definitions is intended for preprocessor -D definitions only.
add_compile_options(-Wall -Werror)

## Export compile commands for clangd
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

#######################
## Find dependencies ##
#######################
find_package(ament_cmake_auto REQUIRED)
ament_auto_find_build_dependencies()

###########
## Build ##
###########
ament_auto_add_library(${PROJECT_NAME} SHARED
  src/rm_serial_driver.cpp
  src/crc.cpp
)

## Register the component so it can run standalone as ${PROJECT_NAME}_node
## or be composed into a container.
rclcpp_components_register_node(${PROJECT_NAME}
  PLUGIN rm_serial_driver::RMSerialDriver
  EXECUTABLE ${PROJECT_NAME}_node
)

#############
## Testing ##
#############
if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  # Skip linters that conflict with this project's style
  # (clang-format is used instead of uncrustify).
  list(APPEND AMENT_LINT_AUTO_EXCLUDE
    ament_cmake_copyright
    ament_cmake_uncrustify
  )
  ament_lint_auto_find_test_dependencies()
endif()

#############
## Install ##
#############
ament_auto_package(
  INSTALL_TO_SHARE
  config
  launch
)

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,36 +0,0 @@
# rm_serial_driver
RoboMaster 视觉系统与电控系统的串口通讯模块
<img src="docs/rm_vision.svg" alt="rm_vision" width="200" height="200">
该项目为 [rm_vision](https://github.com/chenjunnn/rm_vision) 的子模块
## Overview
本模块基于 [transport_drivers](https://github.com/ros-drivers/transport_drivers) 实现了 [rm_auto_aim](https://github.com/chenjunnn/rm_auto_aim) 项目与电控部分通讯的功能,也可作为使用 ros2 作为开发框架的参赛队的串口通讯模块参考
若有帮助请Star这个项目感谢~
## 使用指南
安装依赖 `sudo apt install ros-humble-serial-driver`
更改 [serial_driver.yaml](config/serial_driver.yaml) 中的参数以匹配与电控通讯的串口
启动串口模块 `ros2 launch rm_serial_driver serial_driver.launch.py`
## 发送和接收
详情请参考 [packet.hpp](include/rm_serial_driver/packet.hpp)
从电控端需要接收
- 机器人的自身颜色 `robot_color` 以判断对应的识别目标颜色
- 云台姿态 `pitch``yaw`, 单位和方向请参考 https://www.ros.org/reps/rep-0103.html
- 当前云台瞄准的位置 `aim_x, aim_y, aim_z`,用于发布可视化 Marker
视觉端发送 armor_tracker 的输出,即对于目标机器人的观测,具体的运动预测、装甲板选择、弹道解算在电控端完成
## 电控端的处理
TBD

View File

@ -1,8 +0,0 @@
/rm_serial_driver:
  ros__parameters:
    # Serial device node of the MCU link; adjust to match the port
    # the control board enumerates as (see README).
    device_name: /dev/ttyACM0
    # Line settings — must match the MCU side.
    baud_rate: 115200
    flow_control: none  # one of: none, hardware, software
    parity: none
    stop_bits: "1"

View File

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Created with Vectornator (http://vectornator.io/) -->
<svg height="100%" stroke-miterlimit="10" style="fill-rule:nonzero;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;" version="1.1" viewBox="0 0 1024 1024" width="100%" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:vectornator="http://vectornator.io" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<linearGradient gradientTransform="matrix(1 0 0 1 6.39488e-14 157.46)" gradientUnits="userSpaceOnUse" id="LinearGradient" x1="375.972" x2="944.796" y1="453.158" y2="453.158">
<stop offset="0" stop-color="#ff8b00"/>
<stop offset="1" stop-color="#ff5900"/>
</linearGradient>
</defs>
<g id="图层-2" vectornator:layerName="图层 2">
<path d="M79.2038 285.449L524.109 285.848C524.109 285.848 651.69 302.769 654.969 439.05C658.247 575.332 521.965 594.533 521.965 594.533L292.315 596.568L242.362 778.529L101.844 777.892L181.788 485.967L512.496 484.668C512.496 484.668 542.064 478.698 541.78 438.895C541.496 399.092 503.683 395.965 503.683 395.965L340.206 395.68L406.734 471.875L248.235 472.328L79.2038 285.449Z" fill="#5a5655" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="图层-3" vectornator:layerName="图层 3">
<path d="M375.972 608.813L535.485 608.74L576.68 655.417L788.749 378.975L944.796 378.807L592.763 842.427" fill="url(#LinearGradient)" fill-rule="nonzero" opacity="1" stroke="#5a5655" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="0"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 1.6 KiB

View File

@ -1,29 +0,0 @@
// Copyright (c) 2022 ChenJun
// Licensed under the Apache-2.0 License.
#ifndef RM_SERIAL_DRIVER__CRC_HPP_
#define RM_SERIAL_DRIVER__CRC_HPP_
#include <cstdint>
namespace crc16
{
/**
 * @brief Verify the CRC16 checksum stored in the last two bytes of a buffer.
 * @param[in] pchMessage : Buffer to verify (payload followed by 2 checksum bytes)
 * @param[in] dwLength : Stream length = Data + checksum
 * @return : True or False (CRC Verify Result); false for null input or
 *           dwLength <= 2
 */
uint32_t Verify_CRC16_Check_Sum(const uint8_t * pchMessage, uint32_t dwLength);

/**
 * @brief Compute the CRC16 of the first dwLength-2 bytes and append it
 *        (low byte first) to the end of the buffer
 * @param[in] pchMessage : Data to Verify,
 * @param[in] dwLength : Stream length = Data + checksum
 * @return none
 */
void Append_CRC16_Check_Sum(uint8_t * pchMessage, uint32_t dwLength);
}  // namespace crc16
#endif // RM_SERIAL_DRIVER__CRC_HPP_

View File

@ -1,65 +0,0 @@
// Copyright (c) 2022 ChenJun
// Licensed under the Apache-2.0 License.
#ifndef RM_SERIAL_DRIVER__PACKET_HPP_
#define RM_SERIAL_DRIVER__PACKET_HPP_
#include <algorithm>
#include <cstdint>
#include <vector>
namespace rm_serial_driver
{
// Fixed-layout packet received from the MCU. __attribute__((packed))
// guarantees the wire layout matches the struct byte-for-byte.
struct ReceivePacket
{
  uint8_t header = 0x5A;
  uint8_t detect_color : 1;  // 0-red 1-blue
  uint8_t reserved : 7;
  float pitch;
  float yaw;
  float aim_x;
  float aim_y;
  float aim_z;
  uint16_t checksum = 0;
} __attribute__((packed));

// Fixed-layout packet sent to the MCU with the tracker's target estimate.
struct SendPacket
{
  uint8_t header = 0xA5;
  bool tracking : 1;
  uint8_t id : 3;          // 0-outpost 6-guard 7-base
  uint8_t armors_num : 3;  // 2-balance 3-outpost 4-normal
  uint8_t reserved : 1;
  float x;
  float y;
  float z;
  float yaw;
  float vx;
  float vy;
  float vz;
  float v_yaw;
  float r1;
  float r2;
  float dz;
  uint16_t checksum = 0;
} __attribute__((packed));

/**
 * @brief Deserialize a raw byte buffer into a ReceivePacket.
 * @param data Raw bytes as read from the serial port.
 * @return The decoded packet. If the buffer is shorter than the packet,
 *         trailing fields are left default/indeterminate.
 */
inline ReceivePacket fromVector(const std::vector<uint8_t> & data)
{
  ReceivePacket packet;
  // Copy at most sizeof(ReceivePacket) bytes: an oversized buffer must not
  // overflow the packet object (the original copied data.size() bytes).
  const std::size_t n = std::min(data.size(), sizeof(ReceivePacket));
  std::copy_n(data.begin(), n, reinterpret_cast<uint8_t *>(&packet));
  return packet;
}

/**
 * @brief Serialize a SendPacket into a byte buffer for the serial port.
 * @param data Packet to serialize.
 * @return sizeof(SendPacket) raw bytes.
 */
inline std::vector<uint8_t> toVector(const SendPacket & data)
{
  const auto * begin = reinterpret_cast<const uint8_t *>(&data);
  return std::vector<uint8_t>(begin, begin + sizeof(SendPacket));
}
}  // namespace rm_serial_driver
#endif // RM_SERIAL_DRIVER__PACKET_HPP_

View File

@ -1,65 +0,0 @@
// Copyright (c) 2022 ChenJun
// Licensed under the Apache-2.0 License.
#ifndef RM_SERIAL_DRIVER__RM_SERIAL_DRIVER_HPP_
#define RM_SERIAL_DRIVER__RM_SERIAL_DRIVER_HPP_
#include <rclcpp/publisher.hpp>
#include <rclcpp/rclcpp.hpp>
#include <rclcpp/subscription.hpp>
#include <sensor_msgs/msg/joint_state.hpp>
#include <serial_driver/serial_driver.hpp>
#include <std_msgs/msg/float64.hpp>
#include <visualization_msgs/msg/marker.hpp>
// C++ system
#include <memory>
#include <string>
#include <thread>
#include <vector>
#include "auto_aim_interfaces/msg/target.hpp"
namespace rm_serial_driver
{
// ROS 2 component node bridging the vision stack and the MCU over a serial
// port: publishes gimbal joint states and the MCU's aiming point, and
// forwards armor_tracker targets back to the MCU.
class RMSerialDriver : public rclcpp::Node
{
public:
  explicit RMSerialDriver(const rclcpp::NodeOptions & options);

  ~RMSerialDriver() override;

private:
  // Declare/read the serial port parameters (device_name, baud_rate, ...).
  void getParams();

  // Blocking receive loop; runs on receive_thread_.
  void receiveData();

  // Subscription callback: serialize the target and write it to the port.
  void sendData(auto_aim_interfaces::msg::Target::SharedPtr msg);

  void reopenPort();

  // Forward a parameter (e.g. detect_color) to the armor_detector node.
  void setParam(const rclcpp::Parameter & param);

  // Serial port infrastructure; owned_ctx_ must outlive serial_driver_.
  std::unique_ptr<IoContext> owned_ctx_;
  std::string device_name_;
  std::unique_ptr<drivers::serial_driver::SerialPortConfig> device_config_;
  std::unique_ptr<drivers::serial_driver::SerialDriver> serial_driver_;

  // Param client to set the detector's detect_color from the MCU packet.
  bool initial_set_param_ = false;
  uint8_t previous_receive_color_ = 0;
  rclcpp::AsyncParametersClient::SharedPtr detector_param_client_;

  // Reused marker for visualizing the MCU's current aiming point.
  visualization_msgs::msg::Marker aiming_point_;

  // Offset (seconds) added to received joint-state stamps.
  double timestamp_offset_ = 0;

  rclcpp::Publisher<sensor_msgs::msg::JointState>::SharedPtr joint_state_pub_;
  rclcpp::Subscription<auto_aim_interfaces::msg::Target>::SharedPtr target_sub_;
  rclcpp::Publisher<std_msgs::msg::Float64>::SharedPtr latency_pub_;
  rclcpp::Publisher<visualization_msgs::msg::Marker>::SharedPtr marker_pub_;

  std::thread receive_thread_;
};
}  // namespace rm_serial_driver
#endif // RM_SERIAL_DRIVER__RM_SERIAL_DRIVER_HPP_

View File

@ -1,21 +0,0 @@
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch_ros.actions import Node
def generate_launch_description():
    """Launch the rm_serial_driver component node with its YAML parameters."""
    params_file = os.path.join(
        get_package_share_directory('rm_serial_driver'), 'config', 'serial_driver.yaml')
    return LaunchDescription([
        Node(
            package='rm_serial_driver',
            executable='rm_serial_driver_node',
            namespace='',
            output='screen',
            emulate_tty=True,
            parameters=[params_file],
        )
    ])

View File

@ -1,33 +0,0 @@
<?xml version="1.0"?>
<?xml-model
href="http://download.ros.org/schema/package_format3.xsd"
schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>rm_serial_driver</name>
<version>0.1.0</version>
<description>Serial port communication module between the RoboMaster vision system and the MCU</description>
<maintainer email="chen.junn@outlook.com">Chen Jun</maintainer>
<license>Apache-2.0</license>
<url type="website">https://github.com/chenjunnn/rm_serial_driver</url>
<url type="bugtracker">https://github.com/chenjunnn/rm_serial_driver/issues</url>
<author email="chen.junn@outlook.com">Chen Jun</author>
<!-- buildtool_depend: dependencies of the build process -->
<buildtool_depend>ament_cmake</buildtool_depend>
<!-- depend: build, export, and execution dependency -->
<depend>rclcpp</depend>
<depend>rclcpp_components</depend>
<depend>serial_driver</depend>
<depend>sensor_msgs</depend>
<depend>visualization_msgs</depend>
<depend>auto_aim_interfaces</depend>
<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
<test_depend>ament_cmake_clang_format</test_depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -1,93 +0,0 @@
// Copyright (c) 2022 ChenJun
// Licensed under the Apache-2.0 License.
#include "rm_serial_driver/crc.hpp"
#include <cstdint>
namespace crc16
{
// Initial CRC register value.
constexpr uint16_t CRC16_INIT = 0xFFFF;

// 256-entry lookup table for the byte-at-a-time, LSB-first (reflected)
// CRC16 update below. NOTE(review): parameters appear to match
// CRC-16/MCRF4XX (poly 0x1021 reflected, init 0xFFFF, no final XOR) —
// confirm against the MCU implementation.
constexpr uint16_t W_CRC_TABLE[256] = {
  0x0000, 0x1189, 0x2312, 0x329b, 0x4624, 0x57ad, 0x6536, 0x74bf, 0x8c48, 0x9dc1, 0xaf5a, 0xbed3,
  0xca6c, 0xdbe5, 0xe97e, 0xf8f7, 0x1081, 0x0108, 0x3393, 0x221a, 0x56a5, 0x472c, 0x75b7, 0x643e,
  0x9cc9, 0x8d40, 0xbfdb, 0xae52, 0xdaed, 0xcb64, 0xf9ff, 0xe876, 0x2102, 0x308b, 0x0210, 0x1399,
  0x6726, 0x76af, 0x4434, 0x55bd, 0xad4a, 0xbcc3, 0x8e58, 0x9fd1, 0xeb6e, 0xfae7, 0xc87c, 0xd9f5,
  0x3183, 0x200a, 0x1291, 0x0318, 0x77a7, 0x662e, 0x54b5, 0x453c, 0xbdcb, 0xac42, 0x9ed9, 0x8f50,
  0xfbef, 0xea66, 0xd8fd, 0xc974, 0x4204, 0x538d, 0x6116, 0x709f, 0x0420, 0x15a9, 0x2732, 0x36bb,
  0xce4c, 0xdfc5, 0xed5e, 0xfcd7, 0x8868, 0x99e1, 0xab7a, 0xbaf3, 0x5285, 0x430c, 0x7197, 0x601e,
  0x14a1, 0x0528, 0x37b3, 0x263a, 0xdecd, 0xcf44, 0xfddf, 0xec56, 0x98e9, 0x8960, 0xbbfb, 0xaa72,
  0x6306, 0x728f, 0x4014, 0x519d, 0x2522, 0x34ab, 0x0630, 0x17b9, 0xef4e, 0xfec7, 0xcc5c, 0xddd5,
  0xa96a, 0xb8e3, 0x8a78, 0x9bf1, 0x7387, 0x620e, 0x5095, 0x411c, 0x35a3, 0x242a, 0x16b1, 0x0738,
  0xffcf, 0xee46, 0xdcdd, 0xcd54, 0xb9eb, 0xa862, 0x9af9, 0x8b70, 0x8408, 0x9581, 0xa71a, 0xb693,
  0xc22c, 0xd3a5, 0xe13e, 0xf0b7, 0x0840, 0x19c9, 0x2b52, 0x3adb, 0x4e64, 0x5fed, 0x6d76, 0x7cff,
  0x9489, 0x8500, 0xb79b, 0xa612, 0xd2ad, 0xc324, 0xf1bf, 0xe036, 0x18c1, 0x0948, 0x3bd3, 0x2a5a,
  0x5ee5, 0x4f6c, 0x7df7, 0x6c7e, 0xa50a, 0xb483, 0x8618, 0x9791, 0xe32e, 0xf2a7, 0xc03c, 0xd1b5,
  0x2942, 0x38cb, 0x0a50, 0x1bd9, 0x6f66, 0x7eef, 0x4c74, 0x5dfd, 0xb58b, 0xa402, 0x9699, 0x8710,
  0xf3af, 0xe226, 0xd0bd, 0xc134, 0x39c3, 0x284a, 0x1ad1, 0x0b58, 0x7fe7, 0x6e6e, 0x5cf5, 0x4d7c,
  0xc60c, 0xd785, 0xe51e, 0xf497, 0x8028, 0x91a1, 0xa33a, 0xb2b3, 0x4a44, 0x5bcd, 0x6956, 0x78df,
  0x0c60, 0x1de9, 0x2f72, 0x3efb, 0xd68d, 0xc704, 0xf59f, 0xe416, 0x90a9, 0x8120, 0xb3bb, 0xa232,
  0x5ac5, 0x4b4c, 0x79d7, 0x685e, 0x1ce1, 0x0d68, 0x3ff3, 0x2e7a, 0xe70e, 0xf687, 0xc41c, 0xd595,
  0xa12a, 0xb0a3, 0x8238, 0x93b1, 0x6b46, 0x7acf, 0x4854, 0x59dd, 0x2d62, 0x3ceb, 0x0e70, 0x1ff9,
  0xf78f, 0xe606, 0xd49d, 0xc514, 0xb1ab, 0xa022, 0x92b9, 0x8330, 0x7bc7, 0x6a4e, 0x58d5, 0x495c,
  0x3de3, 0x2c6a, 0x1ef1, 0x0f78};

/**
 * @brief CRC16 Caculation function
 * @param[in] pchMessage : Data to Verify,
 * @param[in] dwLength : Stream length = Data + checksum
 * @param[in] wCRC : CRC16 init value(default : 0xFFFF)
 * @return : CRC16 checksum; 0xFFFF if pchMessage is null
 */
uint16_t Get_CRC16_Check_Sum(const uint8_t * pchMessage, uint32_t dwLength, uint16_t wCRC)
{
  if (pchMessage == nullptr) return 0xFFFF;
  while (dwLength--) {
    // Standard reflected table-driven update: shift out the low byte and
    // fold in the next message byte via the table.
    const uint8_t ch_data = *pchMessage++;
    wCRC = static_cast<uint16_t>(
      (static_cast<uint16_t>(wCRC) >> 8) ^
      W_CRC_TABLE[(static_cast<uint16_t>(wCRC) ^ static_cast<uint16_t>(ch_data)) & 0x00ff]);
  }
  return wCRC;
}

/**
 * @brief CRC16 Verify function
 * @param[in] pchMessage : Data to Verify,
 * @param[in] dwLength : Stream length = Data + checksum
 * @return : True or False (CRC Verify Result); false for null input or
 *           dwLength <= 2 (no payload to check)
 */
uint32_t Verify_CRC16_Check_Sum(const uint8_t * pchMessage, uint32_t dwLength)
{
  if ((pchMessage == nullptr) || (dwLength <= 2)) return false;
  const uint16_t w_expected = Get_CRC16_Check_Sum(pchMessage, dwLength - 2, CRC16_INIT);
  // Checksum is stored little-endian: low byte first.
  return (
    (w_expected & 0xff) == pchMessage[dwLength - 2] &&
    ((w_expected >> 8) & 0xff) == pchMessage[dwLength - 1]);
}

/**
 * @brief Append CRC16 value to the end of the buffer
 * @param[in] pchMessage : Data to Verify,
 * @param[in] dwLength : Stream length = Data + checksum
 * @return none
 */
void Append_CRC16_Check_Sum(uint8_t * pchMessage, uint32_t dwLength)
{
  if ((pchMessage == nullptr) || (dwLength <= 2)) return;
  // (redundant reinterpret_cast of an already-uint8_t* pointer removed)
  const uint16_t w_crc = Get_CRC16_Check_Sum(pchMessage, dwLength - 2, CRC16_INIT);
  // Store little-endian: low byte first, matching Verify above.
  pchMessage[dwLength - 2] = static_cast<uint8_t>(w_crc & 0x00ff);
  pchMessage[dwLength - 1] = static_cast<uint8_t>((w_crc >> 8) & 0x00ff);
}
}  // namespace crc16

View File

@ -1,310 +0,0 @@
// Copyright (c) 2022 ChenJun
// Licensed under the Apache-2.0 License.
#include "rm_serial_driver/rm_serial_driver.hpp"
// ROS
#include <rclcpp/logging.hpp>
#include <rclcpp/qos.hpp>
#include <rclcpp/utilities.hpp>
#include <serial_driver/serial_driver.hpp>
// C++ system
#include <cmath>
#include <cstdint>
#include <functional>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "rm_serial_driver/crc.hpp"
#include "rm_serial_driver/packet.hpp"
namespace rm_serial_driver
{
// Constructor: reads parameters, opens the serial port, sets up the
// publishers/subscription and the visualization marker, and starts the
// receive thread once the port is open.
RMSerialDriver::RMSerialDriver(const rclcpp::NodeOptions & options)
: Node("rm_serial_driver", options),
  owned_ctx_{new IoContext(2)},
  serial_driver_{new drivers::serial_driver::SerialDriver(*owned_ctx_)}
{
  RCLCPP_INFO(get_logger(), "Start RMSerialDriver!");

  getParams();

  // Create Publisher
  timestamp_offset_ = this->declare_parameter("timestamp_offset", 0.0);
  joint_state_pub_ = this->create_publisher<sensor_msgs::msg::JointState>(
    "/joint_states", rclcpp::QoS(rclcpp::KeepLast(1)));
  latency_pub_ = this->create_publisher<std_msgs::msg::Float64>("/latency", 10);
  marker_pub_ = this->create_publisher<visualization_msgs::msg::Marker>("/aiming_point", 10);

  // Detect parameter client (used to push detect_color to armor_detector)
  detector_param_client_ = std::make_shared<rclcpp::AsyncParametersClient>(this, "armor_detector");

  try {
    serial_driver_->init_port(device_name_, *device_config_);
    if (!serial_driver_->port()->is_open()) {
      serial_driver_->port()->open();
      receive_thread_ = std::thread(&RMSerialDriver::receiveData, this);
    }
  } catch (const std::exception & ex) {
    RCLCPP_ERROR(
      get_logger(), "Error creating serial port: %s - %s", device_name_.c_str(), ex.what());
    // Re-throw the original exception. `throw ex;` would copy-construct a
    // plain std::exception, slicing off the derived type and its message.
    throw;
  }

  // Marker template for the MCU's current aiming point.
  aiming_point_.header.frame_id = "odom";
  aiming_point_.ns = "aiming_point";
  aiming_point_.type = visualization_msgs::msg::Marker::SPHERE;
  aiming_point_.action = visualization_msgs::msg::Marker::ADD;
  aiming_point_.scale.x = aiming_point_.scale.y = aiming_point_.scale.z = 0.12;
  aiming_point_.color.r = 1.0;
  aiming_point_.color.g = 1.0;
  aiming_point_.color.b = 1.0;
  aiming_point_.color.a = 1.0;
  aiming_point_.lifetime = rclcpp::Duration::from_seconds(0.1);

  // Create Subscription
  target_sub_ = this->create_subscription<auto_aim_interfaces::msg::Target>(
    "/tracker/target", rclcpp::SensorDataQoS(),
    std::bind(&RMSerialDriver::sendData, this, std::placeholders::_1));
}
// Destructor: shuts down in the reverse order of construction.
RMSerialDriver::~RMSerialDriver()
{
  // Join the receive thread first (receiveData loops while rclcpp::ok())
  // so the port is not closed while a receive is still in flight.
  if (receive_thread_.joinable()) {
    receive_thread_.join();
  }
  if (serial_driver_->port()->is_open()) {
    serial_driver_->port()->close();
  }
  // Wait for the IO context's worker threads to exit before destruction.
  if (owned_ctx_) {
    owned_ctx_->waitForExit();
  }
}
// Receive loop (runs on receive_thread_): reads MCU packets, verifies the
// CRC, pushes detect_color to the detector, and publishes joint states and
// the aiming-point marker.
void RMSerialDriver::receiveData()
{
  std::vector<uint8_t> header(1);
  std::vector<uint8_t> data;
  data.reserve(sizeof(ReceivePacket));

  while (rclcpp::ok()) {
    try {
      // Read one byte and sync on the 0x5A packet header.
      serial_driver_->port()->receive(header);

      if (header[0] == 0x5A) {
        // Read the remainder of the packet, then prepend the header byte so
        // the buffer is exactly sizeof(ReceivePacket) for deserialization.
        data.resize(sizeof(ReceivePacket) - 1);
        serial_driver_->port()->receive(data);

        data.insert(data.begin(), header[0]);
        ReceivePacket packet = fromVector(data);

        bool crc_ok =
          crc16::Verify_CRC16_Check_Sum(reinterpret_cast<const uint8_t *>(&packet), sizeof(packet));
        if (crc_ok) {
          // Forward detect_color once at startup and whenever it changes.
          if (!initial_set_param_ || packet.detect_color != previous_receive_color_) {
            RCLCPP_INFO(get_logger(), "Setting detect_color to %d...", packet.detect_color);
            setParam(rclcpp::Parameter("detect_color", packet.detect_color));
            previous_receive_color_ = packet.detect_color;
          }

          sensor_msgs::msg::JointState joint_state;
          timestamp_offset_ = this->get_parameter("timestamp_offset").as_double();
          joint_state.header.stamp = this->now() + rclcpp::Duration::from_seconds(timestamp_offset_);
          joint_state.name.push_back("pitch_joint");
          joint_state.name.push_back("yaw_joint");
          joint_state.position.push_back(-packet.pitch);
          joint_state.position.push_back(packet.yaw);
          joint_state_pub_->publish(joint_state);

          // std::abs, not abs: plain C abs() takes an int and would truncate
          // |aim_x| < 1.0 to 0, suppressing the marker for sub-meter aims.
          if (std::abs(packet.aim_x) > 0.01) {
            aiming_point_.header.stamp = this->now();
            aiming_point_.pose.position.x = packet.aim_x;
            aiming_point_.pose.position.y = packet.aim_y;
            aiming_point_.pose.position.z = packet.aim_z;
            marker_pub_->publish(aiming_point_);
          }
        } else {
          RCLCPP_ERROR(get_logger(), "CRC error!");
        }
      } else {
        RCLCPP_WARN_THROTTLE(get_logger(), *get_clock(), 20, "Invalid header: %02X", header[0]);
      }
    } catch (const std::exception & ex) {
      RCLCPP_ERROR_THROTTLE(
        get_logger(), *get_clock(), 20, "Error while receiving data: %s", ex.what());
      reopenPort();
    }
  }
}
// Subscription callback: serialize the tracker's target into a SendPacket,
// append the CRC, write it to the serial port, and publish the end-to-end
// latency of the pipeline.
void RMSerialDriver::sendData(const auto_aim_interfaces::msg::Target::SharedPtr msg)
{
  // Map the tracker's string id onto the packet's 3-bit id field
  // (0-outpost 6-guard 7-base, per packet.hpp). The original initializer
  // listed {"1", 1} twice; the duplicate entry was redundant and removed.
  const static std::map<std::string, uint8_t> id_unit8_map{
    {"", 0}, {"outpost", 0}, {"1", 1}, {"2", 2}, {"3", 3},
    {"4", 4}, {"5", 5},      {"guard", 6}, {"base", 7}};

  try {
    SendPacket packet;
    packet.tracking = msg->tracking;
    packet.id = id_unit8_map.at(msg->id);  // throws std::out_of_range on unknown id
    packet.armors_num = msg->armors_num;
    packet.x = msg->position.x;
    packet.y = msg->position.y;
    packet.z = msg->position.z;
    packet.yaw = msg->yaw;
    packet.vx = msg->velocity.x;
    packet.vy = msg->velocity.y;
    packet.vz = msg->velocity.z;
    packet.v_yaw = msg->v_yaw;
    packet.r1 = msg->radius_1;
    packet.r2 = msg->radius_2;
    packet.dz = msg->dz;
    crc16::Append_CRC16_Check_Sum(reinterpret_cast<uint8_t *>(&packet), sizeof(packet));

    std::vector<uint8_t> data = toVector(packet);
    serial_driver_->port()->send(data);

    // Latency from the target message's stamp to transmission, in ms.
    std_msgs::msg::Float64 latency;
    latency.data = (this->now() - msg->header.stamp).seconds() * 1000.0;
    RCLCPP_DEBUG_STREAM(get_logger(), "Total latency: " + std::to_string(latency.data) + "ms");
    latency_pub_->publish(latency);
  } catch (const std::exception & ex) {
    RCLCPP_ERROR(get_logger(), "Error while sending data: %s", ex.what());
    reopenPort();
  }
}
// Declares and validates the serial-port ROS parameters (device_name,
// baud_rate, flow_control, parity, stop_bits) and builds device_config_.
// Each invalid parameter is logged and the original exception is rethrown.
void RMSerialDriver::getParams()
{
  using FlowControl = drivers::serial_driver::FlowControl;
  using Parity = drivers::serial_driver::Parity;
  using StopBits = drivers::serial_driver::StopBits;
  uint32_t baud_rate{};
  auto fc = FlowControl::NONE;
  auto pt = Parity::NONE;
  auto sb = StopBits::ONE;
  try {
    device_name_ = declare_parameter<std::string>("device_name", "");
  } catch (rclcpp::ParameterTypeException & ex) {
    RCLCPP_ERROR(get_logger(), "The device name provided was invalid");
    // FIX: bare throw preserves the original exception object instead of
    // rethrowing a (potentially sliced) copy, as `throw ex;` did.
    throw;
  }
  try {
    baud_rate = declare_parameter<int>("baud_rate", 0);
  } catch (rclcpp::ParameterTypeException & ex) {
    RCLCPP_ERROR(get_logger(), "The baud_rate provided was invalid");
    throw;
  }
  try {
    const auto fc_string = declare_parameter<std::string>("flow_control", "");
    if (fc_string == "none") {
      fc = FlowControl::NONE;
    } else if (fc_string == "hardware") {
      fc = FlowControl::HARDWARE;
    } else if (fc_string == "software") {
      fc = FlowControl::SOFTWARE;
    } else {
      throw std::invalid_argument{
        "The flow_control parameter must be one of: none, software, or hardware."};
    }
  } catch (rclcpp::ParameterTypeException & ex) {
    RCLCPP_ERROR(get_logger(), "The flow_control provided was invalid");
    throw;
  }
  try {
    const auto pt_string = declare_parameter<std::string>("parity", "");
    if (pt_string == "none") {
      pt = Parity::NONE;
    } else if (pt_string == "odd") {
      pt = Parity::ODD;
    } else if (pt_string == "even") {
      pt = Parity::EVEN;
    } else {
      throw std::invalid_argument{"The parity parameter must be one of: none, odd, or even."};
    }
  } catch (rclcpp::ParameterTypeException & ex) {
    RCLCPP_ERROR(get_logger(), "The parity provided was invalid");
    throw;
  }
  try {
    const auto sb_string = declare_parameter<std::string>("stop_bits", "");
    if (sb_string == "1" || sb_string == "1.0") {
      sb = StopBits::ONE;
    } else if (sb_string == "1.5") {
      sb = StopBits::ONE_POINT_FIVE;
    } else if (sb_string == "2" || sb_string == "2.0") {
      sb = StopBits::TWO;
    } else {
      throw std::invalid_argument{"The stop_bits parameter must be one of: 1, 1.5, or 2."};
    }
  } catch (rclcpp::ParameterTypeException & ex) {
    RCLCPP_ERROR(get_logger(), "The stop_bits provided was invalid");
    throw;
  }
  device_config_ =
    std::make_unique<drivers::serial_driver::SerialPortConfig>(baud_rate, fc, pt, sb);
}
// Attempts to close and reopen the serial port, retrying once per second
// until it succeeds or rclcpp shuts down.
// FIX: the original retried via self-recursion, which grows the stack without
// bound while the port stays unavailable; this is the equivalent loop.
void RMSerialDriver::reopenPort()
{
  RCLCPP_WARN(get_logger(), "Attempting to reopen port");
  while (true) {
    try {
      if (serial_driver_->port()->is_open()) {
        serial_driver_->port()->close();
      }
      serial_driver_->port()->open();
      RCLCPP_INFO(get_logger(), "Successfully reopened port");
      return;
    } catch (const std::exception & ex) {
      RCLCPP_ERROR(get_logger(), "Error while reopening port: %s", ex.what());
      // Stop retrying once the node is shutting down (matches the original
      // behavior of only recursing while rclcpp::ok()).
      if (!rclcpp::ok()) {
        return;
      }
      rclcpp::sleep_for(std::chrono::seconds(1));
      RCLCPP_WARN(get_logger(), "Attempting to reopen port");
    }
  }
}
// Asynchronously sets a parameter (in practice `detect_color`) on the
// detector node via detector_param_client_. The completion callback logs
// per-parameter failures and, on full success, marks initial_set_param_ so
// receiveData() stops re-sending an unchanged color. If the parameter
// service is not ready the request is skipped with a warning.
void RMSerialDriver::setParam(const rclcpp::Parameter & param)
{
  if (detector_param_client_->service_is_ready()) {
    detector_param_client_->set_parameters(
      {param},
      // Captures `param` by value: the callback may run after the caller's
      // argument has gone out of scope.
      [this, param](
        const std::shared_future<std::vector<rcl_interfaces::msg::SetParametersResult>> & results) {
        for (const auto & result : results.get()) {
          if (!result.successful) {
            RCLCPP_ERROR(get_logger(), "Failed to set parameter: %s", result.reason.c_str());
            return;
          }
        }
        // NOTE(review): as_int() returns int64_t; "%ld" assumes 64-bit long
        // (true on the Linux targets this builds for) — confirm if ported.
        RCLCPP_INFO(get_logger(), "Successfully set detect_color to %ld!", param.as_int());
        initial_set_param_ = true;
      });
  } else {
    RCLCPP_WARN(get_logger(), "Service not ready, skipping parameter set");
  }
}
} // namespace rm_serial_driver
#include "rclcpp_components/register_node_macro.hpp"
// Register the component with class_loader.
// This acts as a sort of entry point, allowing the component to be discoverable when its library
// is being loaded into a running process.
RCLCPP_COMPONENTS_REGISTER_NODE(rm_serial_driver::RMSerialDriver)

View File

@ -1,41 +0,0 @@
# Development/runtime image for the rm_vision stack, based on ROS 2 Humble.
FROM ros:humble-ros-base
# create workspace
RUN mkdir -p /ros_ws/src
WORKDIR /ros_ws/
# clone projects
# Shallow clones of every rm_vision component into the workspace src/.
RUN cd src && git clone https://github.com/chenjunnn/rm_auto_aim --depth=1 && \
    git clone https://github.com/chenjunnn/ros2_mindvision_camera --depth=1 && \
    git clone https://github.com/chenjunnn/ros2_hik_camera --depth=1 && \
    git clone https://github.com/chenjunnn/rm_gimbal_description --depth=1 && \
    git clone https://github.com/chenjunnn/rm_serial_driver --depth=1 && \
    git clone https://github.com/chenjunnn/rm_vision --depth=1
# install dependencies and some tools
# apt lists are removed in the same layer to keep the image small.
RUN apt-get update && rosdep install --from-paths src --ignore-src -r -y && \
    apt-get install ros-humble-foxglove-bridge wget htop vim -y && \
    rm -rf /var/lib/apt/lists/*
# setup zsh
# NOTE(review): the trailing apt-lists cleanup in this layer is a no-op —
# nothing here runs apt — harmless but presumably copied from the step above.
RUN sh -c "$(wget -O- https://github.com/deluan/zsh-in-docker/releases/download/v1.1.2/zsh-in-docker.sh)" -- \
  -t jispwoso -p git \
  -p https://github.com/zsh-users/zsh-autosuggestions \
  -p https://github.com/zsh-users/zsh-syntax-highlighting && \
  chsh -s /bin/zsh && \
  rm -rf /var/lib/apt/lists/*
# build
# Sources the ROS environment in the same shell, then builds the workspace.
RUN . /opt/ros/humble/setup.sh && colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release
# setup .zshrc
RUN echo 'export TERM=xterm-256color\n\
source /ros_ws/install/setup.zsh\n\
eval "$(register-python-argcomplete3 ros2)"\n\
eval "$(register-python-argcomplete3 colcon)"\n'\
>> /root/.zshrc
# source entrypoint setup
# Inserts the workspace overlay before the last line of the stock entrypoint.
RUN sed --in-place --expression \
  '$isource "/ros_ws/install/setup.bash"' \
  /ros_entrypoint.sh

View File

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022 ChenJun
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,65 +0,0 @@
# rm_vision
<img src="docs/rm_vision.svg" alt="rm_vision" width="200" height="200">
## Overview
rm_vision 项目旨在为 RoboMaster 队伍提供一个规范、易用、鲁棒、高性能的视觉框架方案,为 RM 开源生态的建设添砖加瓦
如果本开源项目对于贵战队的视觉技术发展起到了实质性的帮助作用,请在机器人上贴上以下标签以助力该项目的推广,十分感激!
[<img src="docs/rm_vision_inside.svg" alt="rm_vision_inside" width="100" height="100">](docs/rm_vision_inside.svg)
QQ 交流群797203945
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
[![Build Status](https://github.com/chenjunnn/rm_vision/actions/workflows/ci.yml/badge.svg)](https://github.com/chenjunnn/rm_vision/actions/workflows/ci.yml)
## 包含项目
装甲板自动瞄准算法模块 https://github.com/chenjunnn/rm_auto_aim
MindVision 相机模块 https://github.com/chenjunnn/ros2_mindvision_camera
HikVision 相机模块 https://github.com/chenjunnn/ros2_hik_camera
机器人云台描述文件 https://github.com/chenjunnn/rm_gimbal_description
串口通讯模块 https://github.com/chenjunnn/rm_serial_driver
视觉算法仿真器 https://github.com/chenjunnn/rm_vision_simulator
## 通过 Docker 部署
拉取镜像
```
docker pull chenjunnn/rm_vision:lastest
```
构建开发容器
```
docker run -it --name rv_devel \
--privileged --network host \
-v /dev:/dev -v $HOME/.ros:/root/.ros -v ws:/ros_ws \
chenjunnn/rm_vision:lastest \
ros2 launch foxglove_bridge foxglove_bridge_launch.xml
```
构建运行容器
```
docker run -it --name rv_runtime \
--privileged --network host --restart always \
-v /dev:/dev -v $HOME/.ros:/root/.ros -v ws:/ros_ws \
chenjunnn/rm_vision:lastest \
ros2 launch rm_vision_bringup vision_bringup.launch.py
```
TBD
## 源码编译
TBD

View File

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Created with Vectornator (http://vectornator.io/) -->
<svg height="100%" stroke-miterlimit="10" style="fill-rule:nonzero;clip-rule:evenodd;stroke-linecap:round;stroke-linejoin:round;" version="1.1" viewBox="0 0 1024 1024" width="100%" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:vectornator="http://vectornator.io" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<linearGradient gradientTransform="matrix(1 0 0 1 6.39488e-14 157.46)" gradientUnits="userSpaceOnUse" id="LinearGradient" x1="375.972" x2="944.796" y1="453.158" y2="453.158">
<stop offset="0" stop-color="#ff8b00"/>
<stop offset="1" stop-color="#ff5900"/>
</linearGradient>
</defs>
<g id="图层-2" vectornator:layerName="图层 2">
<path d="M79.2038 285.449L524.109 285.848C524.109 285.848 651.69 302.769 654.969 439.05C658.247 575.332 521.965 594.533 521.965 594.533L292.315 596.568L242.362 778.529L101.844 777.892L181.788 485.967L512.496 484.668C512.496 484.668 542.064 478.698 541.78 438.895C541.496 399.092 503.683 395.965 503.683 395.965L340.206 395.68L406.734 471.875L248.235 472.328L79.2038 285.449Z" fill="#5a5655" fill-rule="nonzero" opacity="1" stroke="none"/>
</g>
<g id="图层-3" vectornator:layerName="图层 3">
<path d="M375.972 608.813L535.485 608.74L576.68 655.417L788.749 378.975L944.796 378.807L592.763 842.427" fill="url(#LinearGradient)" fill-rule="nonzero" opacity="1" stroke="#5a5655" stroke-linecap="butt" stroke-linejoin="miter" stroke-width="0"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 1.6 KiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 7.9 KiB

View File

@ -1,16 +0,0 @@
# Meta/bringup package: contains only launch files and configs, no code.
cmake_minimum_required(VERSION 3.8)
project(rm_vision_bringup)
find_package(ament_cmake_auto REQUIRED)
if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  # Skip the copyright linter (files carry no per-file copyright headers).
  set(ament_cmake_copyright_FOUND TRUE)
  ament_lint_auto_find_test_dependencies()
endif()
# Install the launch and config directories to the package share directory.
ament_auto_package(
  INSTALL_TO_SHARE
  launch
  config
)

View File

@ -1,20 +0,0 @@
image_width: 1280
image_height: 1024
camera_name: narrow_stereo
camera_matrix:
rows: 3
cols: 3
data: [1577.195796, 0.000000, 644.981961, 0.000000, 1578.500288, 508.789223, 0.000000, 0.000000, 1.000000]
distortion_model: plumb_bob
distortion_coefficients:
rows: 1
cols: 5
data: [-0.093325, 0.249059, -0.000222, 0.001072, 0.000000]
rectification_matrix:
rows: 3
cols: 3
data: [1.000000, 0.000000, 0.000000, 0.000000, 1.000000, 0.000000, 0.000000, 0.000000, 1.000000]
projection_matrix:
rows: 3
cols: 4
data: [1565.207764, 0.000000, 645.413233, 0.000000, 0.000000, 1567.246460, 508.153201, 0.000000, 0.000000, 0.000000, 1.000000, 0.000000]

View File

@ -1,9 +0,0 @@
camera: mv
odom2camera:
xyz: "\"0.135 -0.11 0.10\""
rpy: "\"0.0 0.0 0.0\""
detector_log_level: INFO
tracker_log_level: INFO
serial_log_level: INFO

View File

@ -1,47 +0,0 @@
/camera_node:
ros__parameters:
camera_info_url: package://rm_vision_bringup/config/camera_info.yaml
exposure_time: 2500
gain: 8.0
/serial_driver:
ros__parameters:
timestamp_offset: 0.006
device_name: /dev/ttyACM0
baud_rate: 115200
flow_control: none
parity: none
stop_bits: "1"
/armor_detector:
ros__parameters:
debug: false
detect_color: 0
binary_thres: 80
light.min_ratio: 0.1
armor.min_light_ratio: 0.75
classifier_threshold: 0.8
ignore_classes: ["negative","outpost"]
/armor_tracker:
ros__parameters:
target_frame: odom
max_armor_distance: 10.0
ekf:
sigma2_q_xyz: 0.05 #0.01
sigma2_q_yaw: 5.0 #1.0
sigma2_q_r: 80.0
r_xyz_factor: 4e-4
r_yaw: 5e-3
tracker:
max_match_distance: 0.25
max_match_yaw_diff: 0.85
tracking_thres: 5
lost_time_thres: 1.0

View File

@ -1,32 +0,0 @@
import os

import yaml
from ament_index_python.packages import get_package_share_directory
from launch.substitutions import Command
from launch_ros.actions import Node

# Shared launch-time configuration, loaded once at import.
# FIX: the original passed a bare open() to yaml.safe_load, leaking the file
# handle; a context manager closes it deterministically.
_launch_params_path = os.path.join(
    get_package_share_directory('rm_vision_bringup'), 'config', 'launch_params.yaml')
with open(_launch_params_path) as _f:
    launch_params = yaml.safe_load(_f)

# Robot description generated from the xacro, with the odom->camera transform
# taken from launch_params.yaml.
robot_description = Command(['xacro ', os.path.join(
    get_package_share_directory('rm_gimbal_description'), 'urdf', 'rm_gimbal.urdf.xacro'),
    ' xyz:=', launch_params['odom2camera']['xyz'], ' rpy:=', launch_params['odom2camera']['rpy']])

# Publishes TF from the URDF at high rate for low-latency lookups.
robot_state_publisher = Node(
    package='robot_state_publisher',
    executable='robot_state_publisher',
    parameters=[{'robot_description': robot_description,
                 'publish_frequency': 1000.0}]
)

# Per-node ROS parameters shared by the launch files importing this module.
node_params = os.path.join(
    get_package_share_directory('rm_vision_bringup'), 'config', 'node_params.yaml')

tracker_node = Node(
    package='armor_tracker',
    executable='armor_tracker_node',
    output='both',
    emulate_tty=True,
    parameters=[node_params],
    ros_arguments=['--log-level', 'armor_tracker:=' + launch_params['tracker_log_level']],
)

View File

@ -1,27 +0,0 @@
import os
import sys

from ament_index_python.packages import get_package_share_directory

# Make the sibling `common` launch module importable from the installed share dir.
sys.path.append(os.path.join(get_package_share_directory('rm_vision_bringup'), 'launch'))


def generate_launch_description():
    """Launch detector + tracker (no camera/serial hardware nodes).

    Imports are deferred so the module-level side effects in `common`
    (parameter loading) happen at launch time, after sys.path is set up.
    """
    from common import launch_params, robot_state_publisher, node_params, tracker_node
    from launch_ros.actions import Node
    from launch import LaunchDescription

    detector_node = Node(
        package='armor_detector',
        executable='armor_detector_node',
        emulate_tty=True,
        output='both',
        parameters=[node_params],
        arguments=['--ros-args', '--log-level',
                   'armor_detector:='+launch_params['detector_log_level']],
    )

    return LaunchDescription([
        robot_state_publisher,
        detector_node,
        tracker_node,
    ])

View File

@ -1,82 +0,0 @@
import os
import sys

from ament_index_python.packages import get_package_share_directory

# Make the sibling `common` launch module importable from the installed share dir.
sys.path.append(os.path.join(get_package_share_directory('rm_vision_bringup'), 'launch'))


def generate_launch_description():
    """Full pipeline: camera+detector (composed, intra-process), serial, tracker.

    The serial driver and tracker are started on timers so the camera/detector
    container is up before they begin publishing/consuming.
    """
    from common import node_params, launch_params, robot_state_publisher, tracker_node
    from launch_ros.descriptions import ComposableNode
    from launch_ros.actions import ComposableNodeContainer, Node
    from launch.actions import TimerAction, Shutdown
    from launch import LaunchDescription

    def get_camera_node(package, plugin):
        # Camera component; intra-process comms avoids image copies to the detector.
        return ComposableNode(
            package=package,
            plugin=plugin,
            name='camera_node',
            parameters=[node_params],
            extra_arguments=[{'use_intra_process_comms': True}]
        )

    def get_camera_detector_container(camera_node):
        # Compose camera + detector in one container so images stay in-process.
        return ComposableNodeContainer(
            name='camera_detector_container',
            namespace='',
            package='rclcpp_components',
            executable='component_container',
            composable_node_descriptions=[
                camera_node,
                ComposableNode(
                    package='armor_detector',
                    plugin='rm_auto_aim::ArmorDetectorNode',
                    name='armor_detector',
                    parameters=[node_params],
                    extra_arguments=[{'use_intra_process_comms': True}]
                )
            ],
            output='screen',
            emulate_tty=True,
            ros_arguments=['--ros-args', '--log-level',
                           'armor_detector:='+launch_params['detector_log_level']],
            on_exit=Shutdown(),
        )

    hik_camera_node = get_camera_node('hik_camera', 'hik_camera::HikCameraNode')
    mv_camera_node = get_camera_node('mindvision_camera', 'mindvision_camera::MVCameraNode')
    if (launch_params['camera'] == 'hik'):
        cam_detector = get_camera_detector_container(hik_camera_node)
    elif (launch_params['camera'] == 'mv'):
        cam_detector = get_camera_detector_container(mv_camera_node)
    else:
        # FIX: the original fell through and later raised a confusing
        # NameError on cam_detector; fail fast with a clear message instead.
        raise ValueError(
            "launch_params['camera'] must be 'hik' or 'mv', got: "
            + repr(launch_params['camera']))

    serial_driver_node = Node(
        package='rm_serial_driver',
        executable='rm_serial_driver_node',
        name='serial_driver',
        output='both',
        emulate_tty=True,
        parameters=[node_params],
        on_exit=Shutdown(),
        ros_arguments=['--ros-args', '--log-level',
                       'serial_driver:='+launch_params['serial_log_level']],
    )

    # Give the camera/detector container a head start before serial and tracker.
    delay_serial_node = TimerAction(
        period=1.5,
        actions=[serial_driver_node],
    )
    delay_tracker_node = TimerAction(
        period=2.0,
        actions=[tracker_node],
    )

    return LaunchDescription([
        robot_state_publisher,
        cam_detector,
        delay_serial_node,
        delay_tracker_node,
    ])

View File

@ -1,18 +0,0 @@
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
<name>rm_vision_bringup</name>
<version>0.0.0</version>
<description>TODO: Package description</description>
<maintainer email="chen.junn@outlook.com">chenjun</maintainer>
<license>TODO: License declaration</license>
<buildtool_depend>ament_cmake</buildtool_depend>
<depend>rm_auto_aim</depend>
<depend>rm_serial_driver</depend>
<export>
<build_type>ament_cmake</build_type>
</export>
</package>

View File

@ -1,18 +0,0 @@
---
Language: Cpp
BasedOnStyle: Google
AccessModifierOffset: -2
AlignAfterOpenBracket: AlwaysBreak
BraceWrapping:
AfterClass: true
AfterFunction: true
AfterNamespace: true
AfterStruct: true
BreakBeforeBraces: Custom
ColumnLimit: 100
ConstructorInitializerIndentWidth: 0
ContinuationIndentWidth: 2
DerivePointerAlignment: false
PointerAlignment: Middle
ReflowComments: false

View File

@ -1,55 +0,0 @@
---
Checks: '-*,
performance-*,
-performance-unnecessary-value-param,
llvm-namespace-comment,
modernize-redundant-void-arg,
modernize-use-nullptr,
modernize-use-default,
modernize-use-override,
modernize-loop-convert,
modernize-make-shared,
modernize-make-unique,
misc-unused-parameters,
readability-named-parameter,
readability-redundant-smartptr-get,
readability-redundant-string-cstr,
readability-simplify-boolean-expr,
readability-container-size-empty,
readability-identifier-naming,
'
HeaderFilterRegex: ''
AnalyzeTemporaryDtors: false
CheckOptions:
- key: llvm-namespace-comment.ShortNamespaceLines
value: '10'
- key: llvm-namespace-comment.SpacesBeforeComments
value: '2'
- key: misc-unused-parameters.StrictMode
value: '1'
- key: readability-braces-around-statements.ShortStatementLines
value: '2'
# type names
- key: readability-identifier-naming.ClassCase
value: CamelCase
- key: readability-identifier-naming.EnumCase
value: CamelCase
- key: readability-identifier-naming.UnionCase
value: CamelCase
# method names
- key: readability-identifier-naming.MethodCase
value: camelBack
# variable names
- key: readability-identifier-naming.VariableCase
value: lower_case
- key: readability-identifier-naming.ClassMemberSuffix
value: '_'
# const static or global variables are UPPER_CASE
- key: readability-identifier-naming.EnumConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.StaticConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.ClassConstantCase
value: UPPER_CASE
- key: readability-identifier-naming.GlobalVariableCase
value: UPPER_CASE

View File

@ -1,67 +0,0 @@
# ROS 2 component package wrapping the Hikvision USB3 camera SDK.
cmake_minimum_required(VERSION 3.8)
project(hik_camera)

## Use C++14
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

## By adding -Wall and -Werror, the compiler does not ignore warnings anymore,
## enforcing cleaner code.
## FIX: add_definitions is for -D preprocessor macros; compile flags belong to
## add_compile_options.
add_compile_options(-Wall -Werror)

## Export compile commands for clangd
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

find_package(ament_cmake_auto REQUIRED)
ament_auto_find_build_dependencies()

ament_auto_add_library(${PROJECT_NAME} SHARED
  src/hik_camera_node.cpp
)

# Vendored Hikvision SDK headers.
target_include_directories(${PROJECT_NAME} PUBLIC hikSDK/include)

# Pick the vendored SDK binaries matching the target architecture; the shared
# libraries are installed next to the package library so they are found at
# runtime.
if(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64")
  target_link_directories(${PROJECT_NAME} PUBLIC hikSDK/lib/amd64)
  install(
    DIRECTORY hikSDK/lib/amd64/
    DESTINATION lib
  )
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64")
  target_link_directories(${PROJECT_NAME} PUBLIC hikSDK/lib/arm64)
  install(
    DIRECTORY hikSDK/lib/arm64/
    DESTINATION lib
  )
else()
  # FIX: report the variable actually tested (CMAKE_SYSTEM_PROCESSOR, not
  # CMAKE_HOST_SYSTEM_PROCESSOR) and correct the "Unsupport" typo.
  message(FATAL_ERROR "Unsupported system architecture: ${CMAKE_SYSTEM_PROCESSOR}!")
endif()

target_link_libraries(${PROJECT_NAME}
  FormatConversion
  MediaProcess
  MvCameraControl
  MVRender
  MvUsb3vTL
)

rclcpp_components_register_node(${PROJECT_NAME}
  PLUGIN hik_camera::HikCameraNode
  EXECUTABLE ${PROJECT_NAME}_node
)

if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  # These linters conflict with the project's clang-format style.
  list(APPEND AMENT_LINT_AUTO_EXCLUDE
    ament_cmake_copyright
    ament_cmake_cpplint
    ament_cmake_uncrustify
  )
  ament_lint_auto_find_test_dependencies()
endif()

ament_auto_package(
  INSTALL_TO_SHARE
  config
  launch
)

View File

@ -1,14 +0,0 @@
# ros2_hik_camera
A ROS2 package for Hikvision USB3.0 industrial camera
## Usage
```
ros2 launch hik_camera hik_camera.launch.py
```
## Params
- exposure_time
- gain

View File

@ -1,20 +0,0 @@
image_width: 640
image_height: 480
camera_name: narrow_stereo
camera_matrix:
rows: 3
cols: 3
data: [1320.127401, 0.000000, 609.902940, 0.000000, 1329.050651, 457.308236, 0.000000, 0.000000, 1.000000]
distortion_model: plumb_bob
distortion_coefficients:
rows: 1
cols: 5
data: [-0.034135, 0.131210, -0.015866, -0.004433, 0.000000]
rectification_matrix:
rows: 3
cols: 3
data: [1.000000, 0.000000, 0.000000, 0.000000, 1.000000, 0.000000, 0.000000, 0.000000, 1.000000]
projection_matrix:
rows: 3
cols: 4
data: [1337.148438, 0.000000, 605.640265, 0.000000, 0.000000, 1333.494019, 444.618912, 0.000000, 0.000000, 0.000000, 1.000000, 0.000000]

View File

@ -1,6 +0,0 @@
/hik_camera:
ros__parameters:
camera_name: narrow_stereo
exposure_time: 6000
gain: 32.0

Some files were not shown because too many files have changed in this diff Show More