问题遇到的现象和发生背景
利用 solvePnP 求解出平移向量(tvec),然后由它计算出的距离和角度误差都太大
问题相关代码,请勿粘贴截图
#include <opencv2/opencv.hpp>
#include <iostream>
#include<string>
#include<cmath>
using namespace cv;
using namespace std;
#define PI 3.1415926
// Pose-solving state and calibration data shared by the detection loop below.
cv::Mat rot; // rotation matrix (target orientation in camera frame)
cv::Mat position_in_camera; // translation vector: target position in camera frame
cv::Mat position_in_ptz; // target position in PTZ (gimbal) coordinates
double pit_ref; // pitch angle of the target, degrees (written in main)
double yaw_ref; // yaw angle of the target, degrees (written in main)
cv::Mat rot_camera2ptz; // rotation matrix: camera frame -> PTZ frame
cv::Mat trans_camera2ptz; // translation vector: camera frame -> PTZ frame
double offset_y_barrel_ptz; // vertical offset between gimbal center and barrel (never assigned here, so 0)
Mat cam_matrix = (Mat_<double>(3, 3) << 1127.53102692435, 0, 1010.04554630085,
0, 1173.30393487712, 538.275986590883,
0, 0, 1); // camera intrinsic matrix (fx 0 cx; 0 fy cy; 0 0 1) from calibration
Mat distortion_coeff = (Mat_<double>(5, 1) << 0.00891802834654325, -0.0127666161836838, -0.00112607891848638, 0.00172349451080566, -0.0136946445789243); // distortion coefficients (k1, k2, p1, p2, k3)
double dis; // Euclidean distance camera -> target; units follow the 3D model points
Mat img; // current camera frame (BGR)
float width_target = 160; // real target width — NOTE(review): units must match the point3d model in main; confirm
float height_target = 70; // real target height — NOTE(review): units must match the point3d model in main; confirm
vector<vector<int >> light_points;// points the light source has passed through (unused in this snippet)
//kalman k1;// Kalman filter object (disabled)
int Width = 0;
int Height = 0;
vector<float> realS;// ground-truth light-to-camera distances (unused in this snippet)
// HSV threshold ranges, per color: hmin, smin, vmin, hmax, smax, vmax
vector<vector<int>> light_colors// light colors to detect
{
{30,0,245,150,10,255}// phone flashlight / bright screen
};
// Monocular distance estimate from the apparent size of the target rectangle.
// Pinhole model: distance = focal_length * real_size / pixel_size; the output
// is the average of the width-based and height-based estimates.
// armor : detected target as a rotated rectangle (pixel units).
// dist  : output distance, in the same units as width_target/height_target.
void getDistanceDanmu(RotatedRect armor, double& dist)
{
	// The longer rectangle side corresponds to the target's width (160 > 70),
	// regardless of how minAreaRect assigned width/height.
	float side_long = std::max(armor.size.width, armor.size.height);
	float side_short = std::min(armor.size.width, armor.size.height);
	// fx * real_width and fy * real_height from the intrinsic matrix.
	float fw = cam_matrix.at<double>(0, 0) * width_target;
	float fh = cam_matrix.at<double>(1, 1) * height_target;
	// Two independent estimates, one per dimension.
	float est_w = fw / side_long;
	float est_h = fh / side_short;
	dist = (est_w + est_h) / 2;
}
// Convert a target position in PTZ coordinates into gimbal aiming angles.
//
// pos_in_ptz : 3x1 CV_64F vector {x, y, z} — assumed x right, y down,
//              z forward along the optical axis (TODO confirm with caller).
// angle_x    : output yaw, degrees.
// angle_y    : output pitch, degrees.
//
// BUG FIX: the original read the file-global `dis` (the full Euclidean
// distance |t|) as the horizontal reference instead of the depth component z,
// which systematically shrinks both angles and silently couples this function
// to global state. Its two if/else branches also reduce to the same
// expression, atan(y/z), and are merged here.
void adjustPTZ2Barrel(const cv::Mat& pos_in_ptz,
	double& angle_x, double& angle_y
)
{
	const double* _xyz = pos_in_ptz.ptr<double>(0);
	const double x = _xyz[0];
	const double y = _xyz[1];
	const double z = _xyz[2]; // depth — NOT the Euclidean distance
	// Yaw: horizontal angle between the optical axis and the target.
	angle_x = atan2(x, z);
	// Barrel/gimbal vertical-offset compensation — currently disabled
	// (offset_y_barrel_ptz is never assigned, so alpha would be 0 anyway).
	double alpha = 0.0;
	// alpha = asin(offset_y_barrel_ptz / sqrt(y * y + z * z));
	// Pitch: both branches of the original collapse to atan(y/z);
	// atan2 additionally behaves correctly if z were ever <= 0.
	angle_y = -alpha + atan2(y, z);
	// Radians -> degrees.
	angle_x = angle_x * 180.0 / PI;
	angle_y = angle_y * 180.0 / PI;
}
// Draw the outline of a rotated rectangle by connecting its four corners.
// img       : image drawn on in place.
// rect      : rectangle whose corners are rendered.
// color     : line color.
// thickness : line thickness in pixels.
void drawRotatedRect(cv::Mat& img, const cv::RotatedRect& rect, const cv::Scalar& color, int thickness)
{
	cv::Point2f corner[4];
	rect.points(corner);
	// Connect each corner to the next, wrapping from the last back to the first.
	int prev = 3;
	for (int cur = 0; cur < 4; prev = cur, cur++)
	{
		cv::line(img, corner[prev], corner[cur], color, thickness);
	}
}
// Extract the four corners of `rect` in a fixed order:
// left-up, right-up, right-down, left-down (image coordinates, y grows DOWN).
// rect     : input rotated rectangle.
// target2d : output; cleared and refilled with the 4 ordered corners.
void getTarget2dPoinstion(const cv::RotatedRect& rect,
	std::vector<cv::Point2f>& target2d)
{
	cv::Point2f pts[4];
	rect.points(pts);
	// After sorting by x, pts[0..1] are the two leftmost corners and
	// pts[2..3] the two rightmost.
	std::sort(pts, pts + 4,
		[](const cv::Point2f& a, const cv::Point2f& b) { return a.x < b.x; });
	// Within each pair, the smaller y is the "up" corner (image y points down).
	// Strict '<' keeps the original tie behavior: on equal y the second
	// element of the pair is treated as "up".
	const bool left_up_first = pts[0].y < pts[1].y;
	const cv::Point2f lu = left_up_first ? pts[0] : pts[1];
	const cv::Point2f ld = left_up_first ? pts[1] : pts[0];
	const bool right_up_first = pts[2].y < pts[3].y;
	const cv::Point2f ru = right_up_first ? pts[2] : pts[3];
	const cv::Point2f rd = right_up_first ? pts[3] : pts[2];
	target2d.clear();
	target2d.push_back(lu);
	target2d.push_back(ru);
	target2d.push_back(rd);
	target2d.push_back(ld);
}
int main(int argc, char* argv[])
{
VideoCapture cap(1);
Mat frame;
Mat mask;
while (true)
{
cap.read(img);
//镜像
/* cvtColor(img, img, COLOR_BGR2GRAY);*/
cvtColor(img, frame, COLOR_BGR2HSV);
inRange(frame, Scalar(30, 0, 245), Scalar(150, 10, 255), mask);
vector<vector<Point>> contours;
findContours(mask, contours, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
for (int i = 0; i < contours.size(); i++)
{
RotatedRect RRect = minAreaRect(contours[i]);
int area = RRect.size.area();//得到面积
;
if (area > 500)//过滤噪声
{
drawRotatedRect(img, RRect, cv::Scalar(255, 0, 0), 2);//画出最小旋转矩形
Mat rot_vector, translation_vector;
vector<Point2f> object2d_point;
getTarget2dPoinstion(RRect, object2d_point);//输入旋转矩形,返回坐标点
float half_x = 7.20f / 2.0f;
float half_y =7.20f / 2.0f;//数据格式很重要
std::vector<cv::Point3f> point3d;
point3d.push_back(cv::Point3f(-half_x,half_y, 0));
point3d.push_back(cv::Point3f(half_x, half_y, 0));
point3d.push_back(cv::Point3f(half_x, -half_y, 0));
point3d.push_back(cv::Point3f(-half_x, -half_y, 0));
solvePnP(point3d, object2d_point, cam_matrix, distortion_coeff, rot_vector, translation_vector); //自瞄
Mat rotM = Mat::eye(3, 3, CV_64F);
Mat rotT = Mat::eye(3, 3, CV_64F);
Rodrigues(rot_vector, rotM); //将旋转向量变换成旋转矩阵
Rodrigues(translation_vector, rotT);
circle(img, object2d_point[0], 8, Scalar(0, 255, 0), -1);
circle(img, object2d_point[1], 16, Scalar(0, 255, 0), -1);
circle(img, object2d_point[2], 24, Scalar(0, 255, 0), -1);
circle(img, object2d_point[3], 32, Scalar(0, 255, 0), -1);
double tx = translation_vector.at<double>(0, 0);
double ty = translation_vector.at<double>(1, 0);
double tz = translation_vector.at<double>(2, 0);
dis = sqrt(tz*tz+ty*ty+tx*tx) ;
double dist;
//getDistanceDanmu(RRect, dist);
/*adjustPTZ2Barrel(translation_vector, yaw_ref, pit_ref);*/
double tan_pitch = ty / sqrt(tx * tx + tz * tz);
double tan_yaw = tx / tz;
pit_ref = -atan(tan_pitch) * 180 / CV_PI;
yaw_ref = atan(tan_yaw) * 180 / CV_PI;
cout << dis<< endl;
cout << translation_vector << endl;
cout <<"俯仰角"<< pit_ref << endl <<"偏航角"<< yaw_ref << endl;; // 俯仰角
}
}
imshow("image", img);
waitKey(10);
}
}
运行结果及报错内容
debug的话用微信二维码就可以了,或者手机把屏幕开到最亮,但要记得根据图片尺寸调世界坐标
坐标、距离以及角度的误差都很大,搞了好久都不知道原因在哪?
xy不知为何为负数,z坐标差距太大。
请问距离是xyz平方和开根号吗?如果是的话,为何我的误差这么大。
我的解答思路和尝试过的方法
尝试过多种办法,距离和角度分别有两种求解方法,一种直接一种函数。
我想要达到的结果
能不能请大家帮我看看我问题出在哪里,让我能够减少误差,成功输出.
成功解决,必有悬赏!