Optimize code alignment; review and adjust some comments
parent ed6a09bf8b
commit ce2f195559
@@ -30,8 +30,8 @@ private:
    rknn_input_output_num io_num;

    int channel = 3;
    int width = 0;
    int height = 0;

    rknn_input inputs[1];

@@ -4,59 +4,58 @@

unsigned char * Detection::load_data(FILE *fp, size_t ofst, size_t sz)
{
    unsigned char *data;
    int ret;

    data = NULL;

    if (NULL == fp)
    {
        return NULL;
    }

    ret = fseek(fp, ofst, SEEK_SET);
    if (ret != 0)
    {
        printf("blob seek failure.\n");
        return NULL;
    }

    data = (unsigned char *)malloc(sz);
    if (data == NULL)
    {
        printf("buffer malloc failure.\n");
        return NULL;
    }
    ret = fread(data, 1, sz, fp);
    return data;
}

unsigned char * Detection::load_model(const char *filename, int *model_size)
{
    FILE *fp;
    unsigned char *data;

    fp = fopen(filename, "rb");
    if (NULL == fp)
    {
        printf("Open file %s failed.\n", filename);
        return NULL;
    }

    fseek(fp, 0, SEEK_END);
    int size = ftell(fp);

    data = load_data(fp, 0, size);

    fclose(fp);

    *model_size = size;
    return data;
}

Detection::Detection()
{
    /* Create the neural network */
    printf("Loading mode...\n");
    int model_data_size = 0;

@@ -64,21 +63,21 @@ Detection::Detection()
    ret = rknn_init(&ctx, model_data, model_data_size, 0, NULL);
    if (ret < 0)
    {
        printf("rknn_init error ret=%d\n", ret);
    }

    rknn_sdk_version version;
    ret = rknn_query(ctx, RKNN_QUERY_SDK_VERSION, &version, sizeof(rknn_sdk_version));
    if (ret < 0)
    {
        printf("rknn_init error ret=%d\n", ret);
    }
    printf("sdk version: %s driver version: %s\n", version.api_version, version.drv_version);

    ret = rknn_query(ctx, RKNN_QUERY_IN_OUT_NUM, &io_num, sizeof(io_num));
    if (ret < 0)
    {
        printf("rknn_init error ret=%d\n", ret);
    }
    printf("model input num: %d, output num: %d\n", io_num.n_input, io_num.n_output);

@@ -86,12 +85,12 @@ Detection::Detection()
    memset(input_attrs, 0, sizeof(input_attrs));
    for (int i = 0; i < io_num.n_input; i++)
    {
        input_attrs[i].index = i;
        ret = rknn_query(ctx, RKNN_QUERY_INPUT_ATTR, &(input_attrs[i]), sizeof(rknn_tensor_attr));
        if (ret < 0)
        {
            printf("rknn_init error ret=%d\n", ret);
        }
        // dump_tensor_attr(&(input_attrs[i]));
    }

@@ -99,35 +98,34 @@ Detection::Detection()
    memset(output_attrs, 0, sizeof(output_attrs));
    for (int i = 0; i < io_num.n_output; i++)
    {
        output_attrs[i].index = i;
        ret = rknn_query(ctx, RKNN_QUERY_OUTPUT_ATTR, &(output_attrs[i]), sizeof(rknn_tensor_attr));
        // dump_tensor_attr(&(output_attrs[i]));
    }

    for (int i = 0; i < io_num.n_output; ++i)
    {
        out_scales.push_back(output_attrs[i].scale);
        out_zps.push_back(output_attrs[i].zp);
    }

    if (input_attrs[0].fmt == RKNN_TENSOR_NCHW)
    {
        printf("model is NCHW input fmt\n");
        channel = input_attrs[0].dims[1];
        height = input_attrs[0].dims[2];
        width = input_attrs[0].dims[3];
    }
    else
    {
        printf("model is NHWC input fmt\n");
        height = input_attrs[0].dims[1];
        width = input_attrs[0].dims[2];
        channel = input_attrs[0].dims[3];
    }

    printf("model input height=%d, width=%d, channel=%d\n", height, width, channel);

    memset(inputs, 0, sizeof(inputs));
    inputs[0].index = 0;
    inputs[0].type = RKNN_TENSOR_UINT8;

@@ -135,8 +133,9 @@ Detection::Detection()
    inputs[0].fmt = RKNN_TENSOR_NHWC;
    inputs[0].pass_through = 0;
}

// Review note: passing cv::Mat by value here forces an extra copy and hurts performance
detect_result_group_t Detection::outputParse(cv::Mat netInputImg)
{
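
// ---- Reviewer sketch (illustration only, not part of this commit) ----------------
// The note above flags the pass-by-value parameter. A const reference keeps every call
// site unchanged and makes it explicit that the detector does not modify the caller's
// frame; since cv::Mat copies are shallow, the saving is the header copy and the
// reference-count traffic rather than a full pixel copy. The class below is a
// hypothetical stand-in for Detection, shown only to illustrate the signature.
#include <opencv2/imgproc.hpp>

class DetectionSignatureSketch
{
public:
    void outputParse(const cv::Mat &netInputImg)
    {
        cv::Mat img;
        cv::cvtColor(netInputImg, img, cv::COLOR_BGR2RGB); // same first step as the real method
        (void)img;
    }
};
// -----------------------------------------------------------------------------------
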
@@ -144,7 +143,7 @@ detect_result_group_t Detection::outputParse(cv::Mat netInputImg)
    if (!orig_img.data)
    {
        printf("no image\n");
    }
    cv::Mat img;
    cv::cvtColor(orig_img, img, cv::COLOR_BGR2RGB);

@@ -157,24 +156,25 @@ detect_result_group_t Detection::outputParse(cv::Mat netInputImg)
    memset(&pads, 0, sizeof(BOX_RECT));
    cv::Size target_size(width, height);
    cv::Mat resized_img(target_size.height, target_size.width, CV_8UC3);

    // The input image has to be scaled to the resolution expected by the detection model
    float scale_w = (float)target_size.width / img.cols;
    float scale_h = (float)target_size.height / img.rows;

    if (img_width != width || img_height != height)
    {
        // printf("resize image with letterbox\n");
        float min_scale = std::min(scale_w, scale_h);
        scale_w = min_scale;
        scale_h = min_scale;
        letterbox(img, resized_img, pads, min_scale, target_size);
        // Save the preprocessed image for debugging
        // cv::imwrite("letterbox_input.jpg", resized_img);
        inputs[0].buf = resized_img.data;
    }
    else
    {
        inputs[0].buf = img.data;
    }

    rknn_inputs_set(ctx, io_num.n_input, inputs);

@@ -183,7 +183,7 @@ detect_result_group_t Detection::outputParse(cv::Mat netInputImg)
    memset(outputs, 0, sizeof(outputs));
    for (int i = 0; i < io_num.n_output; i++)
    {
        outputs[i].want_float = 0;
    }

    // Run inference

@@ -39,7 +39,6 @@ void *ranging(void *args) // ranging thread

int main(int argc, char **argv)
{
    pthread_t tids[1]; // thread that runs ranging
    int ret = pthread_create(&tids[0], NULL, ranging, NULL);

@@ -1,17 +1,18 @@
#include <opencv2/opencv.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
#include <stdio.h>
#include "ranging.h"
#include <math.h>
#include <stdlib.h>

#include <string.h>
#include <sys/time.h>
#include <dlfcn.h>

// OpenCV headers
#include "opencv2/core.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/imgproc.hpp"
#include <opencv2/opencv.hpp>
#include <opencv2/highgui.hpp>

#include <sys/stat.h>
#include <sys/types.h>

@@ -19,19 +20,22 @@
#include <vector>
#include <tuple>
#include <string>
#include <string.h>
#include <algorithm>

// Review note: the duplicated and unused headers above should be removed
// #include <ros/ros.h>

using namespace cv;

// Remap (rectify) the left and right images
void Ranging::rectifyImage(Mat &oriImgL, Mat &oriImgR)
{
    remap(oriImgL, oriImgL, mapX1, mapX2, cv::INTER_LINEAR);
    remap(oriImgR, oriImgR, mapY1, mapY2, cv::INTER_LINEAR);
}

// Convert pixel coordinates to camera coordinates
std::vector<float> Ranging::pic2cam(int u, int v)
{
    // (u,v) -> (x,y) stored as (loc[0], loc[1])
    std::vector<float> loc;

@@ -40,35 +44,46 @@ std::vector<float> Ranging::pic2cam(int u, int v) // pixel coordinates to camera coordinates
    return loc;
}

// Template matching
std::vector<int> Ranging::muban(Mat &left_image, Mat &right_image, const int *coordinates)
{
    int x1 = coordinates[0], y1 = coordinates[1], x2 = coordinates[2], y2 = coordinates[3];
    // Crop the target box out of the right image
    Mat tpl = right_image.rowRange(max(y1 - 2, 0), min(y2 + 2, 479)).colRange(x1, x2);
    // Search image: the epipolar constraint means only the same horizontal band is needed
    Mat target = left_image.rowRange(max(y1 - 20, 0), min(y2 + 20, 479)).colRange(0, 639);
    int th = tpl.rows, tw = tpl.cols;
    Mat result;

    // Matching method: normalized correlation coefficient (zero-mean normalized cross-correlation)
    matchTemplate(target, tpl, result, TM_CCOEFF_NORMED);
    double minVal, maxVal;
    Point minLoc, maxLoc;
    // Get the best-match location
    minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc);
    Point tl = maxLoc, br;
    // Review note: these hard-coded bounds should be macros
    // Clamp to pixel coordinates, 640 - 1
    br.x = min(maxLoc.x + tw, 639);
    // Clamp to pixel coordinates, 480 - 1
    br.y = min(maxLoc.y + th, 479);

    /*
    // Show the match result
    br.x = min(maxLoc.x + tw, 319);
    br.y = min(maxLoc.y + th, 239);
    rectangle(target, tl, br, (0, 255, 0), 3);
    imshow("match-", target);
    waitKey(2);
    */

    std::vector<int> maxloc;
    maxloc.push_back(maxLoc.x);
    maxloc.push_back(maxLoc.y);
    return maxloc;
}
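
// ---- Reviewer sketch (illustration only, not part of this commit) ----------------
// The note inside muban() suggests replacing the hard-coded 639/479 clamps with named
// constants. A minimal version (the constant and function names are illustrative):
#include <algorithm>
#include <opencv2/core.hpp>

constexpr int kFrameWidth  = 640; // single-camera frame width in pixels
constexpr int kFrameHeight = 480; // single-camera frame height in pixels

// Clamp a bottom-right corner to the frame, replacing min(x + tw, 639) / min(y + th, 479).
inline cv::Point clampToFrame(int x, int y)
{
    return cv::Point(std::min(x, kFrameWidth - 1), std::min(y, kFrameHeight - 1));
}
// -----------------------------------------------------------------------------------
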

// Review note: the purpose of this function is not clear yet
void Ranging::horizon_estimate(Mat& img, Mat& bboxs, int k)
{
    // Assumes the camera is parallel to the ground

@@ -138,49 +153,70 @@ void Ranging::getInfo(Mat &imgL, Mat &imgR, Mat &detBoxes, Mat &info)
    Mat imgGrayL, imgGrayR;
    cvtColor(imgL, imgGrayL, COLOR_BGR2GRAY);
    cvtColor(imgR, imgGrayR, COLOR_BGR2GRAY);
    // Review note: imgR_weight is never used afterwards, so this clone is wasted CPU work
    Mat imgR_weight = imgR.clone();
    Mat infoRow;

    for (uchar i = 0; i < detBoxes.rows; i++)
    {
        // Box position
        int x1 = detBoxes.at<float>(i, 0);
        int y1 = detBoxes.at<float>(i, 1);
        int x2 = detBoxes.at<float>(i, 2);
        int y2 = detBoxes.at<float>(i, 3);

        // Confidence and class
        float Conf = detBoxes.at<float>(i, 4);
        int cls = detBoxes.at<float>(i, 5);

        // Skip boxes that sit too far left/right or are too large
        if (x1 > 600 || x2 < 50 || y1 < 5 || y2 > 475 || x1 < 2 || x2 > 590 || abs(x2 - x1) > 550) {
            continue;
        }

        // Draw the detection box
        rectangle(imgR, Point(int(x1), int(y1)), Point(int(x2), int(y2)), Scalar(0, 0, 255));

        // Detection ran on the right image; use template matching to locate the box in the left image
        int coordinates[4] = {x1, y1, x2, y2};
        // Template matching
        std::vector<int> disp_pixel = muban(imgGrayL, imgGrayR, coordinates);

        // Review note: disp_pixel_x is an odd name here; dispDiffX would be clearer
        // Horizontal disparity: matched x in the left image minus x1 in the right image
        float disp_pixel_x = disp_pixel[0] - x1;
        // The vertical offset is not used afterwards
        float disp_pixel_y = disp_pixel[1] - y1;
        // 0.12 compensates the template-matching bias; an empirical value obtained by fitting
        disp_pixel_x = (int)(disp_pixel_x + disp_pixel_x * 0.12);

        // Mat disp_matrix = Mat(1, 1, CV_32F, Scalar(disp_pixel_x)), disp_pixel_xyz;
        // Disparity matrix filled with the horizontal disparity, so it can be reprojected to 3D coordinates
        Mat disp_matrix = Mat(imgGrayL.rows, imgGrayL.cols, CV_32F, Scalar(disp_pixel_x));
        Mat threed_pixel_xyz, threedImage;
        // Reproject from 2D to 3D using the Q matrix
        reprojectImageTo3D(disp_matrix, threedImage, q, false);
        // Square every pixel of the three coordinate channels
        threed_pixel_xyz = threedImage.mul(threedImage);
        std::vector<Mat> channels;
        // Split the squared result into its three channels; their sum is the squared Euclidean distance
        split(threed_pixel_xyz.clone(), channels);
        // Squared Euclidean distance = X^2 + Y^2 + Z^2
        threed_pixel_xyz = channels[0] + channels[1] + channels[2];
        threed_pixel_xyz.forEach<float>([](float &value, const int *position) { value = sqrt(value); }); // distance d

        int mid_pixel = int((x1 + x2) / 2);

        // Compute the viewing angle, converting from pixel to camera coordinates
        // Review note: a small struct would be cheaper here than std::vector
        std::vector<float> mid = pic2cam(imgGrayR.cols / 2, imgGrayR.rows);
        std::vector<float> loc_tar = pic2cam(mid_pixel, imgGrayR.rows);
        float alfa = atan((loc_tar[0] - mid[0]) / q.at<double>(2, 3));

        // Target too close: disparity too large
        if (disp_pixel_x > 240)
        {
            char cm[15];
            // sprintf(cm, "cannot match !");

@@ -217,29 +253,27 @@ void Ranging::getInfo(Mat &imgL, Mat &imgR, Mat &detBoxes, Mat &info)
            // sprintf(tf, "Too far!");
            char cm[15];
            // sprintf(cm, "cannot match !");
            sprintf(cm, "%d , %.2f", cls, Conf);
            putText(imgR, cm, Point((x1), (y1)), FONT_HERSHEY_PLAIN, 2.2, Scalar(0, 0, 255), 2);
            infoRow = (Mat_<float>(1, 4) << -1, -1, -1, -1);
            infoRow.copyTo(info.row(i));
            continue;
        }

        // Draw the measurement info on the image
        // char dc[50], wh[50];
        // std::string cname = className[cls + 1];
        // sprintf(dc, "dis:%.2fcm %d", median, cls);
        // sprintf(wh, "W: %.2fcm H: %.2fcm alfa: %2f", w1, h1, alfa);
        // putText(imgR, dc, Point(x1, y2), FONT_HERSHEY_PLAIN, 1.5, Scalar(0, 0, 255), 2);
        // putText(imgR, wh, Point(x1, y1), FONT_HERSHEY_PLAIN, 1.5, Scalar(0, 0, 255), 1.5);

        // Store the result row
        infoRow = (Mat_<float>(1, 4) << median, w1, h1, alfa);
        infoRow.copyTo(info.row(i));
    };

    // cv::imshow("kk",imgR);
    // cv::waitKey(1);
}
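
// ---- Reviewer sketch (illustration only, not part of this commit) ----------------
// The note in getInfo() suggests that pic2cam() would be cheaper returning a small
// struct instead of a std::vector<float>, since the vector heap-allocates on every
// call. CamPoint is a hypothetical name; the conversion itself stays whatever
// pic2cam() currently computes.
struct CamPoint
{
    float x = 0.0f;
    float y = 0.0f;
};

// Hypothetical signature: CamPoint Ranging::pic2cam(int u, int v);
// The call sites in getInfo() would then read:
//     CamPoint mid     = pic2cam(imgGrayR.cols / 2, imgGrayR.rows);
//     CamPoint loc_tar = pic2cam(mid_pixel, imgGrayR.rows);
//     float alfa = atan((loc_tar.x - mid.x) / q.at<double>(2, 3));
// -----------------------------------------------------------------------------------
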

Ranging::Ranging(int index, int imgw, int imgh) : // initialization

@@ -252,52 +286,54 @@ Ranging::Ranging(int index, int imgw, int imgh) : // initialization
    imgh(imgh)
{
    // Z = Mat::zeros(2, 1, CV_32FC1);
    vcapture = cv::VideoCapture(index);
    // vcapture.set(CAP_PROP_FOURCC, CAP_OPENCV_MJPEG);
    vcapture.set(cv::CAP_PROP_FPS, 30);
    vcapture.set(cv::CAP_PROP_FRAME_WIDTH, imgw * 2);
    vcapture.set(cv::CAP_PROP_FRAME_HEIGHT, imgh);
    vcapture.set(cv::CAP_PROP_BUFFERSIZE, 1);

    auto imgSize = Size(imgw, imgh);
    Mat r1(3, 3, CV_64F), r2(3, 3, CV_64F), p1(3, 4, CV_64F), p2(3, 4, CV_64F);

    stereoRectify(cam_matrix_left.t(), distortion_l, cam_matrix_right.t(), distortion_r,
                  imgSize, rotate.t(), trans, r1, r2, p1, p2, q); // stereo rectification

    // Compute the undistortion and rectification maps
    initUndistortRectifyMap(cam_matrix_left.t(), distortion_l, r1, p1, imgSize, CV_32F, mapX1, mapX2);
    // Compute the undistortion and rectification maps
    initUndistortRectifyMap(cam_matrix_right.t(), distortion_r, r2, p2, imgSize, CV_32F, mapY1, mapY2);

    // RKNN_Create(&hdx, modelPath); // initialize the detection model
    std::cout << " ******************* CAMERA initialization ********************" << std::endl;
}

std::vector<Mat> Ranging::get_range()
{
    double rang_old, rang_now;
    // rang_old = ros::Time::now().toSec(); // measure the run time
    Mat frame, lframe, rframe;
    // Grab a video frame
    vcapture >> frame;

    // Show the raw frame
    // cv::imshow("frame",frame);

    if (!frame.empty())
    {
        int64 t = getTickCount();
        // Copy the left view
        Mat lframe(frame.colRange(0, imgw).clone());
        // Copy the right view
        Mat rframe(frame.colRange(imgw, imgw * 2).clone());

        rectifyImage(lframe, rframe);

        // Review note: the frame is cloned here, copied again when passed by value,
        // and cloned once more inside the detector
        cv::Mat Rframe = rframe.clone();

        // Review note: yolov5s.outputParse could be renamed to detectImage()
        detect_result_group = yolov5s.outputParse(Rframe);

        if (detect_result_group.count <= 0)

@@ -305,13 +341,17 @@ std::vector<Mat> Ranging::get_range()
            std::cout << "detect nothing" << std::endl;
        }

        // Convert the detection boxes to our own format
        // Matrix holding the detections, one row per box: (x, y, x, y, conf, cls)
        Mat detBoxes(detect_result_group.count, 5, CV_32F);
        char text[256];

        // Store the detections as (x, y, x, y, conf, cls)
        // Review note: this block should be factored out into a drawDetectBobox() helper
        for (int i = 0; i < detect_result_group.count; i++)
        {
            detect_result_t *det_result = &(detect_result_group.results[i]);
            sprintf(text, "%s %.1f%%", det_result->name, det_result->prop * 100);

            // printf("%s @ (%d %d %d %d) %f\n", det_result->name, det_result->box.left, det_result->box.top,
            //        det_result->box.right, det_result->box.bottom, det_result->prop);

@@ -330,51 +370,59 @@ std::vector<Mat> Ranging::get_range()
            detBoxes.at<float>(i, 3) = ymax;
            detBoxes.at<float>(i, 4) = det_result->prop;
            // Experimental: filter out oversized false detections
            // float ratio = (xmax - xmin) * (ymax - ymin) / 308480.;
            // if (ratio > 0.7)
            // {
            //     detBoxes.at<float>(i, 5) = -1;
            //     continue;
            // }
        }
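
// ---- Reviewer sketch (illustration only, not part of this commit) ----------------
// The note above proposes factoring the loop into a drawDetectBobox()-style helper.
// Simplified sketch: it draws the boxes and fills detBoxes straight from the detector
// output (detect_result_group_t / detect_result_t come from the detection header this
// file already uses), whereas the real loop may first rescale the coordinates back to
// the original frame; that part is not visible in this diff.
#include <cstdio>
#include <opencv2/imgproc.hpp>

inline void drawDetectBoxes(cv::Mat &frame, const detect_result_group_t &group, cv::Mat &detBoxes)
{
    char text[256];
    for (int i = 0; i < group.count; i++)
    {
        const detect_result_t *det = &(group.results[i]);
        std::sprintf(text, "%s %.1f%%", det->name, det->prop * 100);
        cv::rectangle(frame, cv::Point(det->box.left, det->box.top),
                      cv::Point(det->box.right, det->box.bottom), cv::Scalar(0, 0, 255));
        cv::putText(frame, text, cv::Point(det->box.left, det->box.top),
                    cv::FONT_HERSHEY_PLAIN, 1.5, cv::Scalar(0, 0, 255), 2);
        detBoxes.at<float>(i, 0) = det->box.left;   // x1
        detBoxes.at<float>(i, 1) = det->box.top;    // y1
        detBoxes.at<float>(i, 2) = det->box.right;  // x2
        detBoxes.at<float>(i, 3) = det->box.bottom; // y2
        detBoxes.at<float>(i, 4) = det->prop;       // confidence
    }
}
// -----------------------------------------------------------------------------------
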

        // Ranging results, one row per box: distance d, width w, height h, angle alfa
        Mat info(detect_result_group.count, 4, CV_32F);

        // If anything was detected
        if (detect_result_group.count)
        {
            // Review note: getInfo() could be renamed to calObjectRanging()
            // Measure the distance of every target here and store it into info
            // Review note: the left/right views are converted to grayscale again inside, wasting CPU
            getInfo(lframe, rframe, detBoxes, info);

            // show stereo distance
            // enabling the drawing below here may crash the program

            // for(int i=0; i<info.rows;i++)
            // {
            //     detect_result_t *det_result = &(detect_result_group.results[i]);

            //     float median = info.at<float>(i, 0);
            //     float w1 = info.at<float>(i, 1);
            //     float h1 = info.at<float>(i, 2);
            //     float alfa = info.at<float>(i, 3);

            //     int x1 = det_result->box.left;
            //     int y1 = det_result->box.top;
            //     int x2 = det_result->box.right;
            //     int y2 = det_result->box.bottom;
            //     char dc[50], wh[50];
            //     sprintf(dc, "dis:%.2fcm ", median);
            //     sprintf(wh, "W: %.2fcm H: %.2fcm ", w1, h1);
            //     putText(Rframe, dc, Point(x1, y2), FONT_HERSHEY_PLAIN, 1.5, Scalar(0, 0, 255), 2);
            //     putText(Rframe, wh, Point(x1, y1), FONT_HERSHEY_PLAIN, 1.5, Scalar(0, 0, 255), 1.5);
            // }
        }
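
// ---- Reviewer sketch (illustration only, not part of this commit) ----------------
// The notes above suggest renaming getInfo() to calObjectRanging() and not converting
// the views to grayscale again inside it. One option is to convert once per frame in
// get_range() and hand the gray images in; calObjectRanging is a hypothetical name and
// its body would be getInfo() minus the two cvtColor calls.
#include <opencv2/imgproc.hpp>

inline void prepareGrayPair(const cv::Mat &lframe, const cv::Mat &rframe,
                            cv::Mat &grayL, cv::Mat &grayR)
{
    // done once per frame in get_range() instead of inside the ranging routine
    cv::cvtColor(lframe, grayL, cv::COLOR_BGR2GRAY);
    cv::cvtColor(rframe, grayR, cv::COLOR_BGR2GRAY);
}
// Hypothetical signature:
//     void Ranging::calObjectRanging(Mat &imgL, Mat &imgR, const Mat &grayL,
//                                    const Mat &grayR, Mat &detBoxes, Mat &info);
// -----------------------------------------------------------------------------------
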

        // Review note: this should be pulled out into a helper that draws the frame rate
        t = getTickCount() - t;
        char fps[50];
        sprintf(fps, "fps: %d", int(1 / (t / getTickFrequency())));
        putText(Rframe, fps, Point(20, 20), FONT_HERSHEY_PLAIN, 1, Scalar(0, 0, 255), 1.5);

        cv::imshow("k", Rframe);
        cv::waitKey(1);

        return std::vector<Mat>{rframe, detBoxes, info};
    }
    return std::vector<Mat>{rframe};
}
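
// ---- Reviewer sketch (illustration only, not part of this commit) ----------------
// The note above suggests moving the frame-rate overlay into its own helper. A minimal
// version using the same OpenCV calls as get_range() (drawFps is a hypothetical name):
#include <cstdio>
#include <opencv2/imgproc.hpp>

inline void drawFps(cv::Mat &frame, int64_t tickStart)
{
    int64_t elapsed = cv::getTickCount() - tickStart;
    char fps[50];
    std::sprintf(fps, "fps: %d", int(1.0 / (elapsed / cv::getTickFrequency())));
    cv::putText(frame, fps, cv::Point(20, 20), cv::FONT_HERSHEY_PLAIN, 1,
                cv::Scalar(0, 0, 255), 1.5);
}
// Usage inside get_range():
//     int64 t = getTickCount();
//     ...
//     drawFps(Rframe, t);
// -----------------------------------------------------------------------------------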