Embedded Image Processing from Scratch (Pi + Qt + OpenCV): A Hands-On Exercise
Finally, we need a comprehensive experiment to verify everything built so far. The experiment is defined as: grab live frames from the camera and match their features against a captured template in real time, switching between several feature algorithms (ORB/SIFT/SURF/BRISK). This exercises both the OpenCV build (SIFT and SURF come from the contrib modules, so the library has to be compiled from source) and the basic program framework (including camera access). The program is first built and tested in a virtual machine running the PC (x86) version of the Pi system, and then ported to the Pi.
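Before moving on, it can be worth confirming that the self-compiled library really contains the contrib modules. The following is a minimal standalone sketch (a hypothetical check program, not part of the Qt project) that prints the build information and instantiates a SURF detector, which only exists in a contrib-enabled build:

// contrib_check.cpp -- hypothetical standalone check, not part of the project.
// Compile and link against the self-built library (e.g. libopencv_world.so).
#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/xfeatures2d.hpp>   // this header only exists when opencv_contrib was built in

int main()
{
    // Dump the build configuration of the library actually linked.
    std::cout << cv::getBuildInformation() << std::endl;

    // SURF lives in the contrib xfeatures2d module; creating it proves the build is complete.
    cv::Ptr<cv::Feature2D> surf = cv::xfeatures2d::SURF::create();
    std::cout << "SURF ready, descriptor size = " << surf->descriptorSize() << std::endl;
    return 0;
}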
Project configuration file (.pro):
#
# Project created by QtCreator 2017-11-29T07:39:32
#
#-------------------------------------------------

QT       += core gui

greaterThan(QT_MAJOR_VERSION, 4): QT += widgets

TARGET   = GOQTTemplate2
TEMPLATE = app

INCLUDEPATH += /usr/local/include/opencv \
               /usr/local/include/opencv2

LIBS += /usr/local/lib/libopencv_world.so

SOURCES += main.cpp\
        mainwindow.cpp \
        clickedlabel.cpp

HEADERS += mainwindow.h \
        clickedlabel.h

FORMS   += mainwindow.ui
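The LIBS line above assumes OpenCV was built as a single opencv_world library (the BUILD_opencv_world CMake option). If the build produced separate module libraries instead, a roughly equivalent line (paths assumed to match the default /usr/local install) would list the modules this program uses explicitly:

# Hypothetical alternative for a non-"world" build of OpenCV.
LIBS += -L/usr/local/lib \
        -lopencv_core -lopencv_imgproc -lopencv_highgui \
        -lopencv_videoio -lopencv_features2d -lopencv_xfeatures2d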
Main program file. A quick walkthrough of the flow: on startup the program opens the default camera and then continuously grabs and displays frames from it. When the displayed image is clicked, the current frame is saved as the template and feature matching starts, with the match result drawn on screen. A push button switches between the different feature algorithms:
//by jsxyhelu 2017/12/6
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QMouseEvent>
//global variables
Mat src;
Mat gray;
Mat tmp;
Mat dst;
Mat matMatch;       //template image
double m_lastTime;  //time
Mat grayLeft;
Mat grayRight;
Mat descriptorsLeft;
std::vector<KeyPoint> keypointsLeft;
Mat descriptorsRight;
std::vector<KeyPoint> keypointsRight;
std::vector<DMatch> matches;
std::vector<DMatch> good_matches;
Mat img_matches;
int imethod;        //0-ORB 1-SIFT 2-SURF 3-BRISK
//The OpenCV headers and the cv / cv::xfeatures2d namespaces (needed for SIFT/SURF)
//are assumed to be pulled in by mainwindow.h.
using namespace cv;
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    timer = new QTimer(this);
    imag  = new QImage();                   // initialization
    connect(timer, SIGNAL(timeout()), this, SLOT(readFarme()));   // on timeout, read the current camera frame
    bMethod = false;                        // whether a matching algorithm is active
    on_pushButton_clicked();                // main process
    // take a picture when the label is clicked
    clickLabel = new ClickedLabel(this);
    clickLabel->setGeometry(0, 0, 800, 400);
    connect(clickLabel, SIGNAL(clicked(ClickedLabel*)), this, SLOT(on_pushButton_3_clicked()));
    imethod = 0;                            // ORB
    setWindowState(Qt::WindowMaximized);    // maximize the window
}
MainWindow::~MainWindow()
{
    delete ui;
}
///// event handlers /////
// open the camera
void MainWindow::on_pushButton_clicked()
{
    // open the camera and start grabbing video from it
    videocapture = new VideoCapture(0);
    // start the timer; a timeout() signal fires every 33 ms
    timer->start(33);
}
// main process: read the next frame when timeout() fires
void MainWindow::readFarme()
{
    // grab and return one frame from the camera
    videocapture->read(matFrame);
    src = matFrame.clone();
    m_lastTime = (double)getTickCount();
    tmp = matFrame.clone();                 // copy used for display/matching
    cv::resize(tmp, tmp, Size(200, 200));
    dst = Mat(Size(tmp.cols * 2, tmp.rows), tmp.type(), Scalar(255));
    tmp.copyTo(dst(cv::Rect(0, 0, 200, 200)));
    // create the feature detector/extractor and its matcher
    Ptr<Feature2D> extractor;
    BFMatcher matcher;
    switch (imethod)
    {
    case 1:                                 // "SIFT"
        extractor = SIFT::create();
        matcher = BFMatcher(NORM_L2);
        break;
    case 2:                                 // "SURF"
        extractor = SURF::create();
        matcher = BFMatcher(NORM_L2);
        break;
    case 3:                                 // "BRISK"
        extractor = BRISK::create();
        matcher = BFMatcher(NORM_HAMMING);
        break;
    case 0:                                 // "ORB"
        extractor = ORB::create();
        matcher = BFMatcher(NORM_HAMMING);
        break;
    }
    if (matMatch.rows > 0)
    {
        // a template exists: match against it and filter the results
        double max_dist = 0;
        double min_dist = 100;
        cv::resize(matMatch, matMatch, Size(200, 200));
        // convert both images to grayscale
        cvtColor(tmp, grayLeft, COLOR_BGR2GRAY);
        cvtColor(matMatch, grayRight, COLOR_BGR2GRAY);
        // detect keypoints and compute descriptors
        extractor->detectAndCompute(grayLeft, Mat(), keypointsLeft, descriptorsLeft);
        extractor->detectAndCompute(grayRight, Mat(), keypointsRight, descriptorsRight);
        matcher.match(descriptorsLeft, descriptorsRight, matches);
        // find the minimum and maximum match distances
        for (int i = 0; i < descriptorsLeft.rows; i++)
        {
            double dist = matches[i].distance;
            if (dist < min_dist) min_dist = dist;
            if (dist > max_dist) max_dist = dist;
        }
        // keep only the "good" matches
        for (int i = 0; i < descriptorsLeft.rows; i++)
        {
            if (matches[i].distance <= max(2 * min_dist, 0.02))
            {
                good_matches.push_back(matches[i]);
            }
        }
        drawMatches(tmp, keypointsLeft, matMatch, keypointsRight, good_matches, dst);
        // clear for the next frame
        good_matches.clear();
    }
    cv::resize(dst, dst, Size(800, 400));
    switch (imethod)
    {
    case 0:
        putText(dst, "METHOD:ORB",   Point(10, 350), CV_FONT_HERSHEY_DUPLEX, 1.0f, Scalar(0, 0, 255));
        break;
    case 1:
        putText(dst, "METHOD:SIFT",  Point(10, 350), CV_FONT_HERSHEY_DUPLEX, 1.0f, Scalar(0, 0, 255));
        break;
    case 2:
        putText(dst, "METHOD:SURF",  Point(10, 350), CV_FONT_HERSHEY_DUPLEX, 1.0f, Scalar(0, 0, 255));
        break;
    case 3:
        putText(dst, "METHOD:BRISK", Point(10, 350), CV_FONT_HERSHEY_DUPLEX, 1.0f, Scalar(0, 0, 255));
        break;
    }
    // convert to a Qt-friendly format
    QPixmap qpixmap = Mat2QImage(dst);
    // show the result on the label
    clickLabel->setPixmap(qpixmap);
    m_lastTime = (double)getTickCount();
}
// switch the feature algorithm (cycle ORB -> SIFT -> SURF -> BRISK -> ORB)
void MainWindow::on_pushButton_2_clicked()
{
    if (imethod == 3)
    {
        imethod = 0;
    }
    else
    {
        imethod += 1;
    }
}
// action: grab the current frame as the matching template
void MainWindow::on_pushButton_3_clicked()
{
    matMatch = src.clone();
}
// exit
void MainWindow::on_pushButton_4_clicked()
{
    timer->stop();              // stop reading frames
    videocapture->release();    // release the camera
    // quit the application
    QApplication::exit(0);
}
///// helper functions /////
// format conversion: cv::Mat -> QPixmap
QPixmap Mat2QImage(Mat src)
{
    QImage img;
    // convert according to what Qt expects for display
    if (src.channels() == 3)
    {
        cvtColor(src, tmp, CV_BGR2RGB);
        img = QImage((const unsigned char*)(tmp.data), tmp.cols, tmp.rows, QImage::Format_RGB888);
    }
    else
    {
        img = QImage((const unsigned char*)(src.data), src.cols, src.rows, QImage::Format_Indexed8);
    }
    QPixmap qimg = QPixmap::fromImage(img);
    return qimg;
}
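The filter inside readFarme() keeps matches whose distance is below max(2*min_dist, 0.02). A common alternative (not used in the program above) is Lowe's ratio test based on the two nearest neighbours returned by knnMatch; a minimal sketch, assuming the same matcher and descriptor variables as above:

// Hypothetical alternative filter (Lowe's ratio test), shown for comparison only.
std::vector<std::vector<cv::DMatch>> knnMatches;
matcher.knnMatch(descriptorsLeft, descriptorsRight, knnMatches, 2);   // two nearest neighbours per descriptor
const float ratioThresh = 0.75f;                                      // assumed threshold; tune per scene
std::vector<cv::DMatch> ratioGood;
for (size_t i = 0; i < knnMatches.size(); i++)
{
    if (knnMatches[i].size() == 2 &&
        knnMatches[i][0].distance < ratioThresh * knnMatches[i][1].distance)
    {
        ratioGood.push_back(knnMatches[i][0]);
    }
}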