主要思路是,讀入視頻,隔幀采用SURF計算匹配的特征點,進而計算兩圖的投影映射矩陣,做差分二值化,連通域檢測,繪制目標。
如果背景是靜態的采用camshift即可。
本文方法的速度在 debug 下約 2–3 幀/秒,release 下約 8–9 幀/秒(僅統計 SURF 配準部分,不含連通域檢測與繪制)。后續可增加選定目標,在動態模板的小鄰域中跟蹤目標,實現動態背景下的運動目標檢測;模板跟蹤速度可達 150 幀/秒。
環境:opencv2.4.9 + vs2012
#include <iostream>#include <opencv2/opencv.hpp>#include <opencv2/nonfree/nonfree.hpp> using namespace cv;using namespace std;void main(){ //VideoCapture capture(0); VideoCapture capture("3.mov"); Mat image01,image02,imgdiff; while (true) { //隔兩幀配準 capture >> image01; if (image01.empty()) { break; } capture >> image02; capture >> image02; if (image02.empty()) { break; } //GaussianBlur(image02, image02, Size(3,3), 0); double time0 = static_cast<double>(getTickCount());//開始計時 //灰度圖轉換 Mat image1,image2; cvtColor(image01,image1,CV_RGB2GRAY); cvtColor(image02,image2,CV_RGB2GRAY); //提取特征點 SurfFeatureDetector surfDetector(2500); // 海塞矩陣閾值,高一點速度會快些 vector<KeyPoint> keyPoint1,keyPoint2; surfDetector.detect(image1,keyPoint1); surfDetector.detect(image2,keyPoint2); //特征點描述,為下邊的特征點匹配做準備 SurfDescriptorExtractor SurfDescriptor; Mat imageDesc1,imageDesc2; SurfDescriptor.compute(image1,keyPoint1,imageDesc1); SurfDescriptor.compute(image2,keyPoint2,imageDesc2); //獲得匹配特征點,并提取最優配對 FlannBasedMatcher matcher; vector<DMatch> matchePoints; matcher.match(imageDesc1,imageDesc2,matchePoints,Mat()); sort(matchePoints.begin(),matchePoints.end()); //特征點排序 //獲取排在前N個的最優匹配特征點 vector<Point2f> imagePoints1,imagePoints2; for(int i=0; i<(int)(matchePoints.size()*0.25); i++) { imagePoints1.push_back(keyPoint1[matchePoints[i].queryIdx].pt); imagePoints2.push_back(keyPoint2[matchePoints[i].trainIdx].pt); } //獲取圖像1到圖像2的投影映射矩陣 尺寸為3*3 Mat homo=findHomography(imagePoints1,imagePoints2,CV_RANSAC); //cout<<"變換矩陣為:/n"<<homo<<endl<<endl; //輸出映射矩陣 //圖像配準 Mat imageTransform1,imgpeizhun,imgerzhi; warpPerspective(image01,imageTransform1,homo,Size(image02.cols,image02.rows)); //imshow("經過透視矩陣變換后",imageTransform1); absdiff(image02, imageTransform1, imgpeizhun); //imshow("配準diff", imgpeizhun); threshold(imgpeizhun, imgerzhi, 50, 255.0 , CV_THRESH_BINARY); //imshow("配準二值化", imgerzhi); //輸出所需時間 time0 = ((double)getTickCount()-time0)/getTickFrequency(); cout<<1/time0<<endl; Mat temp,image02temp; float m_BiLi = 0.9; 
image02temp = image02.clone(); cvtColor(imgerzhi,temp,CV_RGB2GRAY); //檢索連通域 Mat se=getStructuringElement(MORPH_RECT, Size(5,5)); morphologyEx(temp, temp, MORPH_DILATE, se); vector<vector<Point>> contours; findContours(temp, contours, RETR_EXTERNAL, CHAIN_APPROX_NONE); if (contours.size()<1) { continue; } for (int k = 0; k < contours.size(); k++) { Rect bomen = boundingRect(contours[k]); //省略由于配準帶來的邊緣無效信息 if (bomen.x > image02temp.cols * (1 - m_BiLi) && bomen.y > image02temp.rows * (1 - m_BiLi) && bomen.x + bomen.width < image02temp.cols * m_BiLi && bomen.y + bomen.height < image02temp.rows * m_BiLi) { rectangle(image02temp, bomen, Scalar(255,0,255), 2, 8, 0); } } /* for (int i = 50; i < image02.rows - 100; i++) { for (int j = 50; j < image02.cols - 100; j++) { uchar pixel = temp.at<uchar>(i,j); if (pixel == 255) { Rect bomen(j-7, i-7, 14, 14); rectangle(image02, bomen, Scalar(255,255,255),1,8,0); } } } */ imshow("檢測與跟蹤",image02temp); waitKey(20); } }檢測遠處運動的車輛

新聞熱點
疑難解答