opencv+arduino實(shí)現(xiàn)物體點(diǎn)追蹤效果

本文要實(shí)現(xiàn)的效果是:在攝像頭畫(huà)面中選擇一個(gè)追蹤點(diǎn),由PC控制攝像頭的舵機(jī),使該點(diǎn)始終保持在圖像的中心。

網(wǎng)站建設(shè)哪家好,找成都創(chuàng)新互聯(lián)!專(zhuān)注于網(wǎng)頁(yè)設(shè)計(jì)、網(wǎng)站建設(shè)、微信開(kāi)發(fā)、小程序設(shè)計(jì)、集團(tuán)企業(yè)網(wǎng)站建設(shè)等服務(wù)項(xiàng)目。為回饋新老客戶(hù)創(chuàng)新互聯(lián)還提供了昭陽(yáng)免費(fèi)建站歡迎大家使用!

要點(diǎn):使用光流法在舵機(jī)旋轉(zhuǎn)的同時(shí)進(jìn)行追蹤,若該點(diǎn)運(yùn)動(dòng),則攝像頭跟蹤聯(lián)動(dòng)。

#include<opencv2\opencv.hpp> 
#include<opencv\cv.h> 
#include<opencv\highgui.h> 
#include<math.h> 
#include<Windows.h> 
#include<string.h> 
 
 
// Pull std and OpenCV symbols into file scope (matches the original author's style).
using namespace std; 
using namespace cv; 
 
// Name of the single display window, used for both output and mouse input.
#define WINDOW_NAME "【程序窗口】"    
 
// Forward declarations.
void on_MouseHandle(int event, int x, int y, int flags, void* param); // mouse callback: user drags a selection box
void DrawRectangle( cv::Mat& img, cv::Rect box ); // declared but never defined/used in the visible code
void tracking(Mat &frame,vector<Point2f> temp);   // one LK optical-flow step for the selected point(s)
 
// --- Serial-port (Arduino) state ---
HANDLE hComm;           // Win32 handle to the serial port
LPCWSTR pStr=L"COM4";   // port name — assumes the Arduino enumerates as COM4; TODO confirm on target machine
char lpOutbuffer[100];  // outgoing command buffer (only byte 0 is ever assigned)
DWORD dwbyte=100;       // byte count handed to WriteFile (also overwritten as its bytes-written out-param)
// Image buffers shared between main(), the mouse callback and tracking().
Mat srcImage,grayImage,tempImage1,tempImage,imageROI,grayprev; 
// --- goodFeaturesToTrack parameters ---
int g_maxCornerNumber = 1;   // track a single corner
double qualityLevel = 0.01;  // minimal accepted corner quality (fraction of the best)
double minDistance = 10;     // minimal distance between returned corners
int blockSize = 3;           // neighborhood size for the corner measure
double k = 0.04;             // Harris free parameter (unused here: useHarrisDetector=false)
vector<Point2f> corners;     // current tracked point position(s), full-image coordinates
vector<Point2f> pre_corners; // positions produced by the latest optical-flow step
vector<Point2f> counts;      // unused in the visible code
vector<uchar> status;        // per-point optical-flow success flags
vector<float> err;           // per-point optical-flow error
// --- Mouse-selection rubber-band state ---
Rect g_rectangle; 
Rect g_temprectangle;        // unused in the visible code
bool g_bDrawingBox = false;  // true while the left mouse button is held down
 
int main( int argc, char** argv )  
{ 
  Mat frame; 
  Mat result; 
 
  COMSTAT Comstat; 
  DWORD dwError; 
  BOOL bWritestat; 
  hComm=CreateFile(pStr,GENERIC_READ | GENERIC_WRITE,0,0,OPEN_EXISTING, 0,NULL); 
  if (hComm == INVALID_HANDLE_VALUE) 
  { 
    cout<<"FLASE"; 
    return -1; 
  } 
  else 
  { 
    cout<<"TURE"; 
  } 
  DCB dcb; 
  GetCommState(hComm,&dcb); 
  dcb.BaudRate=9600; 
  dcb.ByteSize=8; 
  dcb.Parity=NOPARITY; 
  dcb.StopBits=TWOSTOPBITS; 
  bool set=SetCommState(hComm,&dcb); 
  bool sup=SetupComm(hComm,1024,1024); 
 
  VideoCapture capture(0); 
 
  namedWindow( WINDOW_NAME ); 
  setMouseCallback(WINDOW_NAME,on_MouseHandle,(void*)&frame); 
 
  while(1) 
  { 
    capture >> frame; 
 
      if(!frame.empty()) 
      {  
        cvtColor(frame,grayImage,CV_RGB2GRAY); 
        if( g_bDrawingBox )  
        rectangle(frame,g_rectangle.tl(),g_rectangle.br(),Scalar(255,255,255)); 
        if (corners.size()!=0) 
        { 
          bool can=PurgeComm(hComm,PURGE_TXCLEAR); 
          if (corners[0].x>(frame.cols/2+100)) 
          { 
            lpOutbuffer[0]='a'; 
            bool ne=WriteFile(hComm,lpOutbuffer,dwbyte,&dwbyte,NULL); 
          } 
          else if (corners[0].x<(frame.cols/2-100)) 
          { 
            lpOutbuffer[0]='b'; 
            bool ne=WriteFile(hComm,lpOutbuffer,dwbyte,&dwbyte,NULL); 
          } 
          tracking(frame,corners); 
          rectangle(frame,Point(corners[0].x-10,corners[0].y-10),Point(corners[0].x+10,corners[0].y+10),Scalar(255,255,255));          
        } 
        imshow( WINDOW_NAME, frame ); 
      } 
      else 
      {  
        printf(" --(!) No captured frame -- Break!"); 
        break; 
      } 
 
      int c = waitKey(50); 
      if( (char)c == 27 ) 
      { 
        break;  
      }  
  } 
  return 0; 
} 
void on_MouseHandle(int event, int x, int y, int flags, void* param) 
{ 
 
  Mat& image = *(cv::Mat*) param; 
  switch( event) 
  { 
  case EVENT_MOUSEMOVE:  
    { 
      if( g_bDrawingBox ) 
      { 
        g_rectangle.width = x-g_rectangle.x; 
        g_rectangle.height = y-g_rectangle.y; 
      } 
    } 
    break; 
 
  case EVENT_LBUTTONDOWN:  
    { 
      g_bDrawingBox = true; 
      g_rectangle =Rect( x, y, 0, 0 ); 
    } 
    break; 
 
  case EVENT_LBUTTONUP:  
    { 
      g_bDrawingBox = false; 
      if( g_rectangle.width < 0 ) 
      { 
        g_rectangle.x += g_rectangle.width; 
        g_rectangle.width *= -1; 
      } 
 
      if( g_rectangle.height < 0 )  
      { 
        g_rectangle.y += g_rectangle.height; 
        g_rectangle.height *= -1; 
      } 
      imageROI=grayImage(g_rectangle); 
      goodFeaturesToTrack( imageROI,corners,g_maxCornerNumber,qualityLevel,minDistance,Mat(),blockSize,false,k ); 
      for (int i = 0; i < corners.size(); i++) 
      { 
        corners[i].x=corners[i].x+g_rectangle.x; 
        corners[i].y=corners[i].y+g_rectangle.y; 
      } 
    } 
    break; 
 
  } 
} 
 
/**
 * One pyramidal Lucas-Kanade optical-flow step.
 * Estimates where the points in `temp` moved in `frame`, draws each motion
 * vector, and publishes the new positions into the global `corners`.  A point
 * whose flow could not be found keeps its previous position, so the size of
 * `corners` is preserved (callers index corners[0] unconditionally).
 * The grayscale frame is kept in the global `grayprev` for the next call.
 *
 * @param frame current BGR camera frame (drawn on in place)
 * @param temp  point positions from the previous frame
 */
void tracking(Mat &frame,vector<Point2f> temp) 
{ 
  if (temp.empty()) 
    return;   // calcOpticalFlowPyrLK asserts on an empty point set
 
  cvtColor(frame, tempImage1, COLOR_BGR2GRAY); 
 
  if (grayprev.empty()) 
  { 
    // First call: no previous frame yet — flow is computed against itself.
    tempImage1.copyTo(grayprev); 
  } 
 
  calcOpticalFlowPyrLK(grayprev, tempImage1, temp, pre_corners, status, err); 
 
  for (size_t i=0; i<pre_corners.size(); i++) 
  { 
    if (!status[i]) 
    { 
      // Flow not found for this point — keep the old position instead of
      // propagating a garbage coordinate (the original ignored `status`).
      pre_corners[i] = temp[i]; 
      continue; 
    } 
    line(frame, temp[i], pre_corners[i], Scalar(0, 0, 255)); 
    circle(frame, pre_corners[i], 4, Scalar(0, 255, 0), -1,8,0); 
  } 
 
  swap(pre_corners, corners);   // publish new positions into the global
  swap(grayprev, tempImage1);   // current gray frame becomes "previous"
} 

以上就是本文的全部?jī)?nèi)容,希望對(duì)大家的學(xué)習(xí)有所幫助,也希望大家多多支持創(chuàng)新互聯(lián)。

分享標(biāo)題:opencv+arduino實(shí)現(xiàn)物體點(diǎn)追蹤效果
標(biāo)題路徑:http://www.muchs.cn/article24/jiopce.html

成都網(wǎng)站建設(shè)公司_創(chuàng)新互聯(lián),為您提供網(wǎng)站設(shè)計(jì)公司面包屑導(dǎo)航、網(wǎng)站設(shè)計(jì)、網(wǎng)站策劃網(wǎng)站建設(shè)、自適應(yīng)網(wǎng)站

廣告

聲明:本網(wǎng)站發(fā)布的內(nèi)容(圖片、視頻和文字)以用戶(hù)投稿、用戶(hù)轉(zhuǎn)載內(nèi)容為主,如果涉及侵權(quán)請(qǐng)盡快告知,我們將會(huì)在第一時(shí)間刪除。文章觀點(diǎn)不代表本網(wǎng)站立場(chǎng),如需處理請(qǐng)聯(lián)系客服。電話(huà):028-86922220;郵箱:631063699@qq.com。內(nèi)容未經(jīng)允許不得轉(zhuǎn)載,或轉(zhuǎn)載時(shí)需注明來(lái)源: 創(chuàng)新互聯(lián)

成都定制網(wǎng)站網(wǎng)頁(yè)設(shè)計(jì)