OpenCV feature matching based on optical flow


The concept of optical flow (optical flow or optic flow):
Optical flow is a pattern of apparent motion: the motion that an object, surface, or edge appears to have, from a given viewpoint, because of the relative movement between an observer (an eye, a camera, and so on) and the background. Techniques such as motion detection, image segmentation, time-to-collision estimation, motion-compensated coding, and stereo disparity computation all exploit this motion of edges or surfaces.
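For reference (not stated in the original post, this is just the standard textbook formulation behind the pyramidal Lucas-Kanade tracker used below): most optical flow methods start from the brightness-constancy assumption, which after a first-order Taylor expansion gives one linear constraint per pixel:

\[
I(x, y, t) = I(x + \Delta x,\; y + \Delta y,\; t + \Delta t)
\;\;\Longrightarrow\;\;
I_x\,u + I_y\,v + I_t = 0,
\]

where \(I_x, I_y, I_t\) are the partial derivatives of the image brightness and \((u, v)\) is the flow vector. Lucas-Kanade assumes the flow is constant inside a small window around each tracked point and solves the resulting over-determined system by least squares; the pyramidal variant repeats this from coarse to fine image scales so that larger motions can be handled.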

Related posts: the principle and assumptions of optical flow; introduction to optical flow and its OpenCV implementation.
Optical flow versus rich features: the advantage of using optical flow is that the process is usually faster and can accommodate many more matched points, which makes the reconstruction denser. Optical flow works best on consecutive images taken with the same hardware, whereas rich-feature methods are largely indifferent to this.
The two approaches also differ in what they compare: optical flow usually relies on very basic features, such as the image patch around a keypoint, while higher-order rich-feature methods such as SURF compute a more elaborate descriptor for every feature point.
Which method to use is up to the designer and depends on the input data; the descriptor-based alternative is sketched briefly below for contrast.
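A minimal sketch of the rich-feature approach mentioned above (SURF descriptors plus a brute-force matcher), for comparison only; it is not part of the original post, it assumes OpenCV is built with the opencv_contrib xfeatures2d module, and "1.jpg"/"2.jpg" are placeholder file names:

#include <opencv2/opencv.hpp>
#include <opencv2/xfeatures2d.hpp>
#include <vector>
using namespace cv;

int main()
{
    Mat img1 = imread("1.jpg", IMREAD_GRAYSCALE);
    Mat img2 = imread("2.jpg", IMREAD_GRAYSCALE);
    if (img1.empty() || img2.empty()) return -1;

    // Detect keypoints and compute a SURF descriptor for each one.
    Ptr<xfeatures2d::SURF> surf = xfeatures2d::SURF::create(400.0);
    std::vector<KeyPoint> kpts1, kpts2;
    Mat desc1, desc2;
    surf->detectAndCompute(img1, noArray(), kpts1, desc1);
    surf->detectAndCompute(img2, noArray(), kpts2, desc2);

    // Match descriptors (not raw pixel positions) with a brute-force L2 matcher.
    BFMatcher matcher(NORM_L2);
    std::vector<DMatch> matches;
    matcher.match(desc1, desc2, matches);

    Mat vis;
    drawMatches(img1, kpts1, img2, kpts2, matches, vis);
    imshow("SURF matches", vis);
    waitKey(0);
    return 0;
}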
The following code first tracks FAST keypoints from the first image into the second with optical flow, keeps the points whose tracking succeeded, and then converts the surviving tracks into feature matches between the two keypoint sets.

#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <iostream>
#include <vector>
#include <set>
// Only FAST is used here, so the contrib xfeatures2d module is not required.

using namespace std;
using namespace cv;

// Copy the pixel coordinates out of a keypoint vector.
void KeyPointsToPoints(const vector<KeyPoint>& kpts, vector<Point2f>& pts)
{
    for (size_t i = 0; i < kpts.size(); i++)
    {
        pts.push_back(kpts[i].pt);
    }
}

int main()
{
    Mat srcImage1 = imread("1.jpg", IMREAD_COLOR);
    Mat srcImage2 = imread("2.jpg", IMREAD_COLOR);
    if (!srcImage1.data || !srcImage2.data)
    {
        cout << "failed to read the input images" << endl;
        return -1;
    }

    // Detect FAST keypoints in both images.
    vector<KeyPoint> left_keypoints, right_keypoints;
    Ptr<FastFeatureDetector> ffd = FastFeatureDetector::create();
    ffd->detect(srcImage1, left_keypoints);
    ffd->detect(srcImage2, right_keypoints);

    // The LK tracker works on plain Point2f coordinates; right_points is the
    // output buffer that calcOpticalFlowPyrLK fills with the tracked positions.
    vector<Point2f> left_points;
    KeyPointsToPoints(left_keypoints, left_points);
    vector<Point2f> right_points(left_points.size());

    Mat srcImage1_gray, srcImage2_gray;
    cvtColor(srcImage1, srcImage1_gray, COLOR_BGR2GRAY);
    cvtColor(srcImage2, srcImage2_gray, COLOR_BGR2GRAY);

    // Track every left keypoint into the right image with pyramidal Lucas-Kanade.
    vector<uchar> vstatus;
    vector<float> verror;
    calcOpticalFlowPyrLK(srcImage1_gray, srcImage2_gray, left_points, right_points, vstatus, verror);

    // Visualize the flow: a line from the original to the tracked position,
    // keeping only points that were tracked successfully with a small error.
    Mat imofkl = srcImage1.clone();
    for (size_t i = 0; i < vstatus.size(); i++)
    {
        if (vstatus[i] && verror[i] < 12)
        {
            line(imofkl, left_points[i], right_points[i], Scalar(255, 255, 255), 1, 8, 0);
            circle(imofkl, right_points[i], 3, Scalar(255, 255, 255), 1, 8, 0);
        }
    }
    imshow("optical flow", imofkl);

    // Keep the well-tracked points and remember which left keypoint they came from.
    vector<Point2f> right_points_to_find;
    vector<int> right_points_to_find_back_index;
    for (size_t i = 0; i < vstatus.size(); i++)
    {
        if (vstatus[i] && verror[i] < 12)
        {
            right_points_to_find_back_index.push_back((int)i);
            right_points_to_find.push_back(right_points[i]);
        }
        else
        {
            vstatus[i] = 0;
        }
    }

    // Flatten both point sets into Nx2 float matrices so that BFMatcher can
    // compare raw pixel coordinates instead of descriptors.
    Mat right_points_to_find_flat = Mat(right_points_to_find).reshape(1, (int)right_points_to_find.size());
    vector<Point2f> right_features;
    KeyPointsToPoints(right_keypoints, right_features);
    Mat right_features_flat = Mat(right_features).reshape(1, (int)right_features.size());

    // For every tracked point, look for detected right keypoints within a
    // 2-pixel radius of the position predicted by the optical flow.
    BFMatcher matcher(NORM_L2);
    vector<vector<DMatch>> nearest_neighbors;
    matcher.radiusMatch(right_points_to_find_flat, right_features_flat, nearest_neighbors, 2.0f);

    set<int> found_in_right_points;   // right keypoints that are already matched
    vector<DMatch> matches;
    for (size_t i = 0; i < nearest_neighbors.size(); i++)
    {
        DMatch _m;
        if (nearest_neighbors[i].size() == 1)
        {
            _m = nearest_neighbors[i][0];
        }
        else if (nearest_neighbors[i].size() > 1)
        {
            // Ratio test: accept the nearest neighbor only if it is clearly
            // closer than the second nearest.
            double ratio = nearest_neighbors[i][0].distance / nearest_neighbors[i][1].distance;
            if (ratio < 0.7)
            {
                _m = nearest_neighbors[i][0];
            }
            else
            {
                continue;
            }
        }
        else
        {
            continue;   // no keypoint within the search radius
        }

        // Keep the match only if this right keypoint is not taken yet, and map
        // queryIdx back to the index of the original left keypoint.
        if (found_in_right_points.find(_m.trainIdx) == found_in_right_points.end())
        {
            _m.queryIdx = right_points_to_find_back_index[_m.queryIdx];
            matches.push_back(_m);
            found_in_right_points.insert(_m.trainIdx);
        }
    }

    cout << "pruned " << matches.size() << "/" << nearest_neighbors.size() << " matches" << endl;

    Mat result;
    drawMatches(srcImage1, left_keypoints, srcImage2, right_keypoints, matches, result);
    imshow("matches", result);
    waitKey(0);
    return 0;
}
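A note on the design: the brute-force radiusMatch here compares raw 2D pixel coordinates rather than descriptors, so it simply associates each optical-flow prediction with the detected right keypoint closest to it (within 2 pixels); the 0.7 ratio test and the found_in_right_points set keep ambiguous and duplicate assignments out of the final match list.

Not part of the original post, but a common follow-up when these matches feed a reconstruction: the surviving matches can be verified geometrically with RANSAC through cv::findFundamentalMat. A minimal sketch, assuming the matches, left_keypoints, and right_keypoints variables from the code above (it needs at least 8 matches):

    // Collect the matched pixel coordinates from the pruned matches.
    vector<Point2f> pts1, pts2;
    for (size_t i = 0; i < matches.size(); i++)
    {
        pts1.push_back(left_keypoints[matches[i].queryIdx].pt);
        pts2.push_back(right_keypoints[matches[i].trainIdx].pt);
    }

    // Estimate the fundamental matrix with RANSAC; inlier_mask[i] == 1 marks
    // matches consistent with the epipolar geometry, everything else is an outlier.
    vector<uchar> inlier_mask;
    Mat F = findFundamentalMat(pts1, pts2, FM_RANSAC, 3.0, 0.99, inlier_mask);

    vector<DMatch> inlier_matches;
    for (size_t i = 0; i < matches.size(); i++)
    {
        if (inlier_mask[i])
            inlier_matches.push_back(matches[i]);
    }
    cout << "epipolar inliers: " << inlier_matches.size() << "/" << matches.size() << endl;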

