首页
学习
活动
专区
圈层
工具
发布
社区首页 >问答首页 >OpenCV演示程序在iPad上以6-10 FPS运行,这是否正常?

OpenCV演示程序在iPad上以6-10 FPS运行,这是否正常?
EN

Stack Overflow用户
提问于 2014-01-04 18:43:49
回答 1查看 748关注 0票数 0

OpenCV检测和跟踪代码在我的iPad上以6-10 FPS的帧率运行。

这是正常的吗?

我原以为他们的“示例”代码会以尽可能快的速度运行。

代码语言:javascript
复制
DetectTrackSample.cpp

#include <iostream>
#include "DetectTrackSample.h"
#include "ObjectTrackingClass.h"
#include "FeatureDetectionClass.h"
#include "Globals.h"

//! Constructs the sample with its default algorithm choices and registers
//! every user-tunable option with the framework's option system.
DetectTrackSample::DetectTrackSample()
: m_fdAlgorithmName("ORB")      // default keypoint detector
, m_feAlgorithmName("FREAK")    // default descriptor extractor
, m_maxCorners(200)             // optical-flow corner budget
, m_hessianThreshold(400)       // SURF sensitivity (within the 300-500 slider range)
, m_nFeatures(500)              // ORB feature budget (within the 0-1500 slider range)
, m_minMatches(4)               // minimum matches to accept a detection
, m_drawMatches(true)
, m_drawPerspective(true)
{
    std::vector<std::string> fdAlgos, feAlgos;

    // feature detection options
    fdAlgos.push_back("ORB");
    fdAlgos.push_back("SURF");
    registerOption("Detector",       "", &m_fdAlgorithmName, fdAlgos);

    // feature extraction options
    feAlgos.push_back("ORB");
    feAlgos.push_back("SURF");
    feAlgos.push_back("FREAK");
    registerOption("Extractor",       "", &m_feAlgorithmName, feAlgos);

    // SURF feature detector options
    registerOption("hessianThreshold", "SURF", &m_hessianThreshold, 300, 500);

    // ORB feature detector options
    registerOption("nFeatures", "ORB", &m_nFeatures, 0, 1500);

    // matcher options (label typo fixed: "Minumum" -> "Minimum")
    registerOption("Minimum matches", "Matcher", &m_minMatches, 4, 200);

    // object tracking options
    registerOption("m_maxCorners", "Tracking", &m_maxCorners, 0, 1000);

    // Display options
    registerOption("Matches", "Draw", &m_drawMatches);
    registerOption("Perspective", "Draw", &m_drawPerspective);
}

//! Returns the human-readable sample name shown in the sample list.
std::string DetectTrackSample::getName() const
{
    static const char kName[] = "Detection and Tracking";
    return kName;
}

//! Returns the bundle filename of this sample's icon image.
std::string DetectTrackSample::getSampleIcon() const
{
    static const char kIconFile[] = "DetectTrackSampleIcon.png";
    return kIconFile;
}

//! Returns a one-line description of what the sample demonstrates.
std::string DetectTrackSample::getDescription() const
{
    static const char kDescription[] =
        "Combined feature detection and object tracking sample.";
    return kDescription;
}

//! This sample always needs a user-supplied reference image of the object
//! before detection/tracking can start, so this unconditionally reports true.
bool DetectTrackSample::isReferenceFrameRequired() const
{
    return true;
}

//! Stores a grayscale copy of the reference (object) image and flags the
//! pipeline to compute its keypoints/descriptors on the next processFrame().
void DetectTrackSample::setReferenceFrame(const cv::Mat& reference)
{
    // keep only the gray version; all feature work happens on grayscale
    getGray(reference, objectImage);
    // defer the (expensive) keypoint computation to the processing loop
    computeObject = true;
}

// Reset object keypoints and descriptors
// Clears all pipeline-state flags so the sample sits idle until a new
// reference frame is supplied via setReferenceFrame().
// NOTE(review): this member function is declared const yet assigns the
// detect/compute/track flags — presumably they are declared `mutable` in
// DetectTrackSample.h; confirm, otherwise this should be non-const.
void DetectTrackSample::resetReferenceFrame() const
{
    detectObject = false;
    computeObject = false;
    trackObject = false;
}

// ---------------------------------------------------------------------------
// File-local factories. The original code duplicated this selection logic in
// both the detection and the object-computation phases; centralizing it keeps
// the two phases guaranteed-consistent.
// ---------------------------------------------------------------------------

//! Creates the configured keypoint detector ("SURF" or "ORB").
//! Logs and asserts on any other name.
static cv::Ptr<cv::FeatureDetector> createDetector(const std::string& name,
                                                   int hessianThreshold,
                                                   int nFeatures)
{
    if (name == "SURF")
        return new cv::SurfFeatureDetector(hessianThreshold);
    if (name == "ORB")
        return new cv::OrbFeatureDetector(nFeatures);

    std::cerr << "Unsupported algorithm:" << name << std::endl;
    assert(false);
    return cv::Ptr<cv::FeatureDetector>();
}

//! Creates the configured descriptor extractor ("SURF", "ORB" or "FREAK").
//! Logs and asserts on any other name.
static cv::Ptr<cv::DescriptorExtractor> createExtractor(const std::string& name)
{
    if (name == "SURF")
        return new cv::SurfDescriptorExtractor;
    if (name == "ORB")
        return new cv::OrbDescriptorExtractor;
    if (name == "FREAK")
        return new cv::FREAK;

    std::cerr << "Unsupported algorithm:" << name << std::endl;
    assert(false);
    return cv::Ptr<cv::DescriptorExtractor>();
}

//! Creates a brute-force matcher with the norm matching the extractor:
//! SURF yields float descriptors (L2); ORB and FREAK yield binary
//! descriptors (Hamming). Cross-check is disabled, as in the original.
static cv::Ptr<cv::DescriptorMatcher> createMatcher(const std::string& name)
{
    const int normType = (name == "SURF") ? cv::NORM_L2 : cv::NORM_HAMMING;
    return new cv::BFMatcher(normType, false);
}

//! Processes a frame and returns output image.
//! Runs a three-phase state machine driven by the compute/detect/track flags:
//!   compute: extract keypoints+descriptors from the reference object image;
//!   detect:  match the object against the scene and, on success, seed points;
//!   track:   follow the seeded points with optical flow until they are lost.
//! @param inputFrame  current camera frame (BGR/BGRA).
//! @param outputFrame receives the frame with optional match/perspective overlays.
//! @return always true (frame was processed).
bool DetectTrackSample::processFrame(const cv::Mat& inputFrame, cv::Mat& outputFrame)
{
    // display the frame
    inputFrame.copyTo(outputFrame);

    // convert input frame to gray scale
    getGray(inputFrame, imageNext);

    // --- tracking phase -------------------------------------------------
    if (trackObject) {
        ObjectTrackingClass tracker;
        tracker.setMaxCorners(m_maxCorners);

        // follow the previously seeded points into the current frame
        tracker.track(outputFrame,
                      imagePrev,
                      imageNext,
                      pointsPrev,
                      pointsNext,
                      status,
                      err);

        // all tracked points lost -> fall back to full detection
        if (pointsNext.empty()) {
            trackObject = false;
            detectObject = true;
        }
    }

    // --- detection phase ------------------------------------------------
    if (detectObject) {

        // NOTE(perf): the matcher, detector and extractor are rebuilt on
        // every detection frame; caching them across frames (invalidating
        // only when an option changes) would noticeably improve FPS.
        FeatureDetectionClass rmatcher;
        rmatcher.setConfidenceLevel(0.98);
        rmatcher.setMinDistanceToEpipolar(1.0);
        rmatcher.setRatio(0.65f);

        rmatcher.setFeatureDetector(
            createDetector(m_fdAlgorithmName, m_hessianThreshold, m_nFeatures));
        rmatcher.setDescriptorExtractor(createExtractor(m_feAlgorithmName));
        rmatcher.setDescriptorMatcher(createMatcher(m_feAlgorithmName));

        // match the precomputed object keypoints with the scene keypoints
        // (the fundamental matrix returned by match() is not used here)
        std::vector<cv::Point2f> objectKeypoints2f, sceneKeypoints2f;
        std::vector<cv::DMatch> matches;
        rmatcher.match(imageNext,          // input scene image
                       objectKeypoints,    // input computed object image keypoints
                       objectDescriptors,  // input computed object image descriptors
                       matches,            // output matches
                       objectKeypoints2f,  // output object keypoints (Point2f)
                       sceneKeypoints2f);  // output scene keypoints (Point2f)

        // cast avoids the signed/unsigned comparison of the original code;
        // m_minMatches is clamped to >= 4 by its registered option range
        if (matches.size() >= static_cast<std::size_t>(m_minMatches)) {

            // draw perspective lines (box object in the frame)
            if (m_drawPerspective)
                rmatcher.drawPerspective(outputFrame,
                                         objectImage,
                                         objectKeypoints2f,
                                         sceneKeypoints2f);

            // draw keypoint matches as yellow points on the output frame
            if (m_drawMatches)
                rmatcher.drawMatches(outputFrame,
                                     matches,
                                     sceneKeypoints2f);

            // seed the tracker with the matched scene points
            pointsNext = sceneKeypoints2f;

            // switch the state machine from detection to tracking
            detectObject = false;
            trackObject = true;
        }
    }

    // --- object computation phase ----------------------------------------
    // Runs once after setReferenceFrame(): caches the reference image's
    // keypoints and descriptors for all subsequent detection frames.
    if (computeObject) {

        cv::Ptr<cv::FeatureDetector> detector =
            createDetector(m_fdAlgorithmName, m_hessianThreshold, m_nFeatures);
        detector->detect(objectImage, objectKeypoints);

        cv::Ptr<cv::DescriptorExtractor> extractor =
            createExtractor(m_feAlgorithmName);
        extractor->compute(objectImage, objectKeypoints, objectDescriptors);

        computeObject = false;
        detectObject = true;
    }

    // backup previous frame
    imageNext.copyTo(imagePrev);

    // backup points array
    std::swap(pointsNext, pointsPrev);

    return true;
}
EN

回答 1

Stack Overflow用户

回答已采纳

发布于 2014-01-04 18:57:56

这是正常的。这取决于你的检测和跟踪代码。

例如:

在iPhone 4上,启用CV_HAAR_FIND_BIGGEST_OBJECT选项时,演示应用在画面中有人脸的帧上可达到约4 fps;当画面中没有人脸时,这一数值会降到约1.5 fps。不启用该选项时,可以检测多张人脸,帧率约为1.8 fps。请注意,无论处理帧率如何,实时视频预览始终以完整的30 fps运行;如果在processFrame:videoRect:videoOrientation:回调中只做极少量处理,处理帧率同样可以达到30 fps。

资料来源: 点击

票数 1
EN
页面原文内容由Stack Overflow提供。腾讯云小微IT领域专用引擎提供翻译支持
原文链接:

https://stackoverflow.com/questions/20925127

复制
相关文章

相似问题

领券
问题归档专栏文章快讯文章归档关键词归档开发者手册归档开发者手册 Section 归档