Refactored GPU keypoint detector and descriptor extractor

git-svn-id: http://find-object.googlecode.com/svn/trunk/find_object@342 620bd6b2-0a58-f614-fd9a-1bd335dccda9
matlabbe 2014-06-19 21:52:04 +00:00
parent 7bdf4e903e
commit 0dd16a6ef4
9 changed files with 269 additions and 163 deletions

ImagesTcpServer.cpp

@ -66,7 +66,7 @@ void ImagesTcpServer::publishImage(const cv::Mat & image)
QDataStream out(&block, QIODevice::WriteOnly);
out.setVersion(QDataStream::Qt_4_0);
out << (quint64)0;
out.writeRawData((char*)buf.data(), buf.size());
out.writeRawData((char*)buf.data(), (int)buf.size());
out.device()->seek(0);
out << (quint64)(block.size() - sizeof(quint64));
(*iter)->write(block);
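
Note: this hunk uses QDataStream's length-prefix framing: a quint64 placeholder is written first, the image bytes follow, then seek(0) overwrites the placeholder with the payload size (block size minus the 8-byte header). For reference, a hypothetical receiver for this framing (not part of this commit, assuming a QTcpSocket client) could look like:

#include <QtCore/QDataStream>
#include <QtNetwork/QTcpSocket>

void readImageBlock(QTcpSocket * socket)
{
    QDataStream in(socket);
    in.setVersion(QDataStream::Qt_4_0);
    static quint64 blockSize = 0; // persists across partial reads
    if(blockSize == 0)
    {
        if(socket->bytesAvailable() < (qint64)sizeof(quint64))
            return; // wait until the 8-byte size header is buffered
        in >> blockSize;
    }
    if(socket->bytesAvailable() < (qint64)blockSize)
        return; // wait until the whole payload is buffered
    QByteArray payload((int)blockSize, 0);
    in.readRawData(payload.data(), payload.size());
    blockSize = 0; // ready for the next frame
    // payload now holds the compressed image bytes
}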

ImagesTcpServer.h

@ -27,7 +27,6 @@ private slots:
void publishImage(const cv::Mat & image);
private:
float hz_;
Camera camera_;
};

AddObjectDialog.cpp

@ -27,8 +27,8 @@ AddObjectDialog::AddObjectDialog(Camera * camera, const cv::Mat & image, bool mi
ui_ = new Ui_addObjectDialog();
ui_->setupUi(this);
detector_ = Settings::createFeaturesDetector();
extractor_ = Settings::createDescriptorsExtractor();
detector_ = Settings::createKeypointDetector();
extractor_ = Settings::createDescriptorExtractor();
Q_ASSERT(detector_ != 0 && extractor_ != 0);
connect(ui_->pushButton_cancel, SIGNAL(clicked()), this, SLOT(cancel()));

AddObjectDialog.h

@ -14,6 +14,8 @@ class Ui_addObjectDialog;
class ObjWidget;
class Camera;
class KeypointItem;
class KeypointDetector;
class DescriptorExtractor;
class AddObjectDialog : public QDialog {
@ -47,8 +49,8 @@ private:
Camera * camera_;
ObjWidget * object_;
cv::Mat cvImage_;
cv::FeatureDetector * detector_;
cv::DescriptorExtractor * extractor_;
KeypointDetector * detector_;
DescriptorExtractor * extractor_;
enum State{kTakePicture, kSelectFeatures, kVerifySelection, kClosing};
int state_;

MainWindow.cpp

@ -15,6 +15,7 @@
#include "AboutDialog.h"
#include "TcpServer.h"
#include "rtabmap/PdfPlot.h"
#include "Vocabulary.h"
#include <iostream>
#include <stdio.h>
@ -49,6 +50,7 @@ MainWindow::MainWindow(Camera * camera, const QString & settings, QWidget * pare
settings_(settings),
likelihoodCurve_(0),
inliersCurve_(0),
vocabulary_(new Vocabulary()),
lowestRefreshRate_(99),
objectsModified_(false),
tcpServer_(0),
@ -117,13 +119,14 @@ MainWindow::MainWindow(Camera * camera, const QString & settings, QWidget * pare
ui_->toolBox->updateParameter(Settings::kFeature2D_Fast_gpu());
ui_->toolBox->updateParameter(Settings::kFeature2D_ORB_gpu());
ui_->toolBox->getParameterWidget(Settings::kFeature2D_SURF_gpu())->setEnabled(false);
ui_->toolBox->getParameterWidget(Settings::kFeature2D_SURF_keypointsRatio())->setEnabled(false);
ui_->toolBox->getParameterWidget(Settings::kFeature2D_Fast_gpu())->setEnabled(false);
ui_->toolBox->getParameterWidget(Settings::kFeature2D_Fast_keypointsRatio())->setEnabled(false);
ui_->toolBox->getParameterWidget(Settings::kFeature2D_ORB_gpu())->setEnabled(false);
}
detector_ = Settings::createFeaturesDetector();
extractor_ = Settings::createDescriptorsExtractor();
detector_ = Settings::createKeypointDetector();
extractor_ = Settings::createDescriptorExtractor();
Q_ASSERT(detector_ != 0 && extractor_ != 0);
connect((QDoubleSpinBox*)ui_->toolBox->getParameterWidget(Settings::kCamera_4imageRate()),
@ -217,6 +220,7 @@ MainWindow::~MainWindow()
camera_->stop();
delete detector_;
delete extractor_;
delete vocabulary_;
objectsDescriptors_.clear();
qDeleteAll(objects_.begin(), objects_.end());
objects_.clear();
@ -349,7 +353,7 @@ int MainWindow::loadObjects(const QString & dirPath)
{
this->updateObjects();
}
loadedObjects = names.size();
loadedObjects = (int)names.size();
}
return loadedObjects;
}
@ -756,7 +760,7 @@ protected:
QTime time;
time.start();
printf("Extracting descriptors from object %d...\n", objectId_);
cv::FeatureDetector * detector = Settings::createFeaturesDetector();
KeypointDetector * detector = Settings::createKeypointDetector();
keypoints_.clear();
descriptors_ = cv::Mat();
detector->detect(image_, keypoints_);
@ -767,12 +771,12 @@ protected:
int maxFeatures = Settings::getFeature2D_3MaxFeatures();
if(maxFeatures > 0 && (int)keypoints_.size() > maxFeatures)
{
int previousCount = keypoints_.size();
int previousCount = (int)keypoints_.size();
keypoints_ = limitKeypoints(keypoints_, maxFeatures);
printf("obj=%d, %d keypoints removed, (kept %d), min/max response=%f/%f\n", objectId_, previousCount-(int)keypoints_.size(), (int)keypoints_.size(), keypoints_.size()?keypoints_.back().response:0.0f, keypoints_.size()?keypoints_.front().response:0.0f);
}
cv::DescriptorExtractor * extractor = Settings::createDescriptorsExtractor();
DescriptorExtractor * extractor = Settings::createDescriptorExtractor();
extractor->compute(image_, keypoints_, descriptors_);
delete extractor;
if((int)keypoints_.size() != descriptors_.rows)
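
For reference, a minimal sketch of the detect/compute pipeline this thread now runs through the new wrappers (the keypoint cap via limitKeypoints() is left out; assumes the Settings.h declarations further below):

#include <opencv2/core/core.hpp>
#include "Settings.h"

cv::Mat extractDescriptors(const cv::Mat & image)
{
    std::vector<cv::KeyPoint> keypoints;
    cv::Mat descriptors;
    KeypointDetector * detector = Settings::createKeypointDetector();
    DescriptorExtractor * extractor = Settings::createDescriptorExtractor();
    detector->detect(image, keypoints); // dispatches to the CPU or GPU back end
    extractor->compute(image, keypoints, descriptors); // descriptors.rows == keypoints.size() on success
    delete detector;
    delete extractor;
    return descriptors;
}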
@ -858,7 +862,7 @@ void MainWindow::updateData()
objectsDescriptors_.clear();
dataRange_.clear();
vocabulary_.clear();
vocabulary_->clear();
int count = 0;
int dim = -1;
int type = -1;
@ -944,13 +948,13 @@ void MainWindow::updateData()
int addedWords = 0;
for(int i=0; i<objects_.size(); ++i)
{
QMultiMap<int, int> words = vocabulary_.addWords(objects_[i]->descriptors(), i, incremental);
QMultiMap<int, int> words = vocabulary_->addWords(objects_[i]->descriptors(), i, incremental);
objects_[i]->setWords(words);
addedWords += words.uniqueKeys().size();
bool updated = false;
if(incremental && addedWords && addedWords >= updateVocabularyMinWords)
{
vocabulary_.update();
vocabulary_->update();
addedWords = 0;
updated = true;
}
@ -958,23 +962,23 @@ void MainWindow::updateData()
objects_[i]->id(),
words.uniqueKeys().size(),
objects_[i]->descriptors().rows,
vocabulary_.size(),
vocabulary_->size(),
localTime.restart(),
updated?"updated":"");
}
if(addedWords)
{
vocabulary_.update();
vocabulary_->update();
}
ui_->label_timeIndexing->setNum(time.elapsed());
ui_->label_vocabularySize->setNum(vocabulary_.size());
ui_->label_vocabularySize->setNum(vocabulary_->size());
if(incremental)
{
printf("Creating incremental vocabulary... done! size=%d (%d ms)\n", vocabulary_.size(), time.elapsed());
printf("Creating incremental vocabulary... done! size=%d (%d ms)\n", vocabulary_->size(), time.elapsed());
}
else
{
printf("Creating vocabulary... done! size=%d (%d ms)\n", vocabulary_.size(), time.elapsed());
printf("Creating vocabulary... done! size=%d (%d ms)\n", vocabulary_->size(), time.elapsed());
}
}
}
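
The incremental path above amortizes the FLANN re-index: Vocabulary::update() runs only once at least updateVocabularyMinWords new words have accumulated. A condensed sketch of that policy, assuming the Vocabulary API used in this diff (addWords/update):

#include <vector>
#include <QtCore/QMultiMap>
#include <opencv2/core/core.hpp>
#include "Vocabulary.h"

void indexObjects(Vocabulary * vocabulary, const std::vector<cv::Mat> & objectDescriptors, int minWordsBeforeUpdate)
{
    int addedWords = 0;
    for(unsigned int i=0; i<objectDescriptors.size(); ++i)
    {
        // incremental=true: new descriptors are matched against existing words first
        QMultiMap<int, int> words = vocabulary->addWords(objectDescriptors[i], (int)i, true);
        addedWords += words.uniqueKeys().size();
        if(addedWords >= minWordsBeforeUpdate)
        {
            vocabulary->update(); // rebuild the nearest-neighbor index
            addedWords = 0;
        }
    }
    if(addedWords)
    {
        vocabulary->update(); // flush the remaining words
    }
}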
@ -1366,8 +1370,8 @@ void MainWindow::update(const cv::Mat & image)
ui_->imageView_source->setData(keypoints, cv::Mat(), image, Settings::currentDetectorType(), Settings::currentDescriptorType());
}
bool consistentNNData = (vocabulary_.size()!=0 && vocabulary_.wordToObjects().begin().value()!=-1 && Settings::getGeneral_invertedSearch()) ||
((vocabulary_.size()==0 || vocabulary_.wordToObjects().begin().value()==-1) && !Settings::getGeneral_invertedSearch());
bool consistentNNData = (vocabulary_->size()!=0 && vocabulary_->wordToObjects().begin().value()!=-1 && Settings::getGeneral_invertedSearch()) ||
((vocabulary_->size()==0 || vocabulary_->wordToObjects().begin().value()==-1) && !Settings::getGeneral_invertedSearch());
// COMPARE
if(!objectsDescriptors_.empty() &&
@ -1385,15 +1389,15 @@ void MainWindow::update(const cv::Mat & image)
{
// CREATE INDEX for the scene
//printf("Creating FLANN index (%s)\n", Settings::currentNearestNeighborType().toStdString().c_str());
vocabulary_.clear();
QMultiMap<int, int> words = vocabulary_.addWords(descriptors, -1, Settings::getGeneral_vocabularyIncremental());
vocabulary_->clear();
QMultiMap<int, int> words = vocabulary_->addWords(descriptors, -1, Settings::getGeneral_vocabularyIncremental());
if(!Settings::getGeneral_vocabularyIncremental())
{
vocabulary_.update();
vocabulary_->update();
}
ui_->imageView_source->setWords(words);
ui_->label_timeIndexing->setNum(time.restart());
ui_->label_vocabularySize->setNum(vocabulary_.size());
ui_->label_vocabularySize->setNum(vocabulary_->size());
}
if(Settings::getGeneral_invertedSearch() || Settings::getGeneral_threads() == 1)
@ -1407,14 +1411,14 @@ void MainWindow::update(const cv::Mat & image)
//match objects to scene
results = cv::Mat(objectsDescriptors_[0].rows, k, CV_32SC1); // results index
dists = cv::Mat(objectsDescriptors_[0].rows, k, CV_32FC1); // Distance results are CV_32FC1
vocabulary_.search(objectsDescriptors_[0], results, dists, k);
vocabulary_->search(objectsDescriptors_[0], results, dists, k);
}
else
{
//match scene to objects
results = cv::Mat(descriptors.rows, k, CV_32SC1); // results index
dists = cv::Mat(descriptors.rows, k, CV_32FC1); // Distance results are CV_32FC1
vocabulary_.search(descriptors, results, dists, k);
vocabulary_->search(descriptors, results, dists, k);
}
// PROCESS RESULTS
@ -1459,11 +1463,11 @@ void MainWindow::update(const cv::Mat & image)
if(Settings::getGeneral_invertedSearch())
{
int wordId = results.at<int>(i,0);
QList<int> objIndexes = vocabulary_.wordToObjects().values(wordId);
QList<int> objIndexes = vocabulary_->wordToObjects().values(wordId);
for(int j=0; j<objIndexes.size(); ++j)
{
// just add unique matches
if(vocabulary_.wordToObjects().count(wordId, objIndexes[j]) == 1)
if(vocabulary_->wordToObjects().count(wordId, objIndexes[j]) == 1)
{
matches[objIndexes[j]].insert(objects_.at(objIndexes[j])->words().value(wordId), i);
}
@ -1491,7 +1495,7 @@ void MainWindow::update(const cv::Mat & image)
unsigned int threadCounts = Settings::getGeneral_threads();
if(threadCounts == 0)
{
threadCounts = objectsDescriptors_.size();
threadCounts = (int)objectsDescriptors_.size();
}
for(unsigned int j=0; j<objectsDescriptors_.size(); j+=threadCounts)
{
@ -1499,7 +1503,7 @@ void MainWindow::update(const cv::Mat & image)
for(unsigned int k=j; k<j+threadCounts && k<objectsDescriptors_.size(); ++k)
{
threads.push_back(new SearchThread(&vocabulary_, k, &objectsDescriptors_[k], ui_->imageView_source));
threads.push_back(new SearchThread(vocabulary_, k, &objectsDescriptors_[k], ui_->imageView_source));
threads.back()->start();
}
@ -1823,6 +1827,9 @@ void MainWindow::notifyParametersChanged(const QStringList & paramChanged)
for(QStringList::const_iterator iter = paramChanged.begin(); iter!=paramChanged.end(); ++iter)
{
printf("Parameter changed: %s -> \"%s\"\n", iter->toStdString().c_str(), Settings::getParameter(*iter).toString().toStdString().c_str());
printf("lastObjectsUpdateParameters_.value(*iter)=%s, Settings::getParameter(*iter)=%s",
lastObjectsUpdateParameters_.value(*iter).toString().toStdString().c_str(),
Settings::getParameter(*iter).toString().toStdString().c_str());
if(lastObjectsUpdateParameters_.value(*iter) != Settings::getParameter(*iter))
{
if(!detectorDescriptorParamsChanged && iter->contains("Feature2D"))
@ -1837,6 +1844,7 @@ void MainWindow::notifyParametersChanged(const QStringList & paramChanged)
{
nearestNeighborParamsChanged = true;
}
lastObjectsUpdateParameters_[*iter] = Settings::getParameter(*iter);
}
if(iter->compare(Settings::kGeneral_port()) == 0 &&
@ -1852,8 +1860,8 @@ void MainWindow::notifyParametersChanged(const QStringList & paramChanged)
//Re-init detector and extractor
delete detector_;
delete extractor_;
detector_ = Settings::createFeaturesDetector();
extractor_ = Settings::createDescriptorsExtractor();
detector_ = Settings::createKeypointDetector();
extractor_ = Settings::createDescriptorExtractor();
Q_ASSERT(detector_ != 0 && extractor_ != 0);
}

MainWindow.h

@ -12,11 +12,7 @@
#include <QtCore/QMap>
#include <QtCore/QByteArray>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/imgproc/imgproc_c.h>
#include "Vocabulary.h"
#include <opencv2/opencv.hpp>
class Ui_mainWindow;
class ObjWidget;
@ -25,6 +21,9 @@ class ParametersToolBox;
class QLabel;
class AboutDialog;
class TcpServer;
class KeypointDetector;
class DescriptorExtractor;
class Vocabulary;
namespace rtabmap
{
@ -96,7 +95,7 @@ private:
AboutDialog * aboutDialog_;
QList<ObjWidget*> objects_;
std::vector<cv::Mat> objectsDescriptors_;
Vocabulary vocabulary_;
Vocabulary * vocabulary_;
QMap<int, int> dataRange_; // <last id of object's descriptor, id>
QTime updateRate_;
QTime refreshStartTime_;
@ -105,8 +104,8 @@ private:
QMap<int, QByteArray> imagesMap_;
TcpServer * tcpServer_;
QMap<QString, QVariant> lastObjectsUpdateParameters_; // ParametersMap
cv::FeatureDetector * detector_;
cv::DescriptorExtractor * extractor_;
KeypointDetector * detector_;
DescriptorExtractor * extractor_;
};
#endif /* MainWindow_H_ */

ObjWidget.cpp

@ -247,7 +247,7 @@ void ObjWidget::setData(const std::vector<cv::KeyPoint> & keypoints,
{
keypoints_ = keypoints;
descriptors_ = descriptors;
kptColors_ = QVector<QColor>(keypoints.size(), defaultColor());
kptColors_ = QVector<QColor>((int)keypoints.size(), defaultColor());
keypointItems_.clear();
rectItems_.clear();
graphicsView_->scene()->clear();

Settings.cpp

@ -11,6 +11,7 @@
#include <opencv2/nonfree/features2d.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/nonfree/gpu.hpp>
#include <opencv2/gpu/gpu.hpp>
#define VERBOSE 0
@ -118,56 +119,77 @@ void Settings::saveSettings(const QString & fileName, const QByteArray & windowG
printf("Settings saved to %s\n", path.toStdString().c_str());
}
class GPUSURF : public cv::Feature2D
class GPUFeature2D
{
public:
GPUFeature2D() {}
virtual ~GPUFeature2D() {}
virtual void detectKeypoints(const cv::Mat & image,
std::vector<cv::KeyPoint> & keypoints) = 0;
virtual void computeDescriptors(const cv::Mat & image,
std::vector<cv::KeyPoint> & keypoints,
cv::Mat & descriptors) = 0;
};
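
GPUFeature2D is the new common interface for the GPU back ends. Unlike cv::FeatureDetector::detectImpl(), its methods are non-const, so a subclass can hold the GPU object (and its buffers) as a plain member. A hypothetical implementation only has to override the two pure virtuals:

class MyGpuFeature : public GPUFeature2D
{
public:
    virtual void detectKeypoints(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints)
    {
        // upload the image, run the GPU detector, download the keypoints
    }
    virtual void computeDescriptors(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints, cv::Mat & descriptors)
    {
        // upload the image, compute descriptors for the given keypoints, download them
    }
};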
class GPUSURF : public GPUFeature2D
{
public:
GPUSURF(double hessianThreshold,
int nOctaves = Settings::defaultFeature2D_SURF_nOctaves(),
int nOctaveLayers = Settings::defaultFeature2D_SURF_nOctaveLayers(),
bool extended = Settings::defaultFeature2D_SURF_extended(),
bool upright = Settings::defaultFeature2D_SURF_upright()) :
hessianThreshold_(hessianThreshold),
nOctaves_(nOctaves),
nOctaveLayers_(nOctaveLayers),
extended_(extended),
upright_(upright)
int nOctaves,
int nOctaveLayers,
bool extended,
float keypointsRatio,
bool upright) :
surf_(hessianThreshold,
nOctaves,
nOctaveLayers,
extended,
keypointsRatio,
upright)
{
}
virtual ~GPUSURF() {}
void operator()(cv::InputArray img, cv::InputArray mask,
std::vector<cv::KeyPoint>& keypoints,
cv::OutputArray descriptors,
bool useProvidedKeypoints=false) const
{
printf("GPUSURF:operator() Don't call this directly!\n");
exit(-1);
}
int descriptorSize() const
{
return extended_ ? 128 : 64;
}
int descriptorType() const
{
return CV_32F;
}
protected:
void detectImpl( const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints, const cv::Mat& mask=cv::Mat() ) const
void detectKeypoints(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints)
{
cv::gpu::GpuMat imgGpu(image);
cv::gpu::GpuMat maskGpu(mask);
cv::gpu::SURF_GPU surfGpu(hessianThreshold_, nOctaves_, nOctaveLayers_, extended_, 0.01f, upright_);
surfGpu(imgGpu, maskGpu, keypoints);
try
{
surf_(imgGpu, cv::gpu::GpuMat(), keypoints);
}
catch(cv::Exception &e)
{
printf("GPUSURF error: %s \n(If something about layer_rows, parameter nOctaves=%d of SURF is too high for the size of the image (%d,%d).)\n",
e.msg.c_str(),
surf_.nOctaves,
image.cols,
image.rows);
printf("img.size().area()=%d surf.keypointsRatio=%d\n", imgGpu.size().area(), surf_.keypointsRatio);
}
}
void computeImpl( const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints, cv::Mat& descriptors ) const
void computeDescriptors( const cv::Mat& image,
std::vector<cv::KeyPoint>& keypoints,
cv::Mat& descriptors)
{
std::vector<float> d;
cv::gpu::GpuMat imgGpu(image);
cv::gpu::SURF_GPU surfGpu(hessianThreshold_, nOctaves_, nOctaveLayers_, extended_, 0.01f, upright_);
cv::gpu::GpuMat descriptorsGPU;
surfGpu(imgGpu, cv::gpu::GpuMat(), keypoints, descriptorsGPU, true);
try
{
surf_(imgGpu, cv::gpu::GpuMat(), keypoints, descriptorsGPU, true);
}
catch(cv::Exception &e)
{
printf("GPUSURF error: %s \n(If something about layer_rows, parameter nOctaves=%d of SURF is too high for the size of the image (%d,%d).)\n",
e.msg.c_str(),
surf_.nOctaves,
image.cols,
image.rows);
printf("img.size().area()=%d surf.keypointsRatio=%d\n", imgGpu.size().area(), surf_.keypointsRatio);
}
// Download descriptors
if (descriptorsGPU.empty())
@ -179,44 +201,41 @@ protected:
descriptorsGPU.download(descriptors);
}
}
private:
double hessianThreshold_;
int nOctaves_;
int nOctaveLayers_;
bool extended_;
bool upright_;
cv::gpu::SURF_GPU surf_; // kept as a member so GPU buffers are reused across calls
};
class GPUFAST : public cv::FeatureDetector
class GPUFAST : public GPUFeature2D
{
public:
GPUFAST(int threshold=Settings::defaultFeature2D_Fast_threshold(),
bool nonmaxSuppression=Settings::defaultFeature2D_Fast_nonmaxSuppression(),
double keypointsRatio=Settings::defaultFeature2D_Fast_keypointsRatio()) :
threshold_(threshold),
nonmaxSuppression_(nonmaxSuppression),
keypointsRatio_(keypointsRatio)
fast_(threshold,
nonmaxSuppression,
keypointsRatio)
{
}
virtual ~GPUFAST() {}
protected:
void detectImpl( const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints, const cv::Mat& mask=cv::Mat() ) const
void detectKeypoints(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints)
{
cv::gpu::GpuMat imgGpu(image);
cv::gpu::GpuMat maskGpu(mask);
cv::gpu::FAST_GPU fastGpu(threshold_, nonmaxSuppression_, keypointsRatio_);
fastGpu(imgGpu, maskGpu, keypoints);
fast_(imgGpu, cv::gpu::GpuMat(), keypoints);
}
void computeDescriptors( const cv::Mat& image,
std::vector<cv::KeyPoint>& keypoints,
cv::Mat& descriptors)
{
printf("GPUFAST:computeDescriptors() Should not be used!\n");
}
private:
int threshold_;
bool nonmaxSuppression_;
double keypointsRatio_;
cv::gpu::FAST_GPU fast_;
};
class GPUORB : public cv::Feature2D
class GPUORB : public GPUFeature2D
{
public:
GPUORB(int nFeatures = Settings::defaultFeature2D_ORB_nFeatures(),
@ -229,56 +248,54 @@ public:
int patchSize = Settings::defaultFeature2D_ORB_patchSize(),
int fastThreshold = Settings::defaultFeature2D_Fast_threshold(),
bool fastNonmaxSupression = Settings::defaultFeature2D_Fast_nonmaxSuppression()) :
nFeatures_(nFeatures),
scaleFactor_(scaleFactor),
nLevels_(nLevels),
edgeThreshold_(edgeThreshold),
firstLevel_(firstLevel),
WTA_K_(WTA_K),
scoreType_(scoreType),
patchSize_(patchSize),
fastThreshold_(fastThreshold),
fastNonmaxSupression_(fastNonmaxSupression)
orb_(nFeatures,
scaleFactor,
nLevels,
edgeThreshold,
firstLevel,
WTA_K,
scoreType,
patchSize)
{
orb_.setFastParams(fastThreshold, fastNonmaxSupression);
}
virtual ~GPUORB() {}
void operator()(cv::InputArray img, cv::InputArray mask,
std::vector<cv::KeyPoint>& keypoints,
cv::OutputArray descriptors,
bool useProvidedKeypoints=false) const
{
printf("GPUSURF:operator() Don't call this directly!\n");
exit(-1);
}
int descriptorSize() const
{
return cv::ORB::kBytes;
}
int descriptorType() const
{
return CV_8U;
}
protected:
void detectImpl( const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints, const cv::Mat& mask=cv::Mat() ) const
void detectKeypoints(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints)
{
cv::gpu::GpuMat imgGpu(image);
cv::gpu::GpuMat maskGpu(mask);
cv::gpu::ORB_GPU orbGPU(nFeatures_, scaleFactor_, nLevels_, edgeThreshold_ ,firstLevel_, WTA_K_, scoreType_, patchSize_);
orbGPU.setFastParams(fastThreshold_, fastNonmaxSupression_);
orbGPU(imgGpu, maskGpu, keypoints);
try
{
orb_(imgGpu, cv::gpu::GpuMat(), keypoints);
}
catch(cv::Exception &e)
{
printf("GPUORB error: %s \n(If something about matrix size, the image/object may be too small (%d,%d).)\n",
e.msg.c_str(),
image.cols,
image.rows);
}
}
void computeImpl( const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints, cv::Mat& descriptors ) const
void computeDescriptors( const cv::Mat& image,
std::vector<cv::KeyPoint>& keypoints,
cv::Mat& descriptors)
{
std::vector<float> d;
cv::gpu::GpuMat imgGpu(image);
cv::gpu::ORB_GPU orbGPU(nFeatures_, scaleFactor_, nLevels_, edgeThreshold_ ,firstLevel_, WTA_K_, scoreType_, patchSize_);
orbGPU.setFastParams(fastThreshold_, fastNonmaxSupression_);
cv::gpu::GpuMat descriptorsGPU;
orbGPU(imgGpu, cv::gpu::GpuMat(), keypoints, descriptorsGPU); // No option to use provided keypoints!?
try
{
orb_(imgGpu, cv::gpu::GpuMat(), keypoints, descriptorsGPU); // No option to use provided keypoints!?
}
catch(cv::Exception &e)
{
printf("GPUORB error: %s \n(If something about matrix size, the image/object may be too small (%d,%d).)\n",
e.msg.c_str(),
image.cols,
image.rows);
}
// Download descriptors
if (descriptorsGPU.empty())
descriptors = cv::Mat();
@ -289,24 +306,14 @@ protected:
descriptorsGPU.download(descriptors);
}
}
private:
int nFeatures_;
float scaleFactor_;
int nLevels_;
int edgeThreshold_;
int firstLevel_;
int WTA_K_;
int scoreType_;
int patchSize_;
int fastThreshold_;
bool fastNonmaxSupression_;
cv::gpu::ORB_GPU orb_;
};
cv::FeatureDetector * Settings::createFeaturesDetector()
KeypointDetector * Settings::createKeypointDetector()
{
cv::FeatureDetector * detector = 0;
GPUFeature2D * detectorGPU = 0;
QString str = getFeature2D_1Detector();
QStringList split = str.split(':');
if(split.size()==2)
@ -339,7 +346,7 @@ cv::FeatureDetector * Settings::createFeaturesDetector()
{
if(getFeature2D_Fast_gpu() && cv::gpu::getCudaEnabledDeviceCount())
{
detector = new GPUFAST(
detectorGPU = new GPUFAST(
getFeature2D_Fast_threshold(),
getFeature2D_Fast_nonmaxSuppression());
if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s GPU\n", strategies.at(index).toStdString().c_str());
@ -387,7 +394,7 @@ cv::FeatureDetector * Settings::createFeaturesDetector()
{
if(getFeature2D_ORB_gpu() && cv::gpu::getCudaEnabledDeviceCount())
{
detector = new GPUORB(
detectorGPU = new GPUORB(
getFeature2D_ORB_nFeatures(),
getFeature2D_ORB_scaleFactor(),
getFeature2D_ORB_nLevels(),
@ -444,11 +451,12 @@ cv::FeatureDetector * Settings::createFeaturesDetector()
{
if(getFeature2D_SURF_gpu() && cv::gpu::getCudaEnabledDeviceCount())
{
detector = new GPUSURF(
detectorGPU = new GPUSURF(
getFeature2D_SURF_hessianThreshold(),
getFeature2D_SURF_nOctaves(),
getFeature2D_SURF_nOctaveLayers(),
getFeature2D_SURF_extended(),
getFeature2D_SURF_keypointsRatio(),
getFeature2D_SURF_upright());
if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s (GPU)\n", strategies.at(index).toStdString().c_str());
}
@ -480,17 +488,22 @@ cv::FeatureDetector * Settings::createFeaturesDetector()
}
}
}
if(!detector)
Q_ASSERT(detectorGPU!=0 || detector!=0);
if(detectorGPU)
{
printf("ERROR: detector strategy not found !? Using default SURF...\n");
detector = new cv::SURF();
return new KeypointDetector(detectorGPU);
}
else
{
return new KeypointDetector(detector);
}
return detector;
}
cv::DescriptorExtractor * Settings::createDescriptorsExtractor()
DescriptorExtractor * Settings::createDescriptorExtractor()
{
cv::DescriptorExtractor * extractor = 0;
GPUFeature2D * extractorGPU = 0;
QString str = getFeature2D_2Descriptor();
QStringList split = str.split(':');
if(split.size()==2)
@ -517,7 +530,7 @@ cv::DescriptorExtractor * Settings::createDescriptorsExtractor()
{
if(getFeature2D_ORB_gpu() && cv::gpu::getCudaEnabledDeviceCount())
{
extractor = new GPUORB(
extractorGPU = new GPUORB(
getFeature2D_ORB_nFeatures(),
getFeature2D_ORB_scaleFactor(),
getFeature2D_ORB_nLevels(),
@ -562,11 +575,12 @@ cv::DescriptorExtractor * Settings::createDescriptorsExtractor()
{
if(getFeature2D_SURF_gpu() && cv::gpu::getCudaEnabledDeviceCount())
{
extractor = new GPUSURF(
extractorGPU = new GPUSURF(
getFeature2D_SURF_hessianThreshold(),
getFeature2D_SURF_nOctaves(),
getFeature2D_SURF_nOctaveLayers(),
getFeature2D_SURF_extended(),
getFeature2D_SURF_keypointsRatio(),
getFeature2D_SURF_upright());
if(VERBOSE)printf("Settings::createDescriptorsExtractor() type=%s (GPU)\n", strategies.at(index).toStdString().c_str());
}
@ -609,12 +623,16 @@ cv::DescriptorExtractor * Settings::createDescriptorsExtractor()
}
}
}
if(!extractor)
Q_ASSERT(extractorGPU!=0 || extractor!=0);
if(extractorGPU)
{
printf("ERROR: descriptor strategy not found !? Using default SURF...\n");
extractor = new cv::SURF();
return new DescriptorExtractor(extractorGPU);
}
else
{
return new DescriptorExtractor(extractor);
}
return extractor;
}
QString Settings::currentDetectorType()
@ -808,3 +826,53 @@ int Settings::getHomographyMethod()
if(VERBOSE)printf("Settings::getHomographyMethod() method=%d\n", method);
return method;
}
KeypointDetector::KeypointDetector(cv::FeatureDetector * featureDetector) :
featureDetector_(featureDetector),
gpuFeature2D_(0)
{
Q_ASSERT(featureDetector_!=0);
}
KeypointDetector::KeypointDetector(GPUFeature2D * gpuFeature2D) :
featureDetector_(0),
gpuFeature2D_(gpuFeature2D)
{
Q_ASSERT(gpuFeature2D_!=0);
}
void KeypointDetector::detect(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints)
{
if(featureDetector_)
{
featureDetector_->detect(image, keypoints);
}
else // assume GPU
{
gpuFeature2D_->detectKeypoints(image, keypoints);
}
}
DescriptorExtractor::DescriptorExtractor(cv::DescriptorExtractor * descriptorExtractor) :
descriptorExtractor_(descriptorExtractor),
gpuFeature2D_(0)
{
Q_ASSERT(descriptorExtractor_!=0);
}
DescriptorExtractor::DescriptorExtractor(GPUFeature2D * gpuFeature2D) :
descriptorExtractor_(0),
gpuFeature2D_(gpuFeature2D)
{
Q_ASSERT(gpuFeature2D_!=0);
}
void DescriptorExtractor::compute(const cv::Mat & image,
std::vector<cv::KeyPoint> & keypoints,
cv::Mat & descriptors)
{
if(descriptorExtractor_)
{
descriptorExtractor_->compute(image, keypoints, descriptors);
}
else // assume GPU
{
gpuFeature2D_->computeDescriptors(image, keypoints, descriptors);
}
}
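
Since GPUSURF, GPUFAST and GPUORB stay local to Settings.cpp, only the two factories above can pick a back end. A condensed sketch of the selection they perform (parameter values illustrative only, assuming OpenCV 2.4 with the nonfree and gpu modules):

KeypointDetector * makeSurfDetector(bool preferGpu)
{
    if(preferGpu && cv::gpu::getCudaEnabledDeviceCount())
    {
        // hessianThreshold, nOctaves, nOctaveLayers, extended, keypointsRatio, upright
        return new KeypointDetector(new GPUSURF(600.0, 4, 2, true, 0.01f, false));
    }
    return new KeypointDetector(new cv::SURF(600.0)); // CPU path
}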

Settings.h

@ -10,7 +10,9 @@
#include <QtCore/QByteArray>
#include <opencv2/features2d/features2d.hpp>
class Camera;
class KeypointDetector;
class DescriptorExtractor;
class GPUFeature2D;
typedef QMap<QString, QVariant> ParametersMap; // Key, value
typedef QMap<QString, QString> ParametersType; // Key, type
@ -129,6 +131,7 @@ class Settings
PARAMETER(Feature2D, SURF_extended, bool, true, "Extended descriptor flag (true - use extended 128-element descriptors; false - use 64-element descriptors).");
PARAMETER(Feature2D, SURF_upright, bool, false, "Up-right or rotated features flag (true - do not compute orientation of features; false - compute orientation).");
PARAMETER(Feature2D, SURF_gpu, bool, false, "GPU-SURF: Use GPU version of SURF. This option is enabled only if OpenCV is built with CUDA and GPUs are detected.");
PARAMETER(Feature2D, SURF_keypointsRatio, float, 0.01f, "GPU-SURF: max keypoints buffer size as a ratio of the image area (see sketch below).");
PARAMETER(Feature2D, BRISK_thresh, int, 30, "FAST/AGAST detection threshold score.");
PARAMETER(Feature2D, BRISK_octaves, int, 3, "Detection octaves. Use 0 to do single scale.");
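
The new SURF_keypointsRatio value is passed straight through to cv::gpu::SURF_GPU, where it sizes the preallocated GPU keypoint buffer. Roughly (illustrative sketch, not code from this commit):

// In OpenCV 2.4, SURF_GPU reserves room for about keypointsRatio * image-area
// keypoints, e.g. 0.01f * (640 * 480) = 3072 keypoints for a VGA image.
static int maxSurfGpuKeypoints(const cv::Size & imageSize, float keypointsRatio)
{
    return (int)(keypointsRatio * imageSize.area());
}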
@ -214,8 +217,8 @@ public:
static void resetParameter(const QString & key) {if(defaultParameters_.contains(key)) parameters_.insert(key, defaultParameters_.value(key));}
static QVariant getParameter(const QString & key) {return parameters_.value(key, QVariant());}
static cv::FeatureDetector * createFeaturesDetector();
static cv::DescriptorExtractor * createDescriptorsExtractor();
static KeypointDetector * createKeypointDetector();
static DescriptorExtractor * createDescriptorExtractor();
static QString currentDescriptorType();
static QString currentDetectorType();
@ -238,5 +241,32 @@ private:
static Settings dummyInit_;
};
class KeypointDetector
{
public:
KeypointDetector(cv::FeatureDetector * featureDetector);
KeypointDetector(GPUFeature2D * gpuFeature2D);
void detect(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints);
private:
cv::FeatureDetector * featureDetector_;
GPUFeature2D * gpuFeature2D_;
};
class DescriptorExtractor
{
public:
DescriptorExtractor(cv::DescriptorExtractor * descriptorExtractor);
DescriptorExtractor(GPUFeature2D * gpuFeature2D);
void compute(const cv::Mat & image,
std::vector<cv::KeyPoint> & keypoints,
cv::Mat & descriptors);
private:
cv::DescriptorExtractor * descriptorExtractor_;
GPUFeature2D * gpuFeature2D_;
};
#endif /* SETTINGS_H_ */