Added ASIFT (to activate, set the Feature2D/Affine parameter) (fixes issue 27)

git-svn-id: http://find-object.googlecode.com/svn/trunk/find_object@385 620bd6b2-0a58-f614-fd9a-1bd335dccda9
Author: matlabbe, 2014-08-22 22:36:09 +00:00
parent dcf406429e
commit 629b2aefaa
8 changed files with 409 additions and 144 deletions
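ASIFT is disabled by default. Using the generic --My/Parameter switch documented in showUsage() below, a hypothetical invocation to turn it on from the command line could look like this (the key name is assumed to follow the PARAMETER(Feature2D, 4Affine, ...) declaration added to Settings.h; paths are placeholders):

    ./find_object --objects objects/ --scene scene.png --Feature2D/4Affine true --Feature2D/5AffineCount 6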

==== main.cpp ====

@@ -102,6 +102,7 @@ void showUsage()
 	"  --objects \"path\"        Directory of the objects to detect.\n"
 	"  --config \"path\"         Path to configuration file (default: %s).\n"
 	"  --scene \"path\"          Path to a scene image file.\n"
+	"  --debug                 Show debug log.\n"
 	"  --params                Show all parameters.\n"
 	"  --My/Parameter \"value\"  Set find-Object's parameter (look --params for parameters' name).\n"
 	"                          It will override the one in --config. Example to set 4 threads:\n"
@@ -209,6 +210,13 @@ int main(int argc, char* argv[])
 		guiMode = false;
 		continue;
 	}
+	if(strcmp(argv[i], "-debug") == 0 ||
+	   strcmp(argv[i], "--debug") == 0)
+	{
+		ULogger::setPrintWhere(true);
+		ULogger::setLevel(ULogger::kDebug);
+		continue;
+	}
 	if(strcmp(argv[i], "-help") == 0 ||
 	   strcmp(argv[i], "--help") == 0)
 	{
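The new switch raises the ULogger level so the UDEBUG statements introduced in Settings.cpp below become visible. A hypothetical run with debug logging enabled (paths are placeholders):

    ./find_object --debug --objects objects/ --scene scene.png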

==== DetectionInfo.h ====

@@ -42,6 +42,7 @@ public:
 	enum TimeStamp{
 		kTimeKeypointDetection,
 		kTimeDescriptorExtraction,
+		kTimeSkewAffine,
 		kTimeIndexing,
 		kTimeMatching,
 		kTimeHomography,

==== FindObject.h ====

@@ -53,6 +53,13 @@ class DescriptorExtractor;
 class FINDOBJECT_EXP FindObject : public QObject
 {
 	Q_OBJECT;
+public:
+	static void affineSkew(float tilt,
+			float phi,
+			const cv::Mat & image,
+			cv::Mat & skewImage,
+			cv::Mat & skewMask,
+			cv::Mat & Ai);
 public:
 	FindObject(QObject * parent = 0);
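Ai is the 2x3 inverse affine transform, mapping points from the skewed image back to the original image. A minimal sketch of that mapping, lifted from the remapping loop added in FindObject.cpp below (the tilt/phi values are arbitrary; kpt is a keypoint detected on skewImage):

    cv::Mat skewImage, skewMask, Ai;
    FindObject::affineSkew(2.0f, 30.0f, image, skewImage, skewMask, Ai);
    cv::Mat p = (cv::Mat_<float>(3, 1) << kpt.pt.x, kpt.pt.y, 1);
    cv::Mat pa = Ai * p; // 2x3 * 3x1 -> 2x1
    kpt.pt.x = pa.at<float>(0,0);
    kpt.pt.y = pa.at<float>(1,0);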

==== Settings.h ====

@@ -97,6 +97,8 @@ class FINDOBJECT_EXP Settings
 	PARAMETER(Feature2D, 1Detector, QString, "7:Dense;Fast;GFTT;MSER;ORB;SIFT;Star;SURF;BRISK", "Keypoint detector.");
 	PARAMETER(Feature2D, 2Descriptor, QString, "3:Brief;ORB;SIFT;SURF;BRISK;FREAK", "Keypoint descriptor.");
 	PARAMETER(Feature2D, 3MaxFeatures, int, 0, "Maximum features per image. If the number of features extracted is over this threshold, only X features with the highest response are kept. 0 means all features are kept.");
+	PARAMETER(Feature2D, 4Affine, bool, false, "(ASIFT) Extract features on multiple affine transformations of the image.");
+	PARAMETER(Feature2D, 5AffineCount, int, 6, "(ASIFT) The higher the value, the more affine transformations are done.");
 	PARAMETER(Feature2D, Brief_bytes, int, 32, "Bytes is a length of descriptor in bytes. It can be equal 16, 32 or 64 bytes.");
@@ -205,7 +207,7 @@ class FINDOBJECT_EXP Settings
 	PARAMETER(General, autoStartCamera, bool, false, "Automatically start the camera when the application is opened.");
 	PARAMETER(General, autoUpdateObjects, bool, true, "Automatically update objects on every parameter changes, otherwise you would need to press \"Update objects\" on the objects panel.");
 	PARAMETER(General, nextObjID, uint, 1, "Next object ID to use.");
-	PARAMETER(General, imageFormats, QString, "*.png *.jpg *.bmp *.tiff *.ppm", "Image formats supported.");
+	PARAMETER(General, imageFormats, QString, "*.png *.jpg *.bmp *.tiff *.ppm *.pgm", "Image formats supported.");
 	PARAMETER(General, videoFormats, QString, "*.avi *.m4v *.mp4", "Video formats supported.");
 	PARAMETER(General, mirrorView, bool, true, "Flip the camera image horizontally (like all webcam applications).");
 	PARAMETER(General, invertedSearch, bool, true, "Instead of matching descriptors from the objects to those in a vocabulary created with descriptors extracted from the scene, we create a vocabulary from all the objects' descriptors and we match scene's descriptors to this vocabulary. It is the inverted search mode.");
@@ -283,7 +285,9 @@ public:
 	KeypointDetector(cv::FeatureDetector * featureDetector);
 	KeypointDetector(GPUFeature2D * gpuFeature2D);
-	void detect(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints);
+	void detect(const cv::Mat & image,
+			std::vector<cv::KeyPoint> & keypoints,
+			const cv::Mat & mask = cv::Mat());
 private:
 	cv::FeatureDetector * featureDetector_;
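The new mask argument follows the usual OpenCV convention (CV_8U, non-zero pixels mark the search region); internally it carries the warped skewMask produced by affineSkew(). A hypothetical direct use restricting detection to a region of interest:

    cv::Mat mask = cv::Mat::zeros(image.size(), CV_8U);
    mask(cv::Rect(0, 0, image.cols/2, image.rows)) = 255; // detect on the left half only
    std::vector<cv::KeyPoint> keypoints;
    detector->detect(image, keypoints, mask);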

==== FindObject.cpp ====

@@ -202,61 +202,275 @@ std::vector<cv::KeyPoint> limitKeypoints(...)
 	return kptsKept;
 }
 
+// taken from the ASIFT example https://github.com/Itseez/opencv/blob/master/samples/python2/asift.py
+// affine - is an affine transform matrix from skew_img to img
+void FindObject::affineSkew(
+		float tilt,
+		float phi,
+		const cv::Mat & image,
+		cv::Mat & skewImage,
+		cv::Mat & skewMask,
+		cv::Mat & Ai)
+{
+	float h = image.rows;
+	float w = image.cols;
+	cv::Mat A = cv::Mat::zeros(2,3,CV_32FC1);
+	A.at<float>(0,0) = A.at<float>(1,1) = 1;
+	skewMask = cv::Mat::ones(h, w, CV_8U) * 255;
+	if(phi != 0.0)
+	{
+		phi = phi*CV_PI/180.0f; // deg2rad
+		float s = std::sin(phi);
+		float c = std::cos(phi);
+		cv::Mat A22 = (cv::Mat_<float>(2, 2) <<
+				c, -s,
+				s, c);
+		cv::Mat cornersIn = (cv::Mat_<float>(4, 2) <<
+				0,0,
+				w,0,
+				w,h,
+				0,h);
+		cv::Mat cornersOut = cornersIn * A22.t();
+		cv::Rect rect = cv::boundingRect(cornersOut.reshape(2,4));
+		A = (cv::Mat_<float>(2, 3) <<
+				c, -s, -rect.x,
+				s, c, -rect.y);
+		cv::warpAffine(image, skewImage, A, cv::Size(rect.width, rect.height), cv::INTER_LINEAR, cv::BORDER_REPLICATE);
+	}
+	else
+	{
+		skewImage = image;
+	}
+	if(tilt != 1.0)
+	{
+		float s = 0.8*std::sqrt(tilt*tilt-1);
+		cv::Mat out, out2;
+		cv::GaussianBlur(skewImage, out, cv::Size(0, 0), s, 0.01);
+		cv::resize(out, out2, cv::Size(0, 0), 1.0/tilt, 1.0, cv::INTER_NEAREST);
+		skewImage = out2;
+		A.row(0) /= tilt;
+	}
+	if(phi != 0.0 || tilt != 1.0)
+	{
+		cv::Mat mask = skewMask;
+		cv::warpAffine(mask, skewMask, A, skewImage.size(), cv::INTER_NEAREST);
+	}
+	cv::invertAffineTransform(A, Ai);
+}
+
+class AffineExtractionThread : public QThread
+{
+public:
+	AffineExtractionThread(
+			KeypointDetector * detector,
+			DescriptorExtractor * extractor,
+			const cv::Mat & image,
+			float tilt,
+			float phi) :
+		detector_(detector),
+		extractor_(extractor),
+		image_(image),
+		tilt_(tilt),
+		phi_(phi),
+		timeSkewAffine_(0),
+		timeDetection_(0),
+		timeExtraction_(0)
+	{
+		UASSERT(detector && extractor);
+	}
+	const cv::Mat & image() const {return image_;}
+	const std::vector<cv::KeyPoint> & keypoints() const {return keypoints_;}
+	const cv::Mat & descriptors() const {return descriptors_;}
+	int timeSkewAffine() const {return timeSkewAffine_;}
+	int timeDetection() const {return timeDetection_;}
+	int timeExtraction() const {return timeExtraction_;}
+protected:
+	virtual void run()
+	{
+		QTime timeStep;
+		timeStep.start();
+		cv::Mat skewImage, skewMask, Ai;
+		FindObject::affineSkew(tilt_, phi_, image_, skewImage, skewMask, Ai);
+		timeSkewAffine_=timeStep.restart();
+		//Detect features
+		detector_->detect(skewImage, keypoints_, skewMask);
+		if(keypoints_.size())
+		{
+			int maxFeatures = Settings::getFeature2D_3MaxFeatures();
+			if(maxFeatures > 0 && (int)keypoints_.size() > maxFeatures)
+			{
+				keypoints_ = limitKeypoints(keypoints_, maxFeatures);
+			}
+			timeDetection_=timeStep.restart();
+			//Extract descriptors
+			extractor_->compute(skewImage, keypoints_, descriptors_);
+			timeExtraction_=timeStep.restart();
+			// Transform points to original image coordinates
+			for(unsigned int i=0; i<keypoints_.size(); ++i)
+			{
+				cv::Mat p = (cv::Mat_<float>(3, 1) << keypoints_[i].pt.x, keypoints_[i].pt.y, 1);
+				cv::Mat pa = Ai * p;
+				keypoints_[i].pt.x = pa.at<float>(0,0);
+				keypoints_[i].pt.y = pa.at<float>(1,0);
+			}
+		}
+		else
+		{
+			timeDetection_=timeStep.restart();
+		}
+	}
+private:
+	KeypointDetector * detector_;
+	DescriptorExtractor * extractor_;
+	cv::Mat image_;
+	float tilt_;
+	float phi_;
+	std::vector<cv::KeyPoint> keypoints_;
+	cv::Mat descriptors_;
+	int timeSkewAffine_;
+	int timeDetection_;
+	int timeExtraction_;
+};
+
 class ExtractFeaturesThread : public QThread
 {
 public:
-	ExtractFeaturesThread(int objectId, const cv::Mat & image) :
+	ExtractFeaturesThread(
+			KeypointDetector * detector,
+			DescriptorExtractor * extractor,
+			int objectId,
+			const cv::Mat & image) :
+		detector_(detector),
+		extractor_(extractor),
 		objectId_(objectId),
-		image_(image)
+		image_(image),
+		timeSkewAffine_(0),
+		timeDetection_(0),
+		timeExtraction_(0)
 	{
+		UASSERT(detector && extractor);
 	}
 	int objectId() const {return objectId_;}
 	const cv::Mat & image() const {return image_;}
 	const std::vector<cv::KeyPoint> & keypoints() const {return keypoints_;}
 	const cv::Mat & descriptors() const {return descriptors_;}
+	int timeSkewAffine() const {return timeSkewAffine_;}
+	int timeDetection() const {return timeDetection_;}
+	int timeExtraction() const {return timeExtraction_;}
 protected:
 	virtual void run()
 	{
 		QTime time;
 		time.start();
 		UINFO("Extracting descriptors from object %d...", objectId_);
-		KeypointDetector * detector = Settings::createKeypointDetector();
-		keypoints_.clear();
-		descriptors_ = cv::Mat();
-		detector->detect(image_, keypoints_);
-		delete detector;
-		if(keypoints_.size())
+		QTime timeStep;
+		timeStep.start();
+		if(!Settings::getFeature2D_4Affine())
 		{
-			int maxFeatures = Settings::getFeature2D_3MaxFeatures();
-			if(maxFeatures > 0 && (int)keypoints_.size() > maxFeatures)
-			{
-				int previousCount = (int)keypoints_.size();
-				keypoints_ = limitKeypoints(keypoints_, maxFeatures);
-				UINFO("obj=%d, %d keypoints removed, (kept %d), min/max response=%f/%f", objectId_, previousCount-(int)keypoints_.size(), (int)keypoints_.size(), keypoints_.size()?keypoints_.back().response:0.0f, keypoints_.size()?keypoints_.front().response:0.0f);
-			}
-			DescriptorExtractor * extractor = Settings::createDescriptorExtractor();
-			extractor->compute(image_, keypoints_, descriptors_);
-			delete extractor;
-			if((int)keypoints_.size() != descriptors_.rows)
+			keypoints_.clear();
+			descriptors_ = cv::Mat();
+			detector_->detect(image_, keypoints_);
+			if(keypoints_.size())
 			{
-				UERROR("obj=%d kpt=%d != descriptors=%d", objectId_, (int)keypoints_.size(), descriptors_.rows);
+				int maxFeatures = Settings::getFeature2D_3MaxFeatures();
+				if(maxFeatures > 0 && (int)keypoints_.size() > maxFeatures)
+				{
+					int previousCount = (int)keypoints_.size();
+					keypoints_ = limitKeypoints(keypoints_, maxFeatures);
+					UDEBUG("obj=%d, %d keypoints removed, (kept %d), min/max response=%f/%f", objectId_, previousCount-(int)keypoints_.size(), (int)keypoints_.size(), keypoints_.size()?keypoints_.back().response:0.0f, keypoints_.size()?keypoints_.front().response:0.0f);
				}
+				timeDetection_+=timeStep.restart();
+				extractor_->compute(image_, keypoints_, descriptors_);
+				timeExtraction_+=timeStep.restart();
+				if((int)keypoints_.size() != descriptors_.rows)
+				{
+					UERROR("obj=%d kpt=%d != descriptors=%d", objectId_, (int)keypoints_.size(), descriptors_.rows);
+				}
+			}
+			else
+			{
+				timeDetection_+=timeStep.restart();
+				UWARN("no features detected in object %d !?!", objectId_);
 			}
 		}
 		else
 		{
-			UWARN("no features detected in object %d !?!", objectId_);
+			//ASIFT
+			std::vector<float> tilts;
+			std::vector<float> phis;
+			tilts.push_back(1.0f);
+			phis.push_back(0.0f);
+			int nTilt = Settings::getFeature2D_5AffineCount();
+			for(int t=1; t<nTilt; ++t)
+			{
+				float tilt = std::pow(2.0f, 0.5f*float(t));
+				float inc = 72.0f / float(tilt);
+				for(float phi=0.0f; phi<180.0f; phi+=inc)
+				{
+					tilts.push_back(tilt);
+					phis.push_back(phi);
+				}
+			}
+			//multi-threaded
+			unsigned int threadCounts = Settings::getGeneral_threads();
+			if(threadCounts == 0)
+			{
+				threadCounts = tilts.size();
+			}
+			for(unsigned int i=0; i<tilts.size(); i+=threadCounts)
+			{
+				QVector<AffineExtractionThread*> threads;
+				for(unsigned int k=i; k<i+threadCounts && k<tilts.size(); ++k)
+				{
+					threads.push_back(new AffineExtractionThread(detector_, extractor_, image_, tilts[k], phis[k]));
+					threads.back()->start();
+				}
+				for(int k=0; k<threads.size(); ++k)
+				{
+					threads[k]->wait();
+					keypoints_.insert(keypoints_.end(), threads[k]->keypoints().begin(), threads[k]->keypoints().end());
+					descriptors_.push_back(threads[k]->descriptors());
+					timeSkewAffine_ += threads[k]->timeSkewAffine();
+					timeDetection_ += threads[k]->timeDetection();
+					timeExtraction_ += threads[k]->timeExtraction();
+				}
+			}
 		}
 		UINFO("%d descriptors extracted from object %d (in %d ms)", descriptors_.rows, objectId_, time.elapsed());
 	}
 private:
+	KeypointDetector * detector_;
+	DescriptorExtractor * extractor_;
 	int objectId_;
 	cv::Mat image_;
 	std::vector<cv::KeyPoint> keypoints_;
 	cv::Mat descriptors_;
+	int timeSkewAffine_;
+	int timeDetection_;
+	int timeExtraction_;
 };
@@ -278,7 +492,7 @@ void FindObject::updateObjects()
 	QVector<ExtractFeaturesThread*> threads;
 	for(int k=i; k<i+threadCounts && k<objectsList.size(); ++k)
 	{
-		threads.push_back(new ExtractFeaturesThread(objectsList.at(k)->id(), objectsList.at(k)->image()));
+		threads.push_back(new ExtractFeaturesThread(detector_, extractor_, objectsList.at(k)->id(), objectsList.at(k)->image()));
 		threads.back()->start();
 	}
@@ -677,30 +891,15 @@ bool FindObject::detect(const cv::Mat & image, find_object::DetectionInfo & info
 		grayscaleImg = image;
 	}
-	QTime time;
-	time.start();
-
-	// EXTRACT KEYPOINTS
-	detector_->detect(grayscaleImg, info.sceneKeypoints_);
-	info.timeStamps_.insert(DetectionInfo::kTimeKeypointDetection, time.restart());
-
-	bool emptyScene = info.sceneKeypoints_.size() == 0;
-	if(info.sceneKeypoints_.size())
-	{
-		int maxFeatures = Settings::getFeature2D_3MaxFeatures();
-		if(maxFeatures > 0 && (int)info.sceneKeypoints_.size() > maxFeatures)
-		{
-			info.sceneKeypoints_ = limitKeypoints(info.sceneKeypoints_, maxFeatures);
-		}
-		// EXTRACT DESCRIPTORS
-		extractor_->compute(grayscaleImg, info.sceneKeypoints_, info.sceneDescriptors_);
-		if((int)info.sceneKeypoints_.size() != info.sceneDescriptors_.rows)
-		{
-			UERROR("kpt=%d != descriptors=%d", (int)info.sceneKeypoints_.size(), info.sceneDescriptors_.rows);
-		}
-	}
-	info.timeStamps_.insert(DetectionInfo::kTimeDescriptorExtraction, time.restart());
+	// DETECT FEATURES AND EXTRACT DESCRIPTORS
+	ExtractFeaturesThread extractThread(detector_, extractor_, -1, grayscaleImg);
+	extractThread.start();
+	extractThread.wait();
+	info.sceneKeypoints_ = extractThread.keypoints();
+	info.sceneDescriptors_ = extractThread.descriptors();
+	info.timeStamps_.insert(DetectionInfo::kTimeKeypointDetection, extractThread.timeDetection());
+	info.timeStamps_.insert(DetectionInfo::kTimeDescriptorExtraction, extractThread.timeExtraction());
+	info.timeStamps_.insert(DetectionInfo::kTimeSkewAffine, extractThread.timeSkewAffine());
 	bool consistentNNData = (vocabulary_->size()!=0 && vocabulary_->wordToObjects().begin().value()!=-1 && Settings::getGeneral_invertedSearch()) ||
 			((vocabulary_->size()==0 || vocabulary_->wordToObjects().begin().value()==-1) && !Settings::getGeneral_invertedSearch());
@@ -713,6 +912,8 @@ bool FindObject::detect(const cv::Mat & image, find_object::DetectionInfo & info
 			objectsDescriptors_.begin().value().type() == info.sceneDescriptors_.type()) // binary descriptor issue, if the dataTree is not yet updated with modified settings
 	{
 		success = true;
+		QTime time;
+		time.start();
 		QMultiMap<int, int> words;
@@ -778,7 +979,10 @@ bool FindObject::detect(const cv::Mat & image, find_object::DetectionInfo & info
 				matched = false;
 			}
 		}
-		if(!matched && !Settings::getNearestNeighbor_3nndrRatioUsed() && !Settings::getNearestNeighbor_5minDistanceUsed())
+		if(!matched &&
+		   !Settings::getNearestNeighbor_3nndrRatioUsed() &&
+		   !Settings::getNearestNeighbor_5minDistanceUsed() &&
+		   dists.at<float>(i,0) >= 0.0f)
 		{
 			matched = true; // no criterion, match to the nearest descriptor
 		}
@@ -1024,7 +1228,7 @@ bool FindObject::detect(const cv::Mat & image, find_object::DetectionInfo & info
 	{
 		UWARN("Cannot search, objects must be updated");
 	}
-	else if(emptyScene)
+	else if(info.sceneKeypoints_.size() == 0)
 	{
 		// Accept but warn the user
 		UWARN("No features detected in the scene!?!");
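The (tilt, phi) schedule built in ExtractFeaturesThread::run() is the ASIFT sampling: tilt_t = 2^(t/2) for t = 1..AffineCount-1, with the rotation step shrinking as 72 degrees / tilt. A standalone sketch that only counts the views generated for the default Feature2D/5AffineCount of 6:

    #include <cmath>
    #include <cstdio>
    int main()
    {
        int views = 1; // identity view (tilt=1, phi=0)
        for(int t=1; t<6; ++t)
        {
            float tilt = std::pow(2.0f, 0.5f*float(t));
            for(float phi=0.0f; phi<180.0f; phi+=72.0f/tilt)
            {
                ++views;
            }
        }
        std::printf("%d affine views\n", views); // 43 with the defaults
        return 0;
    }

Each view gets its own AffineExtractionThread, batched General/threads at a time, so detection and extraction cost scales roughly with this count.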

==== MainWindow.cpp ====

@@ -786,6 +786,7 @@ void MainWindow::updateVocabulary()
 	QApplication::processEvents();
 
 	QTime time;
+	time.start();
 	findObject_->updateVocabulary();
 	if(findObject_->vocabulary()->size())
@@ -946,9 +947,13 @@ void MainWindow::update(const cv::Mat & image)
 	if(findObject_->detect(sceneImage_, info))
 	{
 		ui_->label_timeDetection->setNum(info.timeStamps_.value(DetectionInfo::kTimeKeypointDetection, 0));
+		ui_->label_timeSkewAffine->setNum(info.timeStamps_.value(DetectionInfo::kTimeSkewAffine, 0));
 		ui_->label_timeExtraction->setNum(info.timeStamps_.value(DetectionInfo::kTimeDescriptorExtraction, 0));
 		ui_->imageView_source->setData(info.sceneKeypoints_, cvtCvMat2QImage(sceneImage_));
-		ui_->label_timeIndexing->setNum(info.timeStamps_.value(DetectionInfo::kTimeIndexing, 0));
+		if(!findObject_->vocabulary()->size())
+		{
+			ui_->label_timeIndexing->setNum(info.timeStamps_.value(DetectionInfo::kTimeIndexing, 0));
+		}
 		ui_->label_timeMatching->setNum(info.timeStamps_.value(DetectionInfo::kTimeMatching, 0));
 		ui_->label_timeHomographies->setNum(info.timeStamps_.value(DetectionInfo::kTimeHomography, 0));
@@ -1135,7 +1140,13 @@ void MainWindow::update(const cv::Mat & image)
 	}
 	else
 	{
-		this->statusBar()->showMessage(tr("Cannot search, objects must be updated!"));
+		if(findObject_->vocabulary()->size())
+		{
+			this->statusBar()->showMessage(tr("Cannot search, objects must be updated!"));
+		}
+		ui_->label_timeDetection->setNum(info.timeStamps_.value(DetectionInfo::kTimeKeypointDetection, 0));
+		ui_->label_timeSkewAffine->setNum(info.timeStamps_.value(DetectionInfo::kTimeSkewAffine, 0));
+		ui_->label_timeExtraction->setNum(info.timeStamps_.value(DetectionInfo::kTimeDescriptorExtraction, 0));
 		ui_->imageView_source->setData(info.sceneKeypoints_, cvtCvMat2QImage(sceneImage_));
 	}

==== Settings.cpp ====

@@ -38,8 +38,6 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include <opencv2/nonfree/gpu.hpp>
 #include <opencv2/gpu/gpu.hpp>
-
-#define VERBOSE 0
 
 namespace find_object {
 
 ParametersMap Settings::defaultParameters_;
@@ -199,7 +197,8 @@ public:
 	virtual ~GPUFeature2D() {}
 
 	virtual void detectKeypoints(const cv::Mat & image,
-			std::vector<cv::KeyPoint> & keypoints) = 0;
+			std::vector<cv::KeyPoint> & keypoints,
+			const cv::Mat & mask = cv::Mat()) = 0;
 
 	virtual void computeDescriptors(const cv::Mat & image,
 			std::vector<cv::KeyPoint> & keypoints,
@@ -225,12 +224,15 @@ public:
 	}
 	virtual ~GPUSURF() {}
 
-	void detectKeypoints(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints)
+	void detectKeypoints(const cv::Mat & image,
+			std::vector<cv::KeyPoint> & keypoints,
+			const cv::Mat & mask = cv::Mat())
 	{
 		cv::gpu::GpuMat imgGpu(image);
+		cv::gpu::GpuMat maskGpu(mask);
 		try
 		{
-			surf_(imgGpu, cv::gpu::GpuMat(), keypoints);
+			surf_(imgGpu, maskGpu, keypoints);
 		}
 		catch(cv::Exception &e)
 		{
@@ -292,10 +294,13 @@ public:
 	virtual ~GPUFAST() {}
 protected:
-	void detectKeypoints(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints)
+	void detectKeypoints(const cv::Mat & image,
+			std::vector<cv::KeyPoint> & keypoints,
+			const cv::Mat & mask = cv::Mat())
 	{
 		cv::gpu::GpuMat imgGpu(image);
-		fast_(imgGpu, cv::gpu::GpuMat(), keypoints);
+		cv::gpu::GpuMat maskGpu(mask);
+		fast_(imgGpu, maskGpu, keypoints);
 	}
 	void computeDescriptors( const cv::Mat& image,
 			std::vector<cv::KeyPoint>& keypoints,
@@ -335,12 +340,15 @@ public:
 	virtual ~GPUORB() {}
 protected:
-	void detectKeypoints(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints)
+	void detectKeypoints(const cv::Mat & image,
+			std::vector<cv::KeyPoint> & keypoints,
+			const cv::Mat & mask = cv::Mat())
 	{
 		cv::gpu::GpuMat imgGpu(image);
+		cv::gpu::GpuMat maskGpu(mask);
 		try
 		{
-			orb_(imgGpu, cv::gpu::GpuMat(), keypoints);
+			orb_(imgGpu, maskGpu, keypoints);
 		}
 		catch(cv::Exception &e)
 		{
@@ -411,7 +419,7 @@ KeypointDetector * Settings::createKeypointDetector()
 				getFeature2D_Dense_initImgBound(),
 				getFeature2D_Dense_varyXyStepWithScale(),
 				getFeature2D_Dense_varyImgBoundWithScale());
-		if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 case 1:
@@ -422,14 +430,14 @@ KeypointDetector * Settings::createKeypointDetector()
 			detectorGPU = new GPUFAST(
 					getFeature2D_Fast_threshold(),
 					getFeature2D_Fast_nonmaxSuppression());
-			if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s GPU\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s GPU", strategies.at(index).toStdString().c_str());
 		}
 		else
 		{
 			detector = new cv::FastFeatureDetector(
 					getFeature2D_Fast_threshold(),
 					getFeature2D_Fast_nonmaxSuppression());
-			if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 		}
 	}
 	break;
@@ -443,7 +451,7 @@ KeypointDetector * Settings::createKeypointDetector()
 				getFeature2D_GFTT_blockSize(),
 				getFeature2D_GFTT_useHarrisDetector(),
 				getFeature2D_GFTT_k());
-		if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 case 3:
@@ -459,7 +467,7 @@ KeypointDetector * Settings::createKeypointDetector()
 				getFeature2D_MSER_areaThreshold(),
 				getFeature2D_MSER_minMargin(),
 				getFeature2D_MSER_edgeBlurSize());
-		if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 case 4:
@@ -478,7 +486,7 @@ KeypointDetector * Settings::createKeypointDetector()
 					getFeature2D_ORB_patchSize(),
 					getFeature2D_Fast_threshold(),
 					getFeature2D_Fast_nonmaxSuppression());
-			if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s (GPU)\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s (GPU)", strategies.at(index).toStdString().c_str());
 		}
 		else
 		{
@@ -491,7 +499,7 @@ KeypointDetector * Settings::createKeypointDetector()
 					getFeature2D_ORB_WTA_K(),
 					getFeature2D_ORB_scoreType(),
 					getFeature2D_ORB_patchSize());
-			if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 		}
 	}
 	break;
@@ -504,7 +512,7 @@ KeypointDetector * Settings::createKeypointDetector()
 				getFeature2D_SIFT_contrastThreshold(),
 				getFeature2D_SIFT_edgeThreshold(),
 				getFeature2D_SIFT_sigma());
-		if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 case 6:
@@ -516,7 +524,7 @@ KeypointDetector * Settings::createKeypointDetector()
 				getFeature2D_Star_lineThresholdProjected(),
 				getFeature2D_Star_lineThresholdBinarized(),
 				getFeature2D_Star_suppressNonmaxSize());
-		if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 case 7:
@@ -531,7 +539,7 @@ KeypointDetector * Settings::createKeypointDetector()
 					getFeature2D_SURF_extended(),
 					getFeature2D_SURF_keypointsRatio(),
 					getFeature2D_SURF_upright());
-			if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s (GPU)\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s (GPU)", strategies.at(index).toStdString().c_str());
 		}
 		else
 		{
@@ -541,7 +549,7 @@ KeypointDetector * Settings::createKeypointDetector()
 					getFeature2D_SURF_nOctaveLayers(),
 					getFeature2D_SURF_extended(),
 					getFeature2D_SURF_upright());
-			if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 		}
 	}
 	break;
@@ -552,7 +560,7 @@ KeypointDetector * Settings::createKeypointDetector()
 				getFeature2D_BRISK_thresh(),
 				getFeature2D_BRISK_octaves(),
 				getFeature2D_BRISK_patternScale());
-		if(VERBOSE)printf("Settings::createFeaturesDetector() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 default:
@@ -595,7 +603,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor()
 	{
 		extractor = new cv::BriefDescriptorExtractor(
 				getFeature2D_Brief_bytes());
-		if(VERBOSE)printf("Settings::createDescriptorsExtractor() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 case 1:
@@ -614,7 +622,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor()
 					getFeature2D_ORB_patchSize(),
 					getFeature2D_Fast_threshold(),
 					getFeature2D_Fast_nonmaxSuppression());
-			if(VERBOSE)printf("Settings::createDescriptorsExtractor() type=%s (GPU)\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s (GPU)", strategies.at(index).toStdString().c_str());
 		}
 		else
 		{
@@ -627,7 +635,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor()
 					getFeature2D_ORB_WTA_K(),
 					getFeature2D_ORB_scoreType(),
 					getFeature2D_ORB_patchSize());
-			if(VERBOSE)printf("Settings::createDescriptorsExtractor() type=%s\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 		}
 	}
 	break;
@@ -640,7 +648,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor()
 				getFeature2D_SIFT_contrastThreshold(),
 				getFeature2D_SIFT_edgeThreshold(),
 				getFeature2D_SIFT_sigma());
-		if(VERBOSE)printf("Settings::createDescriptorsExtractor() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 case 3:
@@ -655,7 +663,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor()
 					getFeature2D_SURF_extended(),
 					getFeature2D_SURF_keypointsRatio(),
 					getFeature2D_SURF_upright());
-			if(VERBOSE)printf("Settings::createDescriptorsExtractor() type=%s (GPU)\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s (GPU)", strategies.at(index).toStdString().c_str());
 		}
 		else
 		{
@@ -665,7 +673,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor()
 					getFeature2D_SURF_nOctaveLayers(),
 					getFeature2D_SURF_extended(),
 					getFeature2D_SURF_upright());
-			if(VERBOSE)printf("Settings::createDescriptorsExtractor() type=%s\n", strategies.at(index).toStdString().c_str());
+			UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 		}
 	}
 	break;
@@ -676,7 +684,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor()
 				getFeature2D_BRISK_thresh(),
 				getFeature2D_BRISK_octaves(),
 				getFeature2D_BRISK_patternScale());
-		if(VERBOSE)printf("Settings::createDescriptorsExtractor() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 case 5:
@@ -687,7 +695,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor()
 				getFeature2D_FREAK_scaleNormalized(),
 				getFeature2D_FREAK_patternScale(),
 				getFeature2D_FREAK_nOctaves());
-		if(VERBOSE)printf("Settings::createDescriptorsExtractor() type=%s\n", strategies.at(index).toStdString().c_str());
+		UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
 	}
 	break;
 default:
@@ -745,14 +753,14 @@ cv::flann::IndexParams * Settings::createFlannIndexParams()
 case 0:
 	if(strategies.at(index).compare("Linear") == 0)
 	{
-		if(VERBOSE)printf("Settings::getFlannIndexParams() type=%s\n", "Linear");
+		UDEBUG("type=%s", "Linear");
 		params = new cv::flann::LinearIndexParams();
 	}
 	break;
 case 1:
 	if(strategies.at(index).compare("KDTree") == 0)
 	{
-		if(VERBOSE)printf("Settings::getFlannIndexParams() type=%s\n", "KDTree");
+		UDEBUG("type=%s", "KDTree");
 		params = new cv::flann::KDTreeIndexParams(
 				getNearestNeighbor_KDTree_trees());
 	}
@@ -772,7 +780,7 @@ cv::flann::IndexParams * Settings::createFlannIndexParams()
 			centers_init = (cvflann::flann_centers_init_t)index;
 		}
 	}
-	if(VERBOSE)printf("Settings::getFlannIndexParams() type=%s\n", "KMeans");
+	UDEBUG("type=%s", "KMeans");
 	params = new cv::flann::KMeansIndexParams(
 			getNearestNeighbor_KMeans_branching(),
 			getNearestNeighbor_KMeans_iterations(),
@@ -795,7 +803,7 @@ cv::flann::IndexParams * Settings::createFlannIndexParams()
 			centers_init = (cvflann::flann_centers_init_t)index;
 		}
 	}
-	if(VERBOSE)printf("Settings::getFlannIndexParams() type=%s\n", "Composite");
+	UDEBUG("type=%s", "Composite");
 	params = new cv::flann::CompositeIndexParams(
 			getNearestNeighbor_Composite_trees(),
 			getNearestNeighbor_Composite_branching(),
@@ -807,7 +815,7 @@ cv::flann::IndexParams * Settings::createFlannIndexParams()
 case 4:
 	if(strategies.at(index).compare("Autotuned") == 0)
 	{
-		if(VERBOSE)printf("Settings::getFlannIndexParams() type=%s\n", "Autotuned");
+		UDEBUG("type=%s", "Autotuned");
 		params = new cv::flann::AutotunedIndexParams(
 				getNearestNeighbor_Autotuned_target_precision(),
 				getNearestNeighbor_Autotuned_build_weight(),
@@ -818,7 +826,7 @@ cv::flann::IndexParams * Settings::createFlannIndexParams()
 case 5:
 	if(strategies.at(index).compare("Lsh") == 0)
 	{
-		if(VERBOSE)printf("Settings::getFlannIndexParams() type=%s\n", "Lsh");
+		UDEBUG("type=%s", "Lsh");
 		params = new cv::flann::LshIndexParams(
 				getNearestNeighbor_Lsh_table_number(),
 				getNearestNeighbor_Lsh_key_size(),
@@ -858,7 +866,6 @@ cvflann::flann_distance_t Settings::getFlannDistanceType()
 			}
 		}
 	}
-	if(VERBOSE)printf("Settings::getFlannDistanceType() distance=%d\n", distance);
 	return distance;
 }
@@ -896,7 +903,7 @@ int Settings::getHomographyMethod()
 			}
 		}
 	}
-	if(VERBOSE)printf("Settings::getHomographyMethod() method=%d\n", method);
+	UDEBUG("method=%d", method);
 	return method;
 }
@@ -912,15 +919,17 @@ KeypointDetector::KeypointDetector(GPUFeature2D * gpuFeature2D) :
 {
 	Q_ASSERT(gpuFeature2D_!=0);
 }
 
-void KeypointDetector::detect(const cv::Mat & image, std::vector<cv::KeyPoint> & keypoints)
+void KeypointDetector::detect(const cv::Mat & image,
+		std::vector<cv::KeyPoint> & keypoints,
+		const cv::Mat & mask)
 {
 	if(featureDetector_)
 	{
-		featureDetector_->detect(image, keypoints);
+		featureDetector_->detect(image, keypoints, mask);
 	}
 	else // assume GPU
 	{
-		gpuFeature2D_->detectKeypoints(image, keypoints);
+		gpuFeature2D_->detectKeypoints(image, keypoints, mask);
 	}
 }

==== mainWindow.ui ====

@@ -7,7 +7,7 @@
    <x>0</x>
    <y>0</y>
    <width>826</width>
-   <height>506</height>
+   <height>523</height>
   </rect>
  </property>
  <property name="windowTitle">
@@ -344,7 +344,7 @@
      <x>0</x>
      <y>0</y>
      <width>198</width>
-     <height>376</height>
+     <height>393</height>
     </rect>
    </property>
    <layout class="QVBoxLayout" name="verticalLayout_objects">
@@ -449,83 +449,83 @@
        <number>0</number>
       </property>
       <item row="4" column="1">
-       <widget class="QLabel" name="label_timeMatching">
-        <property name="text">
-         <string>000</string>
-        </property>
-       </widget>
-      </item>
-      <item row="3" column="2">
-       <widget class="QLabel" name="label_9">
-        <property name="text">
-         <string>ms</string>
-        </property>
-       </widget>
-      </item>
-      <item row="4" column="2">
-       <widget class="QLabel" name="label_10">
-        <property name="text">
-         <string>ms</string>
-        </property>
-       </widget>
-      </item>
-      <item row="3" column="0">
-       <widget class="QLabel" name="label_8">
-        <property name="text">
-         <string>Descriptors indexing</string>
-        </property>
-       </widget>
-      </item>
-      <item row="3" column="1">
        <widget class="QLabel" name="label_timeIndexing">
         <property name="text">
         <string>000</string>
        </property>
       </widget>
      </item>
+      <item row="5" column="1">
+       <widget class="QLabel" name="label_timeMatching">
+        <property name="text">
+         <string>000</string>
+        </property>
+       </widget>
+      </item>
+      <item row="4" column="2">
+       <widget class="QLabel" name="label_9">
+        <property name="text">
+         <string>ms</string>
+        </property>
+       </widget>
+      </item>
       <item row="5" column="2">
+       <widget class="QLabel" name="label_10">
+        <property name="text">
+         <string>ms</string>
+        </property>
+       </widget>
+      </item>
+      <item row="4" column="0">
+       <widget class="QLabel" name="label_8">
+        <property name="text">
+         <string>Descriptors indexing</string>
+        </property>
+       </widget>
+      </item>
+      <item row="6" column="2">
        <widget class="QLabel" name="label_12">
        <property name="text">
         <string>ms</string>
        </property>
       </widget>
      </item>
-      <item row="7" column="1">
+      <item row="8" column="1">
       <widget class="QLabel" name="label_minMatchedDistance">
        <property name="text">
         <string>000</string>
        </property>
       </widget>
      </item>
-      <item row="7" column="0">
+      <item row="8" column="0">
       <widget class="QLabel" name="label_13">
        <property name="text">
         <string>Min matched distance</string>
        </property>
       </widget>
      </item>
-      <item row="8" column="0">
+      <item row="9" column="0">
       <widget class="QLabel" name="label_14">
        <property name="text">
         <string>Max matched distance</string>
        </property>
       </widget>
      </item>
-      <item row="8" column="1">
+      <item row="9" column="1">
       <widget class="QLabel" name="label_maxMatchedDistance">
        <property name="text">
         <string>000</string>
        </property>
       </widget>
      </item>
-      <item row="5" column="0">
+      <item row="6" column="0">
       <widget class="QLabel" name="label_11">
        <property name="text">
         <string>Homograhies</string>
        </property>
       </widget>
      </item>
-      <item row="2" column="0">
+      <item row="3" column="0">
       <widget class="QLabel" name="label_2">
        <property name="text">
         <string>Descriptors extraction</string>
@@ -539,14 +539,14 @@
        </property>
       </widget>
      </item>
-      <item row="2" column="1">
+      <item row="3" column="1">
       <widget class="QLabel" name="label_timeExtraction">
        <property name="text">
         <string>000</string>
        </property>
       </widget>
      </item>
-      <item row="2" column="2">
+      <item row="3" column="2">
       <widget class="QLabel" name="label_4">
        <property name="text">
         <string>ms</string>
@@ -567,7 +567,7 @@
        </property>
       </widget>
      </item>
-      <item row="4" column="0">
+      <item row="5" column="0">
       <widget class="QLabel" name="label_7">
        <property name="text">
         <string>Descriptors matching</string>
@@ -595,80 +595,101 @@
        </property>
       </widget>
      </item>
-      <item row="9" column="0">
+      <item row="10" column="0">
       <widget class="QLabel" name="label_17">
        <property name="text">
         <string>Vocabulary size</string>
        </property>
       </widget>
      </item>
-      <item row="9" column="1">
+      <item row="10" column="1">
       <widget class="QLabel" name="label_vocabularySize">
        <property name="text">
         <string>000</string>
        </property>
       </widget>
      </item>
-      <item row="10" column="0">
+      <item row="11" column="0">
       <widget class="QLabel" name="label_18">
        <property name="text">
         <string>IP address</string>
        </property>
       </widget>
      </item>
-      <item row="11" column="0">
+      <item row="12" column="0">
       <widget class="QLabel" name="label_19">
        <property name="text">
         <string>Output detection port</string>
        </property>
       </widget>
      </item>
-      <item row="10" column="1">
+      <item row="11" column="1">
       <widget class="QLabel" name="label_ipAddress">
        <property name="text">
         <string>0.0.0.0</string>
        </property>
       </widget>
      </item>
-      <item row="11" column="1">
+      <item row="12" column="1">
       <widget class="QLabel" name="label_port">
        <property name="text">
         <string>0</string>
        </property>
       </widget>
      </item>
-      <item row="6" column="0">
+      <item row="7" column="0">
       <widget class="QLabel" name="label_20">
        <property name="text">
         <string>Objects detected</string>
        </property>
       </widget>
      </item>
-      <item row="6" column="1">
+      <item row="7" column="1">
       <widget class="QLabel" name="label_objectsDetected">
        <property name="text">
         <string>000</string>
        </property>
       </widget>
      </item>
-      <item row="5" column="1">
+      <item row="6" column="1">
       <widget class="QLabel" name="label_timeHomographies">
        <property name="text">
         <string>000</string>
        </property>
       </widget>
      </item>
-      <item row="12" column="0">
+      <item row="13" column="0">
       <widget class="QLabel" name="label_21">
        <property name="text">
         <string>Input image port</string>
        </property>
       </widget>
      </item>
-      <item row="12" column="1">
+      <item row="13" column="1">
       <widget class="QLabel" name="label_port_image">
        <property name="text">
-        <string>NA</string>
+        <string>-</string>
        </property>
       </widget>
      </item>
+      <item row="2" column="0">
+       <widget class="QLabel" name="label_22">
+        <property name="text">
+         <string>Affine transforms</string>
+        </property>
+       </widget>
+      </item>
+      <item row="2" column="1">
+       <widget class="QLabel" name="label_timeSkewAffine">
+        <property name="text">
+         <string>000</string>
+        </property>
+       </widget>
+      </item>
+      <item row="2" column="2">
+       <widget class="QLabel" name="label_23">
+        <property name="text">
+         <string>ms</string>
+        </property>
+       </widget>
+      </item>