From b07239a0003e67dba1ecef5eff126f2f352eba7f Mon Sep 17 00:00:00 2001 From: matlabbe Date: Tue, 23 Jun 2015 16:38:47 -0400 Subject: [PATCH] Added OpenCV 3 support (CUDA module not yet tested) --- CMakeLists.txt | 26 +- example/main.cpp | 61 +++-- include/find_object/Settings.h | 59 +++- src/AddObjectDialog.cpp | 2 +- src/FindObject.cpp | 10 + src/MainWindow.cpp | 11 +- src/ParametersToolBox.cpp | 42 ++- src/Settings.cpp | 485 ++++++++++++++++++++++++++++----- src/Vocabulary.cpp | 35 ++- tools/similarity/main.cpp | 27 +- 10 files changed, 632 insertions(+), 126 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 19c1539c..b565251c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -31,11 +31,11 @@ FIND_PACKAGE(OpenCV REQUIRED) # tested on 2.3.1 FIND_PACKAGE(Qt4 COMPONENTS QtCore QtGui QtNetwork REQUIRED) ADD_DEFINITIONS(-DQT_NO_KEYWORDS) # To avoid conflicts with boost signals used in ROS -IF(OPENCV_NONFREE_FOUND) - SET(NONFREE 1) -ELSE() - SET(NONFREE 0) -ENDIF() +SET(NONFREE 0) +IF(OPENCV_NONFREE_FOUND OR OPENCV_XFEATURES2D_FOUND) + SET(NONFREE 1) +ENDIF(OPENCV_NONFREE_FOUND OR OPENCV_XFEATURES2D_FOUND) + CONFIGURE_FILE(Version.h.in ${PROJECT_SOURCE_DIR}/include/${PROJECT_PREFIX}/Version.h) IF(NOT CATKIN_BUILD) @@ -232,11 +232,21 @@ IF(NOT CATKIN_BUILD) MESSAGE(STATUS "Info :") MESSAGE(STATUS " CMAKE_INSTALL_PREFIX = ${CMAKE_INSTALL_PREFIX}") MESSAGE(STATUS " CMAKE_BUILD_TYPE = ${CMAKE_BUILD_TYPE}") - IF(OPENCV_NONFREE_FOUND) - MESSAGE(STATUS " With OpenCV nonfree module (SIFT/SURF) = YES") + + IF(OpenCV_VERSION_MAJOR EQUAL 2) + IF(OPENCV_NONFREE_FOUND) + MESSAGE(STATUS " With OpenCV 2 nonfree module (SIFT/SURF) = YES") + ELSE() + MESSAGE(STATUS " With OpenCV 2 nonfree module (SIFT/SURF) = NO (not found)") + ENDIF() ELSE() - MESSAGE(STATUS " With OpenCV nonfree module (SIFT/SURF) = NO (not found)") + IF(OPENCV_XFEATURES2D_FOUND) + MESSAGE(STATUS " With OpenCV 3 xfeatures2d module (SIFT/SURF/BRIEF/FREAK) = YES") + ELSE() + MESSAGE(STATUS " With OpenCV 3 xfeatures2d module (SIFT/SURF/BRIEF/FREAK) = NO (not found)") + ENDIF() ENDIF() + IF(APPLE) MESSAGE(STATUS " BUILD_AS_BUNDLE = ${BUILD_AS_BUNDLE}") ENDIF(APPLE) diff --git a/example/main.cpp b/example/main.cpp index 7bace09f..65e4494f 100644 --- a/example/main.cpp +++ b/example/main.cpp @@ -39,9 +39,22 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include #include #include -#include #include // for homography +#include + +#ifdef HAVE_OPENCV_NONFREE + #if CV_MAJOR_VERSION == 2 && CV_MINOR_VERSION >=4 + #include + #include + #endif +#endif +#ifdef HAVE_OPENCV_XFEATURES2D + #include + #include +#endif + + // From this project #include "find_object/ObjWidget.h" #include "find_object/QtOpenCV.h" @@ -85,16 +98,21 @@ int main(int argc, char * argv[]) //////////////////////////// // EXTRACT KEYPOINTS //////////////////////////// + cv::Ptr detector; // The detector can be any of (see OpenCV features2d.hpp): - // cv::FeatureDetector * detector = new cv::DenseFeatureDetector(); - // cv::FeatureDetector * detector = new cv::FastFeatureDetector(); - // cv::FeatureDetector * detector = new cv::GFTTDetector(); - // cv::FeatureDetector * detector = new cv::MSER(); - // cv::FeatureDetector * detector = new cv::ORB(); - cv::FeatureDetector * detector = new cv::SIFT(); - // cv::FeatureDetector * detector = new cv::StarFeatureDetector(); - // cv::FeatureDetector * detector = new cv::SURF(600.0); - // cv::FeatureDetector * detector = new cv::BRISK(); +#if CV_MAJOR_VERSION == 2 + // detector = cv::Ptr(new cv::DenseFeatureDetector()); + // detector = cv::Ptr(new cv::FastFeatureDetector()); + // detector = cv::Ptr(new cv::GFTTDetector()); + // detector = cv::Ptr(new cv::MSER()); + // detector = cv::Ptr(new cv::ORB()); + detector = cv::Ptr(new cv::SIFT()); + // detector = cv::Ptr(new cv::StarFeatureDetector()); + // detector = cv::Ptr(new cv::SURF(600.0)); + // detector = cv::Ptr(new cv::BRISK()); +#else + detector = cv::xfeatures2d::SIFT::create(); +#endif detector->detect(objectImg, objectKeypoints); printf("Object: %d keypoints detected in %d ms\n", (int)objectKeypoints.size(), time.restart()); detector->detect(sceneImg, sceneKeypoints); @@ -103,13 +121,18 @@ int main(int argc, char * argv[]) //////////////////////////// // EXTRACT DESCRIPTORS //////////////////////////// + cv::Ptr extractor; +#if CV_MAJOR_VERSION == 2 // The extractor can be any of (see OpenCV features2d.hpp): - // cv::DescriptorExtractor * extractor = new cv::BriefDescriptorExtractor(); - // cv::DescriptorExtractor * extractor = new cv::ORB(); - cv::DescriptorExtractor * extractor = new cv::SIFT(); - // cv::DescriptorExtractor * extractor = new cv::SURF(600.0); - // cv::DescriptorExtractor * extractor = new cv::BRISK(); - // cv::DescriptorExtractor * extractor = new cv::FREAK(); + // extractor = cv::Ptr(new cv::BriefDescriptorExtractor()); + // extractor = cv::Ptr(new cv::ORB()); + extractor = cv::Ptr(new cv::SIFT()); + // extractor = cv::Ptr(new cv::SURF(600.0)); + // extractor = cv::Ptr(new cv::BRISK()); + // extractor = cv::Ptr(new cv::FREAK()); +#else + extractor = cv::xfeatures2d::SIFT::create(); +#endif extractor->compute(objectImg, objectKeypoints, objectDescriptors); printf("Object: %d descriptors extracted in %d ms\n", objectDescriptors.rows, time.restart()); extractor->compute(sceneImg, sceneKeypoints, sceneDescriptors); @@ -309,12 +332,6 @@ int main(int argc, char * argv[]) int r = app.exec(); printf("Closing...\n"); - //////////////////////////// - //Cleanup - //////////////////////////// - delete detector; - delete extractor; - return r; } else diff --git a/include/find_object/Settings.h b/include/find_object/Settings.h index dd5245f4..f0579ad7 100644 --- a/include/find_object/Settings.h +++ b/include/find_object/Settings.h @@ -115,8 +115,8 @@ class FINDOBJECT_EXP Settings //List format : [Index:item0;item1;item3;...] 
- PARAMETER_COND(Feature2D, 1Detector, QString, FINDOBJECT_NONFREE, "7:Dense;Fast;GFTT;MSER;ORB;SIFT;Star;SURF;BRISK" , "2:Dense;Fast;GFTT;MSER;ORB;SIFT;Star;SURF;BRISK", "Keypoint detector."); - PARAMETER_COND(Feature2D, 2Descriptor, QString, FINDOBJECT_NONFREE, "3:Brief;ORB;SIFT;SURF;BRISK;FREAK", "5:Brief;ORB;SIFT;SURF;BRISK;FREAK", "Keypoint descriptor."); + PARAMETER_COND(Feature2D, 1Detector, QString, FINDOBJECT_NONFREE, "7:Dense;Fast;GFTT;MSER;ORB;SIFT;Star;SURF;BRISK;AGAST;KAZE;AKAZE" , "4:Dense;Fast;GFTT;MSER;ORB;SIFT;Star;SURF;BRISK;AGAST;KAZE;AKAZE", "Keypoint detector."); + PARAMETER_COND(Feature2D, 2Descriptor, QString, FINDOBJECT_NONFREE, "3:Brief;ORB;SIFT;SURF;BRISK;FREAK;KAZE;AKAZE;LUCID;LATCH;DAISY", "1:Brief;ORB;SIFT;SURF;BRISK;FREAK;KAZE;AKAZE;LUCID;LATCH;DAISY", "Keypoint descriptor."); PARAMETER(Feature2D, 3MaxFeatures, int, 0, "Maximum features per image. If the number of features extracted is over this threshold, only X features with the highest response are kept. 0 means all features are kept."); PARAMETER(Feature2D, 4Affine, bool, false, "(ASIFT) Extract features on multiple affine transformations of the image."); PARAMETER(Feature2D, 5AffineCount, int, 6, "(ASIFT) Higher the value, more affine transformations will be done."); @@ -127,6 +127,7 @@ class FINDOBJECT_EXP Settings PARAMETER(Feature2D, Brief_bytes, int, 32, "Bytes is a length of descriptor in bytes. It can be equal 16, 32 or 64 bytes."); +#if CV_MAJOR_VERSION < 3 PARAMETER(Feature2D, Dense_initFeatureScale, float, 1.f, ""); PARAMETER(Feature2D, Dense_featureScaleLevels, int, 1, ""); PARAMETER(Feature2D, Dense_featureScaleMul, float, 0.1f, ""); @@ -134,18 +135,34 @@ class FINDOBJECT_EXP Settings PARAMETER(Feature2D, Dense_initImgBound, int, 0, ""); PARAMETER(Feature2D, Dense_varyXyStepWithScale, bool, true, ""); PARAMETER(Feature2D, Dense_varyImgBoundWithScale, bool, false, ""); +#endif PARAMETER(Feature2D, Fast_threshold, int, 10, "Threshold on difference between intensity of the central pixel and pixels of a circle around this pixel."); PARAMETER(Feature2D, Fast_nonmaxSuppression, bool, true, "If true, non-maximum suppression is applied to detected corners (keypoints)."); PARAMETER(Feature2D, Fast_gpu, bool, false, "GPU-FAST: Use GPU version of FAST. 
This option is enabled only if OpenCV is built with CUDA and GPUs are detected."); PARAMETER(Feature2D, Fast_keypointsRatio, double, 0.05, "Used with FAST GPU."); - PARAMETER(Feature2D, GFTT_maxCorners, int, 1000, ""); - PARAMETER(Feature2D, GFTT_qualityLevel, double, 0.01, ""); - PARAMETER(Feature2D, GFTT_minDistance, double, 1, ""); - PARAMETER(Feature2D, GFTT_blockSize, int, 3, ""); - PARAMETER(Feature2D, GFTT_useHarrisDetector, bool, false, ""); - PARAMETER(Feature2D, GFTT_k, double, 0.04, ""); + PARAMETER(Feature2D, AGAST_threshold, int, 10, "Threshold on difference between intensity of the central pixel and pixels of a circle around this pixel."); + PARAMETER(Feature2D, AGAST_nonmaxSuppression, bool, true, "If true, non-maximum suppression is applied to detected corners (keypoints)."); + + PARAMETER(Feature2D, KAZE_extended, bool, false, "Set to enable extraction of extended (128-byte) descriptor."); + PARAMETER(Feature2D, KAZE_upright, bool, false, "Set to enable use of upright descriptors (non rotation-invariant)."); + PARAMETER(Feature2D, KAZE_threshold, float, 0.001f, "Detector response threshold to accept point"); + PARAMETER(Feature2D, KAZE_nOctaves, int, 4, "Maximum octave evolution of the image."); + PARAMETER(Feature2D, KAZE_nOctaveLayers, int, 4, "Default number of sublevels per scale level."); + + PARAMETER(Feature2D, AKAZE_descriptorSize, int, 0, "Size of the descriptor in bits. 0 -> Full size."); + PARAMETER(Feature2D, AKAZE_descriptorChannels, int, 3, "Number of channels in the descriptor (1, 2, 3)."); + PARAMETER(Feature2D, AKAZE_threshold, float, 0.001f, "Detector response threshold to accept point."); + PARAMETER(Feature2D, AKAZE_nOctaves, int, 4, "Maximum octave evolution of the image."); + PARAMETER(Feature2D, AKAZE_nOctaveLayers, int, 4, "Default number of sublevels per scale level."); + + PARAMETER(Feature2D, GFTT_maxCorners, int, 1000, "Maximum number of corners to return. If there are more corners than are found, the strongest of them is returned."); + PARAMETER(Feature2D, GFTT_qualityLevel, double, 0.01, "Parameter characterizing the minimal accepted quality of image corners. The parameter value is multiplied by the best corner quality measure, which is the minimal eigenvalue (see cornerMinEigenVal ) or the Harris function response (see cornerHarris ). The corners with the quality measure less than the product are rejected. For example, if the best corner has the quality measure = 1500, and the qualityLevel=0.01 , then all the corners with the quality measure less than 15 are rejected."); + PARAMETER(Feature2D, GFTT_minDistance, double, 1, "Minimum possible Euclidean distance between the returned corners."); + PARAMETER(Feature2D, GFTT_blockSize, int, 3, "Size of an average block for computing a derivative covariation matrix over each pixel neighborhood. See cornerEigenValsAndVecs."); + PARAMETER(Feature2D, GFTT_useHarrisDetector, bool, false, "Parameter indicating whether to use a Harris detector (see cornerHarris) or cornerMinEigenVal."); + PARAMETER(Feature2D, GFTT_k, double, 0.04, "Free parameter of the Harris detector."); PARAMETER(Feature2D, ORB_nFeatures, int, 500, "The maximum number of features to retain."); PARAMETER(Feature2D, ORB_scaleFactor, float, 1.2f, "Pyramid decimation ratio, greater than 1. scaleFactor==2 means the classical pyramid, where each next level has 4x less pixels than the previous, but such a big scale factor will degrade feature matching scores dramatically. 
On the other hand, too close to 1 scale factor will mean that to cover certain scale range you will need more pyramid levels and so the speed will suffer."); @@ -167,7 +184,6 @@ class FINDOBJECT_EXP Settings PARAMETER(Feature2D, MSER_minMargin, double, 0.003, ""); PARAMETER(Feature2D, MSER_edgeBlurSize, int, 5, ""); -#if FINDOBJECT_NONFREE == 1 PARAMETER(Feature2D, SIFT_nfeatures, int, 0, "The number of best features to retain. The features are ranked by their scores (measured in SIFT algorithm as the local contrast)."); PARAMETER(Feature2D, SIFT_nOctaveLayers, int, 3, "The number of layers in each octave. 3 is the value used in D. Lowe paper. The number of octaves is computed automatically from the image resolution."); PARAMETER(Feature2D, SIFT_contrastThreshold, double, 0.04, "The contrast threshold used to filter out weak features in semi-uniform (low-contrast) regions. The larger the threshold, the less features are produced by the detector."); @@ -181,7 +197,6 @@ class FINDOBJECT_EXP Settings PARAMETER(Feature2D, SURF_upright, bool, false, "Up-right or rotated features flag (true - do not compute orientation of features; false - compute orientation)."); PARAMETER(Feature2D, SURF_gpu, bool, false, "GPU-SURF: Use GPU version of SURF. This option is enabled only if OpenCV is built with CUDA and GPUs are detected."); PARAMETER(Feature2D, SURF_keypointsRatio, float, 0.01f, "Used with SURF GPU."); -#endif PARAMETER(Feature2D, Star_maxSize, int, 45, ""); PARAMETER(Feature2D, Star_responseThreshold, int, 30, ""); @@ -198,6 +213,20 @@ class FINDOBJECT_EXP Settings PARAMETER(Feature2D, FREAK_patternScale, float, 22.0f, "Scaling of the description pattern."); PARAMETER(Feature2D, FREAK_nOctaves, int, 4, "Number of octaves covered by the detected keypoints."); + PARAMETER(Feature2D, LUCID_kernel, int, 1, "Kernel for descriptor construction, where 1=3x3, 2=5x5, 3=7x7 and so forth."); + PARAMETER(Feature2D, LUCID_blur_kernel, int, 2, "Kernel for blurring image prior to descriptor construction, where 1=3x3, 2=5x5, 3=7x7 and so forth."); + + PARAMETER(Feature2D, LATCH_bytes, int, 32, "Size of the descriptor - can be 64, 32, 16, 8, 4, 2 or 1."); + PARAMETER(Feature2D, LATCH_rotationInvariance, bool, true, "Whether or not the descriptor should compansate for orientation changes."); + PARAMETER(Feature2D, LATCH_half_ssd_size, int, 3, "The size of half of the mini-patches size. 
For example, if we would like to compare triplets of patches of size 7x7x then the half_ssd_size should be (7-1)/2 = 3."); + + PARAMETER(Feature2D, DAISY_radius, float, 15, "Radius of the descriptor at the initial scale."); + PARAMETER(Feature2D, DAISY_q_radius, int, 3, "Amount of radial range division quantity."); + PARAMETER(Feature2D, DAISY_q_theta, int, 8, "Amount of angular range division quantity."); + PARAMETER(Feature2D, DAISY_q_hist, int, 8, "Amount of gradient orientations range division quantity."); + PARAMETER(Feature2D, DAISY_interpolation, bool, true, "Switch to disable interpolation for speed improvement at minor quality loss."); + PARAMETER(Feature2D, DAISY_use_orientation, bool, false, "Sample patterns using keypoints orientation, disabled by default."); + PARAMETER_COND(NearestNeighbor, 1Strategy, QString, FINDOBJECT_NONFREE, "1:Linear;KDTree;KMeans;Composite;Autotuned;Lsh;BruteForce", "6:Linear;KDTree;KMeans;Composite;Autotuned;Lsh;BruteForce", "Nearest neighbor strategy."); PARAMETER_COND(NearestNeighbor, 2Distance_type, QString, FINDOBJECT_NONFREE, "0:EUCLIDEAN_L2;MANHATTAN_L1;MINKOWSKI;MAX;HIST_INTERSECT;HELLINGER;CHI_SQUARE_CS;KULLBACK_LEIBLER_KL;HAMMING", "1:EUCLIDEAN_L2;MANHATTAN_L1;MINKOWSKI;MAX;HIST_INTERSECT;HELLINGER;CHI_SQUARE_CS;KULLBACK_LEIBLER_KL;HAMMING", "Distance type."); PARAMETER(NearestNeighbor, 3nndrRatioUsed, bool, true, "Nearest neighbor distance ratio approach to accept the best match."); @@ -317,30 +346,32 @@ private: class KeypointDetector { public: - KeypointDetector(cv::FeatureDetector * featureDetector); + KeypointDetector(cv::Ptr & featureDetector); KeypointDetector(GPUFeature2D * gpuFeature2D); + virtual ~KeypointDetector(); void detect(const cv::Mat & image, std::vector & keypoints, const cv::Mat & mask = cv::Mat()); private: - cv::FeatureDetector * featureDetector_; + cv::Ptr featureDetector_; GPUFeature2D * gpuFeature2D_; }; class DescriptorExtractor { public: - DescriptorExtractor(cv::DescriptorExtractor * descriptorExtractor); + DescriptorExtractor(cv::Ptr & descriptorExtractor); DescriptorExtractor(GPUFeature2D * gpuFeature2D); + virtual ~DescriptorExtractor(); void compute(const cv::Mat & image, std::vector & keypoints, cv::Mat & descriptors); private: - cv::DescriptorExtractor * descriptorExtractor_; + cv::Ptr descriptorExtractor_; GPUFeature2D * gpuFeature2D_; }; diff --git a/src/AddObjectDialog.cpp b/src/AddObjectDialog.cpp index be5647d7..505f8c7c 100644 --- a/src/AddObjectDialog.cpp +++ b/src/AddObjectDialog.cpp @@ -382,7 +382,7 @@ void AddObjectDialog::update(const cv::Mat & image) } // Extract keypoints - cv::vector keypoints; + std::vector keypoints; detector_->detect(cameraImage_, keypoints); ui_->cameraView->setData(keypoints, cvtCvMat2QImage(cameraImage_)); diff --git a/src/FindObject.cpp b/src/FindObject.cpp index 972c883c..ed984d81 100644 --- a/src/FindObject.cpp +++ b/src/FindObject.cpp @@ -533,6 +533,7 @@ protected: keypoints_.clear(); descriptors_ = cv::Mat(); detector_->detect(image_, keypoints_); + UDEBUG("Detected %d keypoints from object %d...", (int)keypoints_.size(), objectId_); if(keypoints_.size()) { @@ -548,6 +549,7 @@ protected: try { extractor_->compute(image_, keypoints_, descriptors_); + UDEBUG("Extracted %d descriptors from object %d...", descriptors_.rows, objectId_); } catch(cv::Exception & e) { @@ -556,6 +558,14 @@ protected: descriptors_ = cv::Mat(); keypoints_.clear(); } + catch ( const std::exception& e ) + { + // standard exceptions + UERROR("Descriptor exception: %s. 
Maybe some keypoints are invalid " + "for the selected descriptor extractor.", e.what()); + descriptors_ = cv::Mat(); + keypoints_.clear(); + } timeExtraction_+=timeStep.restart(); if((int)keypoints_.size() != descriptors_.rows) diff --git a/src/MainWindow.cpp b/src/MainWindow.cpp index 3f4ff080..edaf8387 100644 --- a/src/MainWindow.cpp +++ b/src/MainWindow.cpp @@ -49,7 +49,12 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include "opencv2/calib3d/calib3d.hpp" #include "opencv2/imgproc/imgproc.hpp" -#include "opencv2/gpu/gpu.hpp" +#include +#if CV_MAJOR_VERSION < 3 +#include +#else +#include +#endif #include #include @@ -133,7 +138,11 @@ MainWindow::MainWindow(FindObject * findObject, Camera * camera, QWidget * paren ui_->actionLoad_scene_from_file->setVisible(false); } +#if CV_MAJOR_VERSION < 3 if(cv::gpu::getCudaEnabledDeviceCount() == 0) +#else + if(cv::cuda::getCudaEnabledDeviceCount() == 0) +#endif { #if FINDOBJECT_NONFREE == 1 ui_->toolBox->updateParameter(Settings::kFeature2D_SURF_gpu()); diff --git a/src/ParametersToolBox.cpp b/src/ParametersToolBox.cpp index 89855e74..b92259da 100644 --- a/src/ParametersToolBox.cpp +++ b/src/ParametersToolBox.cpp @@ -38,6 +38,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include #include #include "find_object/utilite/ULogger.h" +#include namespace find_object { @@ -337,27 +338,59 @@ void ParametersToolBox::addParameter(QVBoxLayout * layout, widget->setObjectName(key); QStringList splitted = value.split(':'); widget->addItems(splitted.last().split(';')); -#if FINDOBJECT_NONFREE == 0 + if(key.compare(Settings::kFeature2D_1Detector()) == 0) { +#if FINDOBJECT_NONFREE == 0 widget->setItemData(5, 0, Qt::UserRole - 1); // disable SIFT widget->setItemData(7, 0, Qt::UserRole - 1); // disable SURF +#endif +#if CV_MAJOR_VERSION < 3 + widget->setItemData(9, 0, Qt::UserRole - 1); // disable AGAST + widget->setItemData(10, 0, Qt::UserRole - 1); // disable KAZE + widget->setItemData(11, 0, Qt::UserRole - 1); // disable AKAZE +#else + widget->setItemData(0, 0, Qt::UserRole - 1); // disable Dense +#ifndef HAVE_OPENCV_XFEATURES2D + widget->setItemData(6, 0, Qt::UserRole - 1); // disable Star +#endif +#endif } if(key.compare(Settings::kFeature2D_2Descriptor()) == 0) { +#if FINDOBJECT_NONFREE == 0 widget->setItemData(2, 0, Qt::UserRole - 1); // disable SIFT widget->setItemData(3, 0, Qt::UserRole - 1); // disable SURF +#endif +#if CV_MAJOR_VERSION < 3 + widget->setItemData(6, 0, Qt::UserRole - 1); // disable KAZE + widget->setItemData(7, 0, Qt::UserRole - 1); // disable AKAZE + widget->setItemData(8, 0, Qt::UserRole - 1); // disable LUCID + widget->setItemData(9, 0, Qt::UserRole - 1); // disable LATCH + widget->setItemData(10, 0, Qt::UserRole - 1); // disable DAISY +#else + +#ifndef HAVE_OPENCV_XFEATURES2D + widget->setItemData(0, 0, Qt::UserRole - 1); // disable Brief + widget->setItemData(5, 0, Qt::UserRole - 1); // disable Freak + widget->setItemData(8, 0, Qt::UserRole - 1); // disable LUCID + widget->setItemData(9, 0, Qt::UserRole - 1); // disable LATCH + widget->setItemData(10, 0, Qt::UserRole - 1); // disable DAISY +#endif +#endif } if(key.compare(Settings::kNearestNeighbor_1Strategy()) == 0) { +#if FINDOBJECT_NONFREE == 0 && CV_MAJOR_VERSION < 3 // disable FLANN approaches (cannot be used with binary descriptors) widget->setItemData(0, 0, Qt::UserRole - 1); widget->setItemData(1, 0, Qt::UserRole - 1); widget->setItemData(2, 0, Qt::UserRole - 1); widget->setItemData(3, 0, Qt::UserRole - 1); widget->setItemData(4, 0, 
Qt::UserRole - 1); - } #endif + } + widget->setCurrentIndex(splitted.first().toInt()); connect(widget, SIGNAL(currentIndexChanged(int)), this, SLOT(changeParameter(int))); addParameter(layout, key, widget); @@ -525,7 +558,10 @@ void ParametersToolBox::changeParameter(const int & value) bool isBinaryDescriptor = descriptorBox->currentText().compare("ORB") == 0 || descriptorBox->currentText().compare("Brief") == 0 || descriptorBox->currentText().compare("BRISK") == 0 || - descriptorBox->currentText().compare("FREAK") == 0; + descriptorBox->currentText().compare("FREAK") == 0 || + descriptorBox->currentText().compare("AKAZE") == 0 || + descriptorBox->currentText().compare("LATCH") == 0 || + descriptorBox->currentText().compare("LUCID") == 0; if(isBinaryDescriptor && nnBox->currentText().compare("Lsh") != 0 && nnBox->currentText().compare("BruteForce") != 0) { QMessageBox::warning(this, diff --git a/src/Settings.cpp b/src/Settings.cpp index 5338c684..4fd4445d 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -34,11 +34,29 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include #include #include -#if FINDOBJECT_NONFREE == 1 -#include -#include -#endif +#include + +#if CV_MAJOR_VERSION < 3 #include +#define CVCUDA cv::gpu +#else +#include +#define CVCUDA cv::cuda +#ifdef HAVE_OPENCV_CUDAFEATURES2D +#include +#endif +#endif + +#ifdef HAVE_OPENCV_NONFREE + #if CV_MAJOR_VERSION == 2 && CV_MINOR_VERSION >=4 + #include + #include + #endif +#endif +#ifdef HAVE_OPENCV_XFEATURES2D + #include + #include +#endif namespace find_object { @@ -166,7 +184,7 @@ void Settings::loadSettings(const QString & fileName) UINFO("Settings set to defaults."); } - if(cv::gpu::getCudaEnabledDeviceCount() == 0) + if(CVCUDA::getCudaEnabledDeviceCount() == 0) { #if FINDOBJECT_NONFREE == 1 Settings::setFeature2D_SURF_gpu(false); @@ -292,8 +310,8 @@ public: std::vector & keypoints, const cv::Mat & mask = cv::Mat()) { - cv::gpu::GpuMat imgGpu(image); - cv::gpu::GpuMat maskGpu(mask); + CVCUDA::GpuMat imgGpu(image); + CVCUDA::GpuMat maskGpu(mask); try { surf_(imgGpu, maskGpu, keypoints); @@ -314,11 +332,11 @@ public: cv::Mat& descriptors) { std::vector d; - cv::gpu::GpuMat imgGpu(image); - cv::gpu::GpuMat descriptorsGPU; + CVCUDA::GpuMat imgGpu(image); + CVCUDA::GpuMat descriptorsGPU; try { - surf_(imgGpu, cv::gpu::GpuMat(), keypoints, descriptorsGPU, true); + surf_(imgGpu, CVCUDA::GpuMat(), keypoints, descriptorsGPU, true); } catch(cv::Exception &e) { @@ -341,7 +359,11 @@ public: } } private: - cv::gpu::SURF_GPU surf_; // HACK: static because detectImpl() is const! 
+#if CV_MAJOR_VERSION < 3 + CVCUDA::SURF_GPU surf_; +#else + CVCUDA::SURF_CUDA surf_; +#endif }; #endif @@ -350,10 +372,18 @@ class GPUFAST : public GPUFeature2D public: GPUFAST(int threshold=Settings::defaultFeature2D_Fast_threshold(), bool nonmaxSuppression=Settings::defaultFeature2D_Fast_nonmaxSuppression(), - double keypointsRatio=Settings::defaultFeature2D_Fast_keypointsRatio()) : - fast_(threshold, + double keypointsRatio=Settings::defaultFeature2D_Fast_keypointsRatio()) +#if CV_MAJOR_VERSION < 3 + : fast_(threshold, nonmaxSuppression, keypointsRatio) +#else +#ifdef HAVE_OPENCV_CUDAFEATURES2D + : fast_(threshold, + nonmaxSuppression, + keypointsRatio) +#endif +#endif { } virtual ~GPUFAST() {} @@ -363,9 +393,15 @@ protected: std::vector & keypoints, const cv::Mat & mask = cv::Mat()) { - cv::gpu::GpuMat imgGpu(image); - cv::gpu::GpuMat maskGpu(mask); + CVCUDA::GpuMat imgGpu(image); + CVCUDA::GpuMat maskGpu(mask); +#if CV_MAJOR_VERSION < 3 fast_(imgGpu, maskGpu, keypoints); +#else +#ifdef HAVE_OPENCV_CUDAFEATURES2D + fast_(imgGpu, maskGpu, keypoints); +#endif +#endif } void computeDescriptors( const cv::Mat& image, std::vector& keypoints, @@ -375,7 +411,13 @@ protected: } private: - cv::gpu::FAST_GPU fast_; +#if CV_MAJOR_VERSION < 3 + CVCUDA::FAST_GPU fast_; +#else +#ifdef HAVE_OPENCV_CUDAFEATURES2D + CVCUDA::FAST_GPU fast_; +#endif +#endif }; class GPUORB : public GPUFeature2D @@ -390,8 +432,9 @@ public: int scoreType = Settings::defaultFeature2D_ORB_scoreType(), int patchSize = Settings::defaultFeature2D_ORB_patchSize(), int fastThreshold = Settings::defaultFeature2D_Fast_threshold(), - bool fastNonmaxSupression = Settings::defaultFeature2D_Fast_nonmaxSuppression()) : - orb_(nFeatures, + bool fastNonmaxSupression = Settings::defaultFeature2D_Fast_nonmaxSuppression()) +#if CV_MAJOR_VERSION < 3 + : orb_(nFeatures, scaleFactor, nLevels, edgeThreshold , @@ -399,8 +442,26 @@ public: WTA_K, scoreType, patchSize) +#else +#ifdef HAVE_OPENCV_CUDAFEATURES2D + : orb_(nFeatures, + scaleFactor, + nLevels, + edgeThreshold , + firstLevel, + WTA_K, + scoreType, + patchSize) +#endif +#endif { +#if CV_MAJOR_VERSION < 3 orb_.setFastParams(fastThreshold, fastNonmaxSupression); +#else +#ifdef HAVE_OPENCV_CUDAFEATURES2D + orb_.setFastParams(fastThreshold, fastNonmaxSupression); +#endif +#endif } virtual ~GPUORB() {} @@ -409,11 +470,19 @@ protected: std::vector & keypoints, const cv::Mat & mask = cv::Mat()) { - cv::gpu::GpuMat imgGpu(image); - cv::gpu::GpuMat maskGpu(mask); + + CVCUDA::GpuMat imgGpu(image); + CVCUDA::GpuMat maskGpu(mask); + try { +#if CV_MAJOR_VERSION < 3 orb_(imgGpu, maskGpu, keypoints); +#else +#ifdef HAVE_OPENCV_CUDAFEATURES2D + orb_(imgGpu, maskGpu, keypoints); +#endif +#endif } catch(cv::Exception &e) { @@ -429,11 +498,19 @@ protected: cv::Mat& descriptors) { std::vector d; - cv::gpu::GpuMat imgGpu(image); - cv::gpu::GpuMat descriptorsGPU; + + CVCUDA::GpuMat imgGpu(image); + CVCUDA::GpuMat descriptorsGPU; + try { - orb_(imgGpu, cv::gpu::GpuMat(), keypoints, descriptorsGPU); // No option to use provided keypoints!? +#if CV_MAJOR_VERSION < 3 + orb_(imgGpu, CVCUDA::GpuMat(), keypoints, descriptorsGPU); // No option to use provided keypoints!? +#else +#ifdef HAVE_OPENCV_CUDAFEATURES2D + orb_(imgGpu, CVCUDA::GpuMat(), keypoints, descriptorsGPU); // No option to use provided keypoints!? 
+#endif +#endif } catch(cv::Exception &e) { @@ -453,12 +530,18 @@ protected: } } private: - cv::gpu::ORB_GPU orb_; +#if CV_MAJOR_VERSION < 3 + CVCUDA::ORB_GPU orb_; +#else +#ifdef HAVE_OPENCV_CUDAFEATURES2D + CVCUDA::ORB_GPU orb_; +#endif +#endif }; KeypointDetector * Settings::createKeypointDetector() { - cv::FeatureDetector * detector = 0; + cv::Ptr detector; GPUFeature2D * detectorGPU = 0; QString str = getFeature2D_1Detector(); QStringList split = str.split(':'); @@ -470,7 +553,7 @@ KeypointDetector * Settings::createKeypointDetector() { QStringList strategies = split.last().split(';'); - if(strategies.size() == 9 && index>=0 && index<9) + if(index>=0 && index(new cv::DenseFeatureDetector( + getFeature2D_Dense_initFeatureScale(), + getFeature2D_Dense_featureScaleLevels(), + getFeature2D_Dense_featureScaleMul(), + getFeature2D_Dense_initXyStep(), + getFeature2D_Dense_initImgBound(), + getFeature2D_Dense_varyXyStepWithScale(), + getFeature2D_Dense_varyImgBoundWithScale())); +#else + UWARN("Find-Object is not built with OpenCV 2 so Dense cannot be used!"); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } else if(strategies.at(index).compare("Fast") == 0) { - if(getFeature2D_Fast_gpu() && cv::gpu::getCudaEnabledDeviceCount()) + if(getFeature2D_Fast_gpu() && CVCUDA::getCudaEnabledDeviceCount()) { detectorGPU = new GPUFAST( getFeature2D_Fast_threshold(), @@ -509,26 +596,65 @@ KeypointDetector * Settings::createKeypointDetector() } else { - detector = new cv::FastFeatureDetector( +#if CV_MAJOR_VERSION < 3 + detector = cv::Ptr(new cv::FastFeatureDetector( + getFeature2D_Fast_threshold(), + getFeature2D_Fast_nonmaxSuppression())); +#else + detector = cv::FastFeatureDetector::create( getFeature2D_Fast_threshold(), getFeature2D_Fast_nonmaxSuppression()); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } } + else if(strategies.at(index).compare("AGAST") == 0) + { +#if CV_MAJOR_VERSION < 3 + UWARN("Find-Object is not built with OpenCV 3 so AGAST cannot be used!"); +#else + detector = cv::AgastFeatureDetector::create( + getFeature2D_AGAST_threshold(), + getFeature2D_AGAST_nonmaxSuppression()); +#endif + UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); + } else if(strategies.at(index).compare("GFTT") == 0) { - detector = new cv::GFTTDetector( +#if CV_MAJOR_VERSION < 3 + detector = cv::Ptr(new cv::GFTTDetector( + getFeature2D_GFTT_maxCorners(), + getFeature2D_GFTT_qualityLevel(), + getFeature2D_GFTT_minDistance(), + getFeature2D_GFTT_blockSize(), + getFeature2D_GFTT_useHarrisDetector(), + getFeature2D_GFTT_k())); +#else + detector = cv::GFTTDetector::create( getFeature2D_GFTT_maxCorners(), getFeature2D_GFTT_qualityLevel(), getFeature2D_GFTT_minDistance(), getFeature2D_GFTT_blockSize(), getFeature2D_GFTT_useHarrisDetector(), getFeature2D_GFTT_k()); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } else if(strategies.at(index).compare("MSER") == 0) { - detector = new cv::MSER( +#if CV_MAJOR_VERSION < 3 + detector = cv::Ptr(new cv::MSER( + getFeature2D_MSER_delta(), + getFeature2D_MSER_minArea(), + getFeature2D_MSER_maxArea(), + getFeature2D_MSER_maxVariation(), + getFeature2D_MSER_minDiversity(), + getFeature2D_MSER_maxEvolution(), + getFeature2D_MSER_areaThreshold(), + getFeature2D_MSER_minMargin(), + getFeature2D_MSER_edgeBlurSize())); +#else + detector = cv::MSER::create( getFeature2D_MSER_delta(), getFeature2D_MSER_minArea(), getFeature2D_MSER_maxArea(), @@ -538,11 +664,12 @@ KeypointDetector * 
Settings::createKeypointDetector() getFeature2D_MSER_areaThreshold(), getFeature2D_MSER_minMargin(), getFeature2D_MSER_edgeBlurSize()); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } else if(strategies.at(index).compare("ORB") == 0) { - if(getFeature2D_ORB_gpu() && cv::gpu::getCudaEnabledDeviceCount()) + if(getFeature2D_ORB_gpu() && CVCUDA::getCudaEnabledDeviceCount()) { detectorGPU = new GPUORB( getFeature2D_ORB_nFeatures(), @@ -559,7 +686,18 @@ KeypointDetector * Settings::createKeypointDetector() } else { - detector = new cv::ORB( +#if CV_MAJOR_VERSION < 3 + detector = cv::Ptr(new cv::ORB( + getFeature2D_ORB_nFeatures(), + getFeature2D_ORB_scaleFactor(), + getFeature2D_ORB_nLevels(), + getFeature2D_ORB_edgeThreshold(), + getFeature2D_ORB_firstLevel(), + getFeature2D_ORB_WTA_K(), + getFeature2D_ORB_scoreType(), + getFeature2D_ORB_patchSize())); +#else + detector = cv::ORB::create( getFeature2D_ORB_nFeatures(), getFeature2D_ORB_scaleFactor(), getFeature2D_ORB_nLevels(), @@ -568,41 +706,102 @@ KeypointDetector * Settings::createKeypointDetector() getFeature2D_ORB_WTA_K(), getFeature2D_ORB_scoreType(), getFeature2D_ORB_patchSize()); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } } else if(strategies.at(index).compare("Star") == 0) { - detector = new cv::StarFeatureDetector( - getFeature2D_Star_maxSize(), - getFeature2D_Star_responseThreshold(), - getFeature2D_Star_lineThresholdProjected(), - getFeature2D_Star_lineThresholdBinarized(), - getFeature2D_Star_suppressNonmaxSize()); +#if CV_MAJOR_VERSION < 3 + detector = cv::Ptr(new cv::StarFeatureDetector( + getFeature2D_Star_maxSize(), + getFeature2D_Star_responseThreshold(), + getFeature2D_Star_lineThresholdProjected(), + getFeature2D_Star_lineThresholdBinarized(), + getFeature2D_Star_suppressNonmaxSize())); +#else +#ifdef HAVE_OPENCV_XFEATURES2D + detector = cv::xfeatures2d::StarDetector::create( + getFeature2D_Star_maxSize(), + getFeature2D_Star_responseThreshold(), + getFeature2D_Star_lineThresholdProjected(), + getFeature2D_Star_lineThresholdBinarized(), + getFeature2D_Star_suppressNonmaxSize()); +#else + UWARN("Find-Object is not built with OpenCV xfeatures2d module so Star cannot be used!"); +#endif +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } else if(strategies.at(index).compare("BRISK") == 0) { - detector = new cv::BRISK( +#if CV_MAJOR_VERSION < 3 + detector = cv::Ptr(new cv::BRISK( + getFeature2D_BRISK_thresh(), + getFeature2D_BRISK_octaves(), + getFeature2D_BRISK_patternScale())); +#else + detector = cv::BRISK::create( getFeature2D_BRISK_thresh(), getFeature2D_BRISK_octaves(), getFeature2D_BRISK_patternScale()); +#endif + UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); + } + else if(strategies.at(index).compare("KAZE") == 0) + { +#if CV_MAJOR_VERSION < 3 + UWARN("Find-Object is not built with OpenCV 3 so KAZE cannot be used!"); +#else + detector = cv::KAZE::create( + getFeature2D_KAZE_extended(), + getFeature2D_KAZE_upright(), + getFeature2D_KAZE_threshold(), + getFeature2D_KAZE_nOctaves(), + getFeature2D_KAZE_nOctaveLayers(), + cv::KAZE::DIFF_PM_G2); // FIXME: make a parameter +#endif + UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); + } + else if(strategies.at(index).compare("AKAZE") == 0) + { +#if CV_MAJOR_VERSION < 3 + UWARN("Find-Object is not built with OpenCV 3 so AKAZE cannot be used!"); +#else + detector = cv::AKAZE::create( + cv::AKAZE::DESCRIPTOR_MLDB, // FIXME: make a parameter + 
getFeature2D_AKAZE_descriptorSize(), + getFeature2D_AKAZE_descriptorChannels(), + getFeature2D_AKAZE_threshold(), + getFeature2D_AKAZE_nOctaves(), + getFeature2D_AKAZE_nOctaveLayers(), + cv::KAZE::DIFF_PM_G2); // FIXME: make a parameter +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } #if FINDOBJECT_NONFREE == 1 else if(strategies.at(index).compare("SIFT") == 0) { - detector = new cv::SIFT( +#if CV_MAJOR_VERSION < 3 + detector = cv::Ptr(new cv::SIFT( + getFeature2D_SIFT_nfeatures(), + getFeature2D_SIFT_nOctaveLayers(), + getFeature2D_SIFT_contrastThreshold(), + getFeature2D_SIFT_edgeThreshold(), + getFeature2D_SIFT_sigma())); +#else + detector = cv::xfeatures2d::SIFT::create( getFeature2D_SIFT_nfeatures(), getFeature2D_SIFT_nOctaveLayers(), getFeature2D_SIFT_contrastThreshold(), getFeature2D_SIFT_edgeThreshold(), getFeature2D_SIFT_sigma()); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } else if(strategies.at(index).compare("SURF") == 0) { - if(getFeature2D_SURF_gpu() && cv::gpu::getCudaEnabledDeviceCount()) + if(getFeature2D_SURF_gpu() && CVCUDA::getCudaEnabledDeviceCount()) { detectorGPU = new GPUSURF( getFeature2D_SURF_hessianThreshold(), @@ -615,12 +814,21 @@ KeypointDetector * Settings::createKeypointDetector() } else { - detector = new cv::SURF( +#if CV_MAJOR_VERSION < 3 + detector = cv::Ptr(new cv::SURF( + getFeature2D_SURF_hessianThreshold(), + getFeature2D_SURF_nOctaves(), + getFeature2D_SURF_nOctaveLayers(), + getFeature2D_SURF_extended(), + getFeature2D_SURF_upright())); +#else + detector = cv::xfeatures2d::SURF::create( getFeature2D_SURF_hessianThreshold(), getFeature2D_SURF_nOctaves(), getFeature2D_SURF_nOctaveLayers(), getFeature2D_SURF_extended(), getFeature2D_SURF_upright()); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } } @@ -629,7 +837,7 @@ KeypointDetector * Settings::createKeypointDetector() } } - UASSERT(detectorGPU!=0 || detector!=0); + UASSERT(detectorGPU!=0 || !detector.empty()); if(detectorGPU) { return new KeypointDetector(detectorGPU); @@ -642,7 +850,7 @@ KeypointDetector * Settings::createKeypointDetector() DescriptorExtractor * Settings::createDescriptorExtractor() { - cv::DescriptorExtractor * extractor = 0; + cv::Ptr extractor; GPUFeature2D * extractorGPU = 0; QString str = getFeature2D_2Descriptor(); QStringList split = str.split(':'); @@ -653,7 +861,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor() if(ok) { QStringList strategies = split.last().split(';'); - if(strategies.size() == 6 && index>=0 && index<6) + if(index>=0 && index(new cv::BriefDescriptorExtractor( + getFeature2D_Brief_bytes())); +#else +#ifdef HAVE_OPENCV_XFEATURES2D + extractor = cv::xfeatures2d::BriefDescriptorExtractor::create( + getFeature2D_Brief_bytes()); +#else + UWARN("Find-Object is not built with OpenCV xfeatures2d module so Brief cannot be used!"); +#endif +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } else if(strategies.at(index).compare("ORB") == 0) { - if(getFeature2D_ORB_gpu() && cv::gpu::getCudaEnabledDeviceCount()) + if(getFeature2D_ORB_gpu() && CVCUDA::getCudaEnabledDeviceCount()) { extractorGPU = new GPUORB( getFeature2D_ORB_nFeatures(), @@ -694,7 +911,18 @@ DescriptorExtractor * Settings::createDescriptorExtractor() } else { - extractor = new cv::ORB( +#if CV_MAJOR_VERSION < 3 + extractor = cv::Ptr(new cv::ORB( + getFeature2D_ORB_nFeatures(), + getFeature2D_ORB_scaleFactor(), + getFeature2D_ORB_nLevels(), + getFeature2D_ORB_edgeThreshold(), + 
getFeature2D_ORB_firstLevel(), + getFeature2D_ORB_WTA_K(), + getFeature2D_ORB_scoreType(), + getFeature2D_ORB_patchSize())); +#else + extractor = cv::ORB::create( getFeature2D_ORB_nFeatures(), getFeature2D_ORB_scaleFactor(), getFeature2D_ORB_nLevels(), @@ -703,40 +931,134 @@ DescriptorExtractor * Settings::createDescriptorExtractor() getFeature2D_ORB_WTA_K(), getFeature2D_ORB_scoreType(), getFeature2D_ORB_patchSize()); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } } else if(strategies.at(index).compare("BRISK") == 0) { - extractor = new cv::BRISK( +#if CV_MAJOR_VERSION < 3 + extractor = cv::Ptr(new cv::BRISK( + getFeature2D_BRISK_thresh(), + getFeature2D_BRISK_octaves(), + getFeature2D_BRISK_patternScale())); +#else + extractor = cv::BRISK::create( getFeature2D_BRISK_thresh(), getFeature2D_BRISK_octaves(), getFeature2D_BRISK_patternScale()); +#endif + UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); + } + else if(strategies.at(index).compare("KAZE") == 0) + { +#if CV_MAJOR_VERSION < 3 + UWARN("Find-Object is not built with OpenCV 3 so KAZE cannot be used!"); +#else + extractor = cv::KAZE::create( + getFeature2D_KAZE_extended(), + getFeature2D_KAZE_upright(), + getFeature2D_KAZE_threshold(), + getFeature2D_KAZE_nOctaves(), + getFeature2D_KAZE_nOctaveLayers(), + cv::KAZE::DIFF_PM_G2); // FIXME: make a parameter +#endif + UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); + } + else if(strategies.at(index).compare("AKAZE") == 0) + { +#if CV_MAJOR_VERSION < 3 + UWARN("Find-Object is not built with OpenCV 3 so AKAZE cannot be used!"); +#else + extractor = cv::AKAZE::create( + cv::AKAZE::DESCRIPTOR_MLDB, // FIXME: make a parameter + getFeature2D_AKAZE_descriptorSize(), + getFeature2D_AKAZE_descriptorChannels(), + getFeature2D_AKAZE_threshold(), + getFeature2D_AKAZE_nOctaves(), + getFeature2D_AKAZE_nOctaveLayers(), + cv::KAZE::DIFF_PM_G2); // FIXME: make a parameter +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } else if(strategies.at(index).compare("FREAK") == 0) { - extractor = new cv::FREAK( +#if CV_MAJOR_VERSION < 3 + extractor = cv::Ptr(new cv::FREAK( + getFeature2D_FREAK_orientationNormalized(), + getFeature2D_FREAK_scaleNormalized(), + getFeature2D_FREAK_patternScale(), + getFeature2D_FREAK_nOctaves())); +#else +#ifdef HAVE_OPENCV_XFEATURES2D + extractor = cv::xfeatures2d::FREAK::create( getFeature2D_FREAK_orientationNormalized(), getFeature2D_FREAK_scaleNormalized(), getFeature2D_FREAK_patternScale(), getFeature2D_FREAK_nOctaves()); +#else + UWARN("Find-Object is not built with OpenCV xfeatures2d module so Freak cannot be used!"); +#endif +#endif + UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } +#ifdef HAVE_OPENCV_XFEATURES2D + else if(strategies.at(index).compare("LUCID") == 0) + { + extractor = cv::xfeatures2d::LUCID::create( + getFeature2D_LUCID_kernel(), + getFeature2D_LUCID_blur_kernel()); + + UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); + } + else if(strategies.at(index).compare("LATCH") == 0) + { + extractor = cv::xfeatures2d::LATCH::create( + getFeature2D_LATCH_bytes(), + getFeature2D_LATCH_rotationInvariance(), + getFeature2D_LATCH_half_ssd_size()); + + UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); + } + else if(strategies.at(index).compare("DAISY") == 0) + { + extractor = cv::xfeatures2d::DAISY::create( + getFeature2D_DAISY_radius(), + getFeature2D_DAISY_q_radius(), + getFeature2D_DAISY_q_theta(), + getFeature2D_DAISY_q_hist(), + 
cv::xfeatures2d::DAISY::NRM_NONE, + cv::noArray(), + getFeature2D_DAISY_interpolation(), + getFeature2D_DAISY_use_orientation()); + + UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); + } +#endif #if FINDOBJECT_NONFREE == 1 else if(strategies.at(index).compare("SIFT") == 0) { - extractor = new cv::SIFT( +#if CV_MAJOR_VERSION < 3 + extractor = cv::Ptr(new cv::SIFT( + getFeature2D_SIFT_nfeatures(), + getFeature2D_SIFT_nOctaveLayers(), + getFeature2D_SIFT_contrastThreshold(), + getFeature2D_SIFT_edgeThreshold(), + getFeature2D_SIFT_sigma())); +#else + extractor = cv::xfeatures2d::SIFT::create( getFeature2D_SIFT_nfeatures(), getFeature2D_SIFT_nOctaveLayers(), getFeature2D_SIFT_contrastThreshold(), getFeature2D_SIFT_edgeThreshold(), getFeature2D_SIFT_sigma()); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } else if(strategies.at(index).compare("SURF") == 0) { - if(getFeature2D_SURF_gpu() && cv::gpu::getCudaEnabledDeviceCount()) + if(getFeature2D_SURF_gpu() && CVCUDA::getCudaEnabledDeviceCount()) { extractorGPU = new GPUSURF( getFeature2D_SURF_hessianThreshold(), @@ -749,12 +1071,21 @@ DescriptorExtractor * Settings::createDescriptorExtractor() } else { - extractor = new cv::SURF( +#if CV_MAJOR_VERSION < 3 + extractor = cv::Ptr(new cv::SURF( + getFeature2D_SURF_hessianThreshold(), + getFeature2D_SURF_nOctaves(), + getFeature2D_SURF_nOctaveLayers(), + getFeature2D_SURF_extended(), + getFeature2D_SURF_upright())); +#else + extractor = cv::xfeatures2d::SURF::create( getFeature2D_SURF_hessianThreshold(), getFeature2D_SURF_nOctaves(), getFeature2D_SURF_nOctaveLayers(), getFeature2D_SURF_extended(), getFeature2D_SURF_upright()); +#endif UDEBUG("type=%s", strategies.at(index).toStdString().c_str()); } } @@ -763,7 +1094,7 @@ DescriptorExtractor * Settings::createDescriptorExtractor() } } - UASSERT(extractorGPU!=0 || extractor!=0); + UASSERT(extractorGPU!=0 || !extractor.empty()); if(extractorGPU) { return new DescriptorExtractor(extractorGPU); @@ -986,23 +1317,29 @@ int Settings::getHomographyMethod() return method; } -KeypointDetector::KeypointDetector(cv::FeatureDetector * featureDetector) : +KeypointDetector::KeypointDetector(cv::Ptr & featureDetector) : featureDetector_(featureDetector), gpuFeature2D_(0) { - UASSERT(featureDetector_!=0); + UASSERT(!featureDetector_.empty()); } KeypointDetector::KeypointDetector(GPUFeature2D * gpuFeature2D) : - featureDetector_(0), gpuFeature2D_(gpuFeature2D) { UASSERT(gpuFeature2D_!=0); } +KeypointDetector::~KeypointDetector() +{ + if(gpuFeature2D_) + { + delete gpuFeature2D_; + } +} void KeypointDetector::detect(const cv::Mat & image, std::vector & keypoints, const cv::Mat & mask) { - if(featureDetector_) + if(!featureDetector_.empty()) { featureDetector_->detect(image, keypoints, mask); } @@ -1012,23 +1349,29 @@ void KeypointDetector::detect(const cv::Mat & image, } } -DescriptorExtractor::DescriptorExtractor(cv::DescriptorExtractor * descriptorExtractor) : +DescriptorExtractor::DescriptorExtractor(cv::Ptr & descriptorExtractor) : descriptorExtractor_(descriptorExtractor), gpuFeature2D_(0) { - UASSERT(descriptorExtractor_!=0); + UASSERT(!descriptorExtractor_.empty()); } DescriptorExtractor::DescriptorExtractor(GPUFeature2D * gpuFeature2D) : - descriptorExtractor_(0), gpuFeature2D_(gpuFeature2D) { UASSERT(gpuFeature2D_!=0); } +DescriptorExtractor::~DescriptorExtractor() +{ + if(gpuFeature2D_) + { + delete gpuFeature2D_; + } +} void DescriptorExtractor::compute(const cv::Mat & image, std::vector & keypoints, cv::Mat & 
descriptors)
 {
-	if(descriptorExtractor_)
+	if(!descriptorExtractor_.empty())
 	{
 		descriptorExtractor_->compute(image, keypoints, descriptors);
 	}
diff --git a/src/Vocabulary.cpp b/src/Vocabulary.cpp
index 5fd7e1e9..2fb02291 100644
--- a/src/Vocabulary.cpp
+++ b/src/Vocabulary.cpp
@@ -31,7 +31,16 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include "Vocabulary.h"
 #include
 #include
+#if CV_MAJOR_VERSION < 3
 #include
+#define CVCUDA cv::gpu
+#else
+#include
+#define CVCUDA cv::cuda
+#ifdef HAVE_OPENCV_CUDAFEATURES2D
+#include
+#endif
+#endif
 
 namespace find_object {
 
@@ -264,20 +273,36 @@ void Vocabulary::search(const cv::Mat & descriptors, cv::Mat & results, cv::Mat
 	if(Settings::isBruteForceNearestNeighbor())
 	{
 		std::vector<std::vector<cv::DMatch> > matches;
-		if(Settings::getNearestNeighbor_BruteForce_gpu() && cv::gpu::getCudaEnabledDeviceCount())
+		if(Settings::getNearestNeighbor_BruteForce_gpu() && CVCUDA::getCudaEnabledDeviceCount())
 		{
-			cv::gpu::GpuMat newDescriptorsGpu(descriptors);
-			cv::gpu::GpuMat lastDescriptorsGpu(indexedDescriptors_);
+			CVCUDA::GpuMat newDescriptorsGpu(descriptors);
+			CVCUDA::GpuMat lastDescriptorsGpu(indexedDescriptors_);
+#if CV_MAJOR_VERSION < 3
 			if(indexedDescriptors_.type()==CV_8U)
 			{
-				cv::gpu::BruteForceMatcher_GPU<cv::Hamming> gpuMatcher;
+				CVCUDA::BruteForceMatcher_GPU<cv::Hamming> gpuMatcher;
 				gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
 			}
 			else
 			{
-				cv::gpu::BruteForceMatcher_GPU<cv::L2<float> > gpuMatcher;
+				CVCUDA::BruteForceMatcher_GPU<cv::L2<float> > gpuMatcher;
 				gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
 			}
+#else
+#ifdef HAVE_OPENCV_CUDAFEATURES2D
+			cv::Ptr<cv::cuda::DescriptorMatcher> gpuMatcher;
+			if(indexedDescriptors_.type()==CV_8U)
+			{
+				gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_HAMMING);
+				gpuMatcher->knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
+			}
+			else
+			{
+				gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_L2);
+				gpuMatcher->knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
+			}
+#endif
+#endif
 		}
 		else
 		{
diff --git a/tools/similarity/main.cpp b/tools/similarity/main.cpp
index 61da202e..ba63c640 100644
--- a/tools/similarity/main.cpp
+++ b/tools/similarity/main.cpp
@@ -32,8 +32,19 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include
 #include
 #include
-#include
 #include // for homography
+#include
+
+#ifdef HAVE_OPENCV_NONFREE
+  #if CV_MAJOR_VERSION == 2 && CV_MINOR_VERSION >=4
+  #include
+  #include
+  #endif
+#endif
+#ifdef HAVE_OPENCV_XFEATURES2D
+  #include
+  #include
+#endif
 
 void showUsage()
 {
@@ -93,6 +104,7 @@ int main(int argc, char * argv[])
 	cv::Mat objectDescriptors;
 	cv::Mat sceneDescriptors;
 
+#if CV_MAJOR_VERSION < 3
 	////////////////////////////
 	// EXTRACT KEYPOINTS
 	////////////////////////////
@@ -105,7 +117,20 @@ int main(int argc, char * argv[])
 	////////////////////////////
 	sift.compute(objectImg, objectKeypoints, objectDescriptors);
 	sift.compute(sceneImg, sceneKeypoints, sceneDescriptors);
+#else
+	////////////////////////////
+	// EXTRACT KEYPOINTS
+	////////////////////////////
+	cv::Ptr<cv::xfeatures2d::SIFT> sift = cv::xfeatures2d::SIFT::create();
+	sift->detect(objectImg, objectKeypoints);
+	sift->detect(sceneImg, sceneKeypoints);
+	////////////////////////////
+	// EXTRACT DESCRIPTORS
+	////////////////////////////
+	sift->compute(objectImg, objectKeypoints, objectDescriptors);
+	sift->compute(sceneImg, sceneKeypoints, sceneDescriptors);
+#endif
 
 	////////////////////////////
 	// NEAREST NEIGHBOR MATCHING USING FLANN LIBRARY (included in OpenCV)
 	////////////////////////////
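
Illustrative sketch (not taken from the patch itself): the hunks above move SIFT from the OpenCV 2.4 nonfree module, where algorithms are concrete classes constructed with new, to the OpenCV 3 xfeatures2d contrib module, where they are obtained from static create() factories returning cv::Ptr. A minimal version-portable helper following the same pattern is sketched below; the function name is hypothetical, and the header paths assume a build where nonfree (2.4) or xfeatures2d (3.x) is available, mirroring the HAVE_OPENCV_NONFREE / HAVE_OPENCV_XFEATURES2D guards used in this patch.

// Sketch: version-portable SIFT detect/compute, assuming nonfree on OpenCV 2.4
// and xfeatures2d on OpenCV 3.x.
#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#if CV_MAJOR_VERSION < 3
#include <opencv2/nonfree/features2d.hpp>   // cv::SIFT
#else
#include <opencv2/xfeatures2d.hpp>          // cv::xfeatures2d::SIFT
#endif

void siftDetectAndCompute(const cv::Mat & image,
		std::vector<cv::KeyPoint> & keypoints,
		cv::Mat & descriptors)
{
#if CV_MAJOR_VERSION < 3
	// OpenCV 2.x: the algorithm is a concrete class; wrapping it in cv::Ptr
	// lets both branches expose the same cv::Feature2D interface.
	cv::Ptr<cv::Feature2D> sift(new cv::SIFT());
#else
	// OpenCV 3.x: the algorithm is created through a static create() factory.
	cv::Ptr<cv::Feature2D> sift = cv::xfeatures2d::SIFT::create();
#endif
	sift->detect(image, keypoints);
	sift->compute(image, keypoints, descriptors);
}

Keeping both branches behind a cv::Ptr<cv::Feature2D> is what allows the call sites (detect/compute) to stay identical across the two OpenCV majors; the patch applies the same idea in Settings::createKeypointDetector() and Settings::createDescriptorExtractor() by replacing raw cv::FeatureDetector / cv::DescriptorExtractor pointers with cv::Ptr and branching on CV_MAJOR_VERSION only at construction time.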