diff --git a/CMakeLists.txt b/CMakeLists.txt
index 828d2d57..0a9ef324 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -64,6 +64,8 @@ ADD_DEFINITIONS(-DQT_NO_KEYWORDS) # To avoid conflicts with boost signals used i
 
 FIND_PACKAGE(Tcmalloc QUIET)
 
+FIND_PACKAGE(ZLIB REQUIRED QUIET)
+
 SET(NONFREE 0)
 IF(OPENCV_NONFREE_FOUND OR OPENCV_XFEATURES2D_FOUND)
 	SET(NONFREE 1)
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index b96addb7..2793d6fe 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -74,6 +74,7 @@ SET(SRC_FILES
 	./utilite/UConversion.cpp
 	./rtabmap/PdfPlot.cpp
 	./json/jsoncpp.cpp
+	./Compression.cpp
 	${moc_srcs}
 	${moc_uis}
 	${srcs_qrc}
@@ -93,6 +94,7 @@ SET(INCLUDE_DIRS
 	${CMAKE_CURRENT_SOURCE_DIR}
 	${OpenCV_INCLUDE_DIRS}
 	${CMAKE_CURRENT_BINARY_DIR} # for qt ui generated in binary dir
+	${ZLIB_INCLUDE_DIRS}
 )
 IF(CATKIN_BUILD)
 	SET(INCLUDE_DIRS
@@ -108,6 +110,7 @@ ENDIF(QT4_FOUND)
 SET(LIBRARIES
 	${QT_LIBRARIES}
 	${OpenCV_LIBS}
+	${ZLIB_LIBRARIES}
 )
 IF(CATKIN_BUILD)
 	SET(LIBRARIES
diff --git a/src/Compression.cpp b/src/Compression.cpp
new file mode 100644
index 00000000..dfcd89e9
--- /dev/null
+++ b/src/Compression.cpp
@@ -0,0 +1,79 @@
+/*
+ * Compression.cpp
+ *
+ *  Created on: Sep 10, 2018
+ *      Author: labm2414
+ */
+
+#include <Compression.h>
+#include <zlib.h>
+#include "find_object/utilite/ULogger.h"
+
+namespace find_object {
+
+std::vector<unsigned char> compressData(const cv::Mat & data)
+{
+	std::vector<unsigned char> bytes;
+	if(!data.empty())
+	{
+		uLong sourceLen = uLong(data.total())*uLong(data.elemSize());
+		uLong destLen = compressBound(sourceLen);
+		bytes.resize(destLen);
+		int errCode = compress(
+				(Bytef *)bytes.data(),
+				&destLen,
+				(const Bytef *)data.data,
+				sourceLen);
+
+		bytes.resize(destLen+3*sizeof(int));
+		*((int*)&bytes[destLen]) = data.rows;
+		*((int*)&bytes[destLen+sizeof(int)]) = data.cols;
+		*((int*)&bytes[destLen+2*sizeof(int)]) = data.type();
+
+		if(errCode == Z_MEM_ERROR)
+		{
+			UERROR("Z_MEM_ERROR : Insufficient memory.");
+		}
+		else if(errCode == Z_BUF_ERROR)
+		{
+			UERROR("Z_BUF_ERROR : The buffer dest was not large enough to hold the compressed data.");
+		}
+	}
+	return bytes;
+}
+
+cv::Mat uncompressData(const unsigned char * bytes, unsigned long size)
+{
+	cv::Mat data;
+	if(bytes && size>=3*sizeof(int))
+	{
+		//last 3 int elements are matrix size and type
+		int height = *((int*)&bytes[size-3*sizeof(int)]);
+		int width = *((int*)&bytes[size-2*sizeof(int)]);
+		int type = *((int*)&bytes[size-1*sizeof(int)]);
+
+		data = cv::Mat(height, width, type);
+		uLongf totalUncompressed = uLongf(data.total())*uLongf(data.elemSize());
+
+		int errCode = uncompress(
+				(Bytef*)data.data,
+				&totalUncompressed,
+				(const Bytef*)bytes,
+				uLong(size));
+
+		if(errCode == Z_MEM_ERROR)
+		{
+			UERROR("Z_MEM_ERROR : Insufficient memory.");
+		}
+		else if(errCode == Z_BUF_ERROR)
+		{
+			UERROR("Z_BUF_ERROR : The buffer dest was not large enough to hold the uncompressed data.");
+		}
+		else if(errCode == Z_DATA_ERROR)
+		{
+			UERROR("Z_DATA_ERROR : The compressed data (referenced by source) was corrupted.");
+		}
+	}
+	return data;
+}
+
+} /* namespace find_object */
diff --git a/src/Compression.h b/src/Compression.h
new file mode 100644
index 00000000..1676c8ea
--- /dev/null
+++ b/src/Compression.h
@@ -0,0 +1,20 @@
+/*
+ * Compression.h
+ *
+ *  Created on: Sep 10, 2018
+ *      Author: labm2414
+ */
+
+#ifndef SRC_COMPRESSION_H_
+#define SRC_COMPRESSION_H_
+
+#include <opencv2/core/core.hpp>
+
+namespace find_object {
+
+std::vector<unsigned char> compressData(const cv::Mat & data);
+cv::Mat uncompressData(const unsigned char * bytes, unsigned long size);
+
+}
+
+#endif /* SRC_COMPRESSION_H_ */
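Editor's illustration (not part of the patch): the two helpers above are the only new public API introduced by src/Compression.h. A minimal round-trip check could look like the sketch below, assuming the header is reachable as <Compression.h> on the include path and the program links against zlib and OpenCV; the matrix dimensions are arbitrary and the whole program is hypothetical.

// Hypothetical standalone check of the new helpers; not part of the diff.
#include <opencv2/core/core.hpp>
#include <Compression.h>

int main()
{
	// Fake 1000x32 8-bit descriptor matrix (dimensions chosen only for the example).
	cv::Mat descriptors(1000, 32, CV_8UC1);
	cv::randu(descriptors, 0, 255);

	// compressData() appends rows, cols and type as three ints after the zlib stream,
	// so the returned buffer is self-describing.
	std::vector<unsigned char> compressed = find_object::compressData(descriptors);

	cv::Mat restored = find_object::uncompressData(compressed.data(), compressed.size());

	// The round trip should be lossless.
	bool ok = restored.rows == descriptors.rows &&
			restored.type() == descriptors.type() &&
			cv::countNonZero(descriptors != restored) == 0;
	return ok ? 0 : 1;
}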
diff --git a/src/FindObject.cpp b/src/FindObject.cpp
index 8d762240..187bd6fe 100644
--- a/src/FindObject.cpp
+++ b/src/FindObject.cpp
@@ -735,7 +735,7 @@ protected:
 	{
 		QTime time;
 		time.start();
-		UINFO("Extracting descriptors from object %d...", objectId_);
+		UDEBUG("Extracting descriptors from object %d...", objectId_);
 
 		QTime timeStep;
 		timeStep.start();
@@ -850,6 +850,7 @@ private:
 
 void FindObject::updateObjects(const QList<int> & ids)
 {
+	UINFO("Update %d objects...", ids.size());
 	QList<ObjSignature*> objectsList;
 	if(ids.size())
 	{
@@ -884,7 +885,7 @@ void FindObject::updateObjects(const QList<int> & ids)
 
 	if(objectsList.size())
 	{
-		UINFO("Features extraction from %d objects...", objectsList.size());
+		UINFO("Features extraction from %d objects... (threads=%d)", objectsList.size(), threadCounts);
 		for(int i=0; i<objectsList.size(); i+=threadCounts)
 		{
 			QVector<ExtractFeaturesThread*> threads;
@@ -995,6 +996,8 @@ void FindObject::updateVocabulary(const QList<int> & ids)
 		}
 	}
 
+	UINFO("Updating vocabulary with %d objects and %d descriptors...", ids.size(), count);
+
 	// Copy data
 	if(count)
 	{
@@ -1100,6 +1103,10 @@ void FindObject::updateVocabulary(const QList<int> & ids)
 		}
 		if(addedWords && !Settings::getGeneral_vocabularyFixed())
 		{
+			if(!incremental)
+			{
+				UINFO("Updating vocabulary...");
+			}
 			vocabulary_->update();
 		}
 
diff --git a/src/MainWindow.cpp b/src/MainWindow.cpp
index 13a805f9..e7087ed8 100644
--- a/src/MainWindow.cpp
+++ b/src/MainWindow.cpp
@@ -244,15 +244,23 @@ MainWindow::MainWindow(FindObject * findObject, Camera * camera, QWidget * paren
 
 	if(findObject_->objects().size())
 	{
+		UINFO("Creating %d object widgets...", findObject_->objects().size());
 		// show objects already loaded in FindObject
+		int i=0;
 		for(QMap<int, ObjSignature*>::const_iterator iter = findObject_->objects().constBegin(); iter!=findObject_->objects().constEnd(); ++iter)
 		{
-			ObjWidget * obj = new ObjWidget(iter.key(), iter.value()->keypoints(), iter.value()->words(), cvtCvMat2QImage(iter.value()->image()));
+			ObjWidget * obj = new ObjWidget(iter.key(), iter.value()->keypoints(), iter.value()->words(), iter.value()->image().empty()?QImage():cvtCvMat2QImage(iter.value()->image()));
 
 			objWidgets_.insert(obj->id(), obj);
 			this->showObject(obj);
+			++i;
+			if(i % 100 == 0)
+			{
+				UINFO("Created %d/%d widgets...", i, findObject_->objects().size());
+			}
 		}
+		UINFO("Creating %d object widgets... done!", findObject_->objects().size());
done!", findObject_->objects().size()); ui_->actionSave_objects->setEnabled(true); ui_->actionSave_session->setEnabled(true); } @@ -1045,9 +1053,12 @@ void MainWindow::showObject(ObjWidget * obj) ui_->verticalLayout_objects->insertLayout(ui_->verticalLayout_objects->count()-1, vLayout); QByteArray ba; - QBuffer buffer(&ba); - buffer.open(QIODevice::WriteOnly); - obj->pixmap().scaledToWidth(128).save(&buffer, "JPEG"); // writes image into JPEG format + if(obj->pixmap().width() > 0) + { + QBuffer buffer(&ba); + buffer.open(QIODevice::WriteOnly); + obj->pixmap().scaledToWidth(128).save(&buffer, "JPEG"); // writes image into JPEG format + } imagesMap_.insert(obj->id(), ba); // update objects size slider @@ -1411,6 +1422,14 @@ void MainWindow::update(const cv::Mat & image) // add rectangle QPen rectPen(obj->color()); rectPen.setWidth(Settings::getHomography_rectBorderWidth()); + if(rect.isNull()) + { + QMap::const_iterator iter = findObject_->objects().constFind(id); + if(iter!=findObject_->objects().end()) + { + rect = iter.value()->rect(); + } + } RectItem * rectItemScene = new RectItem(id, rect); connect(rectItemScene, SIGNAL(hovered(int)), this, SLOT(rectHovered(int))); rectItemScene->setPen(rectPen); diff --git a/src/ObjSignature.h b/src/ObjSignature.h index 300ec670..5ca14c92 100644 --- a/src/ObjSignature.h +++ b/src/ObjSignature.h @@ -34,6 +34,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include #include #include +#include +#include namespace find_object { @@ -84,18 +86,46 @@ public: keypoints_.at(j).size; } - qint64 dataSize = descriptors_.elemSize()*descriptors_.cols*descriptors_.rows; - streamPtr << descriptors_.rows << - descriptors_.cols << - descriptors_.type() << - dataSize; - streamPtr << QByteArray((char*)descriptors_.data, dataSize); + std::vector bytes = compressData(descriptors_); + + qint64 dataSize = bytes.size(); + int old = 0; + if(dataSize <= std::numeric_limits::max()) + { + // old: rows, cols, type + streamPtr << old << old << old << dataSize; + streamPtr << QByteArray::fromRawData((const char*)bytes.data(), dataSize); + } + else + { + UERROR("Descriptors (compressed) are too large (%d MB) to be saved! 
+					dataSize/(1024*1024));
+			// old: rows, cols, type, dataSize
+			streamPtr << old << old << old << old;
+			streamPtr << QByteArray(); // empty
+		}
 
 		streamPtr << words_;
 
-		std::vector<unsigned char> bytes;
-		cv::imencode(".png", image_, bytes);
-		streamPtr << QByteArray((char*)bytes.data(), (int)bytes.size());
+		if(!image_.empty())
+		{
+			std::vector<unsigned char> bytes;
+			QString ext = QFileInfo(filePath_).suffix();
+			if(ext.isEmpty())
+			{
+				// default png
+				cv::imencode(".png", image_, bytes);
+			}
+			else
+			{
+				cv::imencode(std::string(".")+ext.toStdString(), image_, bytes);
+			}
+			streamPtr << QByteArray::fromRawData((const char*)bytes.data(), (int)bytes.size());
+		}
+		else
+		{
+			streamPtr << QByteArray();
+		}
 
 		streamPtr << rect_;
 	}
@@ -120,15 +150,34 @@
 		int rows,cols,type;
 		qint64 dataSize;
 		streamPtr >> rows >> cols >> type >> dataSize;
-		QByteArray data;
-		streamPtr >> data;
-		descriptors_ = cv::Mat(rows, cols, type, data.data()).clone();
+		if(rows == 0 && cols == 0 && type == 0)
+		{
+			// compressed descriptors
+			UASSERT(dataSize <= std::numeric_limits<int>::max());
+			QByteArray data;
+			streamPtr >> data;
+			descriptors_ = uncompressData((unsigned const char*)data.data(), dataSize);
+		}
+		else
+		{
+			// old raw format
+			QByteArray data;
+			streamPtr >> data;
+			if(data.size())
+			{
+				descriptors_ = cv::Mat(rows, cols, type, data.data()).clone();
+			}
+			else if(dataSize)
+			{
+				UERROR("Error reading descriptor data for object=%d", id_);
+			}
+		}
 
 		streamPtr >> words_;
 
 		QByteArray image;
 		streamPtr >> image;
-		if(!ignoreImage)
+		if(!ignoreImage && image.size())
 		{
 			std::vector<unsigned char> bytes(image.size());
 			memcpy(bytes.data(), image.data(), image.size());
diff --git a/src/ObjWidget.cpp b/src/ObjWidget.cpp
index f882f656..ccaef110 100644
--- a/src/ObjWidget.cpp
+++ b/src/ObjWidget.cpp
@@ -266,8 +266,13 @@ void ObjWidget::setTextLabel(const QString & text)
 
 void ObjWidget::updateImage(const QImage & image)
 {
-	pixmap_ = QPixmap::fromImage(image);
-	rect_ = pixmap_.rect();
+	pixmap_ = QPixmap();
+	rect_ = QRect();
+	if(!image.isNull())
+	{
+		pixmap_ = QPixmap::fromImage(image);
+		rect_ = pixmap_.rect();
+	}
 	label_->setVisible(image.isNull());
 }
 void ObjWidget::updateData(const std::vector<cv::KeyPoint> & keypoints, const QMultiMap<int, int> & words)
diff --git a/src/Vocabulary.cpp b/src/Vocabulary.cpp
index e67eb55a..aeac1572 100644
--- a/src/Vocabulary.cpp
+++ b/src/Vocabulary.cpp
@@ -28,9 +28,11 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #include "find_object/Settings.h"
 #include "find_object/utilite/ULogger.h"
+#include "Compression.h"
 #include "Vocabulary.h"
 #include
 #include
+#include <limits>
 #include
 #if CV_MAJOR_VERSION < 3
 #include
@@ -80,16 +82,31 @@ void Vocabulary::save(QDataStream & streamSessionPtr, bool saveVocabularyOnly) c
 	}
 	else
 	{
+		UINFO("Saving %d object references...", wordToObjects_.size());
 		streamSessionPtr << wordToObjects_;
 	}
 
 	// save words
-	qint64 dataSize = indexedDescriptors_.elemSize()*indexedDescriptors_.cols*indexedDescriptors_.rows;
-	streamSessionPtr << indexedDescriptors_.rows <<
-			indexedDescriptors_.cols <<
-			indexedDescriptors_.type() <<
-			dataSize;
-	streamSessionPtr << QByteArray((char*)indexedDescriptors_.data, dataSize);
+	qint64 rawDataSize = indexedDescriptors_.rows * indexedDescriptors_.cols * indexedDescriptors_.elemSize();
+	UINFO("Compressing words... (%dx%d, %d MB)", indexedDescriptors_.rows, indexedDescriptors_.cols, rawDataSize/(1024*1024));
(%dx%d, %d MB)", indexedDescriptors_.rows, indexedDescriptors_.cols, rawDataSize/(1024*1024)); + std::vector bytes = compressData(indexedDescriptors_); + qint64 dataSize = bytes.size(); + UINFO("Compressed = %d MB", dataSize/(1024*1024)); + int old = 0; + if(dataSize <= std::numeric_limits::max()) + { + // old: rows, cols, type + streamSessionPtr << old << old << old << dataSize; + streamSessionPtr << QByteArray::fromRawData((const char*)bytes.data(), dataSize); + } + else + { + UERROR("Vocabulary (compressed) is too large (%d MB) to be saved! Limit is 2 GB (based on max QByteArray size).", + dataSize/(1024*1024)); + // old: rows, cols, type, dataSize + streamSessionPtr << old << old << old << old; + streamSessionPtr << QByteArray(); // empty + } } void Vocabulary::load(QDataStream & streamSessionPtr, bool loadVocabularyOnly) @@ -104,17 +121,45 @@ void Vocabulary::load(QDataStream & streamSessionPtr, bool loadVocabularyOnly) } else { + UINFO("Loading words to objects references..."); streamSessionPtr >> wordToObjects_; + UINFO("Loaded %d object references...", wordToObjects_.size()); } // load words int rows,cols,type; qint64 dataSize; streamSessionPtr >> rows >> cols >> type >> dataSize; - QByteArray data; - streamSessionPtr >> data; - indexedDescriptors_ = cv::Mat(rows, cols, type, data.data()).clone(); + if(rows == 0 && cols == 0 && type == 0) + { + // compressed vocabulary + UINFO("Loading words... (compressed format: %d MB)", dataSize/(1024*1024)); + UASSERT(dataSize <= std::numeric_limits::max()); + QByteArray data; + streamSessionPtr >> data; + UINFO("Uncompress vocabulary..."); + indexedDescriptors_ = uncompressData((unsigned const char*)data.data(), dataSize); + UINFO("Words: %dx%d (%d MB)", indexedDescriptors_.rows, indexedDescriptors_.cols, + (indexedDescriptors_.rows * indexedDescriptors_.cols * indexedDescriptors_.elemSize()) / (1024*1024)); + } + else + { + // old raw format + UINFO("Loading words... (old format: %dx%d (%d MB))", rows, cols, dataSize/(1024*1024)); + QByteArray data; + streamSessionPtr >> data; + UINFO("Allocate memory..."); + if(data.size()) + { + indexedDescriptors_ = cv::Mat(rows, cols, type, data.data()).clone(); + } + else if(dataSize) + { + UERROR("Error reading vocabulary data..."); + } + } + UINFO("Update vocabulary index..."); update(); }