New camera interface using signals/slots (for ROS nodes)

git-svn-id: http://find-object.googlecode.com/svn/trunk/find_object@34 620bd6b2-0a58-f614-fd9a-1bd335dccda9
matlabbe 2011-11-23 16:44:14 +00:00
parent 45f6b9fd17
commit acb12ac688
11 changed files with 267 additions and 300 deletions
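
In short, this commit replaces the polled camera API (init()/close()/takeImage() driven by a QTimer in each caller) with a push model: Camera runs its own timer and delivers frames through an imageReceived(const cv::Mat &) signal. A minimal consumer sketch, assuming the new Camera header from this commit (FrameConsumer and handleFrame are illustrative names, not part of the change):

// Minimal push-based consumer of the new Camera interface (illustrative only).
#include <QtCore/QObject>
#include <opencv2/core/core.hpp>
#include "Camera.h"

class FrameConsumer : public QObject {
	Q_OBJECT
public:
	FrameConsumer(Camera * camera, QObject * parent = 0) : QObject(parent)
	{
		// Frames are pushed by the camera; the consumer no longer polls takeImage().
		connect(camera, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(handleFrame(const cv::Mat &)));
		camera->start(); // replaces the old camera->init() + external QTimer
	}
private slots:
	void handleFrame(const cv::Mat & image)
	{
		// process the frame here
	}
};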

View File

@@ -5,6 +5,7 @@
#include "KeypointItem.h"
#include "Camera.h"
#include "qtipl.h"
#include "Settings.h"
#include <stdio.h>
@@ -15,19 +16,15 @@
#include <opencv2/imgproc/imgproc_c.h>
#include <opencv2/highgui/highgui_c.h>
AddObjectDialog::AddObjectDialog(QList<ObjWidget*> * objects, QWidget * parent, Qt::WindowFlags f) :
AddObjectDialog::AddObjectDialog(Camera * camera, QList<ObjWidget*> * objects, QWidget * parent, Qt::WindowFlags f) :
QDialog(parent, f),
camera_(0),
camera_(camera),
objects_(objects),
cvImage_(0)
{
ui_ = new Ui_addObjectDialog();
ui_->setupUi(this);
camera_ = Settings::createCamera();
connect(&cameraTimer_, SIGNAL(timeout()), this, SLOT(update()));
connect(ui_->pushButton_cancel, SIGNAL(clicked()), this, SLOT(cancel()));
connect(ui_->pushButton_back, SIGNAL(clicked()), this, SLOT(back()));
connect(ui_->pushButton_next, SIGNAL(clicked()), this, SLOT(next()));
@@ -44,15 +41,12 @@ AddObjectDialog::~AddObjectDialog()
{
cvReleaseImage(&cvImage_);
}
if(camera_)
{
delete camera_;
}
delete ui_;
}
void AddObjectDialog::closeEvent(QCloseEvent* event)
{
disconnect(camera_, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(update(const cv::Mat &)));
QDialog::closeEvent(event);
}
@@ -102,20 +96,20 @@ void AddObjectDialog::setState(int state)
ui_->cameraView->setVisible(true);
ui_->objectView->setVisible(false);
ui_->cameraView->setGraphicsViewMode(false);
if(!camera_->init())
if(!camera_ || !camera_->start())
{
QMessageBox::critical(this, tr("Camera error"), tr("Camera initialization failed! (with device %1)").arg(Settings::getCamera_deviceId().toInt()));
QMessageBox::critical(this, tr("Camera error"), tr("Camera is not started!"));
ui_->pushButton_takePicture->setEnabled(false);
}
else
{
cameraTimer_.start(1000/Settings::getCamera_imageRate().toInt());
connect(camera_, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(update(const cv::Mat &)));
}
}
else if(state == kSelectFeatures)
{
cameraTimer_.stop();
camera_->close();
disconnect(camera_, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(update(const cv::Mat &)));
camera_->stop();
ui_->pushButton_cancel->setEnabled(true);
ui_->pushButton_back->setEnabled(true);
@@ -130,8 +124,8 @@ void AddObjectDialog::setState(int state)
}
else if(state == kVerifySelection)
{
cameraTimer_.stop();
camera_->close();
disconnect(camera_, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(update(const cv::Mat &)));
camera_->stop();
ui_->pushButton_cancel->setEnabled(true);
ui_->pushButton_back->setEnabled(true);
@@ -204,14 +198,15 @@ void AddObjectDialog::setState(int state)
}
}
void AddObjectDialog::update()
void AddObjectDialog::update(const cv::Mat & image)
{
if(cvImage_)
{
cvReleaseImage(&cvImage_);
cvImage_ = 0;
}
cvImage_ = camera_->takeImage();
IplImage iplImage = image;
cvImage_ = cvCloneImage(& iplImage);
if(cvImage_)
{
// convert to grayscale
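
Taken together, the AddObjectDialog changes above boil down to this: the dialog no longer owns a camera or a timer; it subscribes to the shared camera's imageReceived() signal while in the capture state and unsubscribes when leaving it. Condensed into one place (a paraphrase of the diff, not a verbatim excerpt):

// Condensed view of the new subscribe/unsubscribe lifecycle in AddObjectDialog (paraphrased).
if(state == kTakePicture)
{
	if(camera_ && camera_->start())
	{
		// Subscribe: frames arrive in update(const cv::Mat &) instead of a local QTimer slot.
		connect(camera_, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(update(const cv::Mat &)));
	}
}
else // kSelectFeatures or kVerifySelection
{
	// Unsubscribe and stop, but do not delete: the camera is shared with MainWindow.
	disconnect(camera_, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(update(const cv::Mat &)));
	camera_->stop();
}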

View File

@@ -16,11 +16,11 @@ class AddObjectDialog : public QDialog {
Q_OBJECT
public:
AddObjectDialog(QList<ObjWidget*> * objects, QWidget * parent = 0, Qt::WindowFlags f = 0);
AddObjectDialog(Camera * camera, QList<ObjWidget*> * objects, QWidget * parent = 0, Qt::WindowFlags f = 0);
virtual ~AddObjectDialog();
private slots:
void update();
void update(const cv::Mat &);
void next();
void back();
void cancel();
@@ -36,7 +36,6 @@ private:
private:
Ui_addObjectDialog * ui_;
Camera * camera_;
QTimer cameraTimer_;
QList<ObjWidget*> * objects_;
IplImage * cvImage_;

View File

@@ -8,45 +8,23 @@
#include "Camera.h"
#include <stdio.h>
#include <opencv2/imgproc/imgproc_c.h>
#include "Settings.h"
Camera::Camera(int deviceId,
int imageWidth,
int imageHeight,
QObject * parent) :
Camera::Camera(QObject * parent) :
QObject(parent),
capture_(0),
deviceId_(deviceId),
imageWidth_(imageWidth),
imageHeight_(imageHeight)
capture_(0)
{
connect(&cameraTimer_, SIGNAL(timeout()), this, SLOT(takeImage()));
}
Camera::~Camera()
{
this->close();
this->stop();
}
bool Camera::init()
{
if(!capture_)
{
capture_ = cvCaptureFromCAM(deviceId_);
if(capture_)
{
cvSetCaptureProperty(capture_, CV_CAP_PROP_FRAME_WIDTH, double(imageWidth_));
cvSetCaptureProperty(capture_, CV_CAP_PROP_FRAME_HEIGHT, double(imageHeight_));
}
}
if(!capture_)
{
printf("Failed to create a capture object!\n");
return false;
}
return true;
}
void Camera::close()
void Camera::stop()
{
cameraTimer_.stop();
if(capture_)
{
cvReleaseCapture(&capture_);
@@ -54,11 +32,11 @@ void Camera::close()
}
}
IplImage * Camera::takeImage()
void Camera::takeImage()
{
IplImage * img = 0;
if(capture_)
{
IplImage * img = 0;
if(cvGrabFrame(capture_)) // capture a frame
{
img = cvRetrieveFrame(capture_); // retrieve the captured frame
@@ -67,29 +45,57 @@ IplImage * Camera::takeImage()
{
printf("CameraVideo: Could not grab a frame, the end of the feed may be reached...\n");
}
}
//resize
if(img &&
imageWidth_ &&
imageHeight_ &&
imageWidth_ != (unsigned int)img->width &&
imageHeight_ != (unsigned int)img->height)
{
// declare a destination IplImage object with correct size, depth and channels
IplImage * resampledImg = cvCreateImage( cvSize(imageWidth_, imageHeight_),
img->depth,
img->nChannels );
//resize
if(img &&
Settings::getCamera_imageWidth().toInt() &&
Settings::getCamera_imageHeight().toInt() &&
Settings::getCamera_imageWidth().toInt() != (unsigned int)img->width &&
Settings::getCamera_imageHeight().toInt() != (unsigned int)img->height)
{
// declare a destination IplImage object with correct size, depth and channels
cv::Mat imgMat(cvSize(Settings::getCamera_imageWidth().toInt(), Settings::getCamera_imageHeight().toInt()),
img->depth,
img->nChannels );
//use cvResize to resize source to a destination image (linear interpolation)
cvResize(img, resampledImg);
img = resampledImg;
//use cvResize to resize source to a destination image (linear interpolation)
IplImage resampledImg = imgMat;
cvResize(img, &resampledImg);
emit imageReceived(imgMat);
}
else
{
emit imageReceived(cv::Mat(img, true));
}
}
else if(img)
{
img = cvCloneImage(img);
}
return img;
}
bool Camera::start()
{
if(!capture_)
{
if(!capture_)
{
capture_ = cvCaptureFromCAM(Settings::getCamera_deviceId().toInt());
if(capture_)
{
cvSetCaptureProperty(capture_, CV_CAP_PROP_FRAME_WIDTH, double(Settings::getCamera_imageWidth().toInt()));
cvSetCaptureProperty(capture_, CV_CAP_PROP_FRAME_HEIGHT, double(Settings::getCamera_imageHeight().toInt()));
}
}
if(!capture_)
{
printf("Failed to create a capture object!\n");
return false;
}
}
cameraTimer_.start(1000/Settings::getCamera_imageRate().toInt());
return true;
}
void Camera::updateImageRate()
{
cameraTimer_.setInterval(1000/Settings::getCamera_imageRate().toInt());
}
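
Two notes on Camera.cpp: the timer interval is simply 1000 / Camera/imageRate milliseconds, which updateImageRate() re-applies live when the parameter changes, and takeImage() now resizes the grabbed frame (if a target size is configured) before emitting it. For readers more familiar with the C++ API, the resize-and-emit branch in takeImage() is equivalent to the sketch below; this is illustrative only (the helper name is mine, and the commit itself builds the destination through an IplImage header):

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

// Illustrative equivalent of the resize step: return the captured frame at the
// configured size, copied so the emitted cv::Mat does not alias the capture buffer.
static cv::Mat resizeToSettings(const IplImage * img, int width, int height)
{
	cv::Mat frame(img);                                      // header over the capture buffer, no copy
	if(width > 0 && height > 0 &&
	   (width != frame.cols || height != frame.rows))
	{
		cv::Mat resized;
		cv::resize(frame, resized, cv::Size(width, height)); // bilinear interpolation by default
		return resized;
	}
	return frame.clone();
}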

View File

@@ -10,38 +10,29 @@
#include <opencv2/highgui/highgui.hpp>
#include <QtCore/QObject>
#include "Settings.h"
#include <QtCore/QTimer>
class Camera : public QObject {
Q_OBJECT
public:
Camera(int deviceId = Settings::defaultCamera_deviceId(),
int width = Settings::defaultCamera_imageWidth(),
int height = Settings::defaultCamera_imageHeight(),
QObject * parent = 0);
Camera(QObject * parent = 0);
virtual ~Camera();
void setDeviceId(int deviceId) {deviceId_=deviceId;}
void setImageWidth(int imageWidth) {imageWidth_=imageWidth;}
void setImageHeight(int imageHeight) {imageHeight_=imageHeight;}
int getDeviceId() const {return deviceId_;}
int getImageWidth() const {return imageWidth_;}
int getImageHeight() const {return imageHeight_;}
virtual bool start();
virtual void stop();
signals:
void ready();
void imageReceived(const cv::Mat & image);
public:
bool init();
void close();
IplImage * takeImage();
public slots:
void updateImageRate();
private slots:
void takeImage();
private:
CvCapture * capture_;
int deviceId_;
int imageWidth_;
int imageHeight_;
QTimer cameraTimer_;
};
#endif /* CAMERA_H_ */
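
Making start()/stop() virtual and delivering frames through imageReceived() is what lets a ROS node (per the commit title) substitute its own image source for the local OpenCV capture. A hypothetical subclass could look like the sketch below; the topic name, cv_bridge conversion, and image_transport wiring are assumptions for illustration (and depend on the ROS version in use), not part of this commit.

// Hypothetical ROS-backed camera: frames come from an image topic rather than
// cvCaptureFromCAM(), but downstream widgets still just connect to imageReceived().
#include "Camera.h"
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/Image.h>

class CameraROS : public Camera {
	Q_OBJECT
public:
	CameraROS(QObject * parent = 0) : Camera(parent), it_(nh_) {} // assumes ros::init() was already called
	virtual bool start()
	{
		sub_ = it_.subscribe("image", 1, &CameraROS::imageCallback, this);
		return true;
	}
	virtual void stop()
	{
		sub_.shutdown();
	}
private:
	void imageCallback(const sensor_msgs::ImageConstPtr & msg)
	{
		// Convert the ROS image to cv::Mat (encoding assumed BGR8) and push it out.
		emit imageReceived(cv_bridge::toCvCopy(msg, "bgr8")->image);
	}
	ros::NodeHandle nh_;                 // declared before it_ so it is constructed first
	image_transport::ImageTransport it_;
	image_transport::Subscriber sub_;
};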

View File

@@ -21,21 +21,34 @@
#include <QtGui/QMessageBox>
#include <QtGui/QGraphicsScene>
#include <QtGui/QGraphicsRectItem>
#include <QtGui/QSpinBox>
MainWindow::MainWindow(QWidget * parent) :
// Camera ownership transferred
MainWindow::MainWindow(Camera * camera, QWidget * parent) :
QMainWindow(parent),
camera_(0)
camera_(camera)
{
ui_ = new Ui_mainWindow();
ui_->setupUi(this);
connect(&cameraTimer_, SIGNAL(timeout()), this, SLOT(update()));
if(!camera_)
{
camera_ = new Camera(this);
}
else
{
camera_->setParent(this);
}
QByteArray geometry;
Settings::loadSettings(Settings::iniDefaultPath(), &geometry);
this->restoreGeometry(geometry);
ui_->toolBox->setupUi();
connect((QSpinBox*)ui_->toolBox->getParameterWidget(Settings::kCamera_imageRate()),
SIGNAL(editingFinished()),
camera_,
SLOT(updateImageRate()));
ui_->dockWidget_parameters->hide();
ui_->menuView->addAction(ui_->dockWidget_parameters->toggleViewAction());
ui_->menuView->addAction(ui_->dockWidget_objects->toggleViewAction());
@@ -59,12 +72,8 @@ MainWindow::MainWindow(QWidget * parent) :
MainWindow::~MainWindow()
{
this->stopCamera();
if(camera_)
{
delete camera_;
camera_ = 0;
}
disconnect(camera_, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(update(const cv::Mat &)));
camera_->stop();
dataTree_ = cv::Mat();
qDeleteAll(objects_.begin(), objects_.end());
objects_.clear();
@@ -145,7 +154,7 @@ void MainWindow::removeObject(ObjWidget * object)
void MainWindow::addObject()
{
this->stopCamera();
AddObjectDialog dialog(&objects_, this);
AddObjectDialog dialog(camera_, &objects_, this);
if(dialog.exec() == QDialog::Accepted)
{
showObject(objects_.last());
@@ -248,16 +257,9 @@ void MainWindow::updateData()
void MainWindow::startCamera()
{
if(camera_)
if(camera_->start())
{
camera_->close();
delete camera_;
}
camera_ = Settings::createCamera();
connect(camera_, SIGNAL(ready()), this, SLOT(update()));
if(camera_->init())
{
cameraTimer_.start(1000/Settings::getCamera_imageRate().toInt());
connect(camera_, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(update(const cv::Mat &)));
ui_->actionStop_camera->setEnabled(true);
ui_->actionStart_camera->setEnabled(false);
}
@@ -271,16 +273,14 @@ void MainWindow::stopCamera()
{
if(camera_)
{
cameraTimer_.stop();
camera_->close();
delete camera_;
camera_ = 0;
disconnect(camera_, SIGNAL(imageReceived(const cv::Mat &)), this, SLOT(update(const cv::Mat &)));
camera_->stop();
}
ui_->actionStop_camera->setEnabled(false);
ui_->actionStart_camera->setEnabled(true);
}
void MainWindow::update()
void MainWindow::update(const cv::Mat & image)
{
// reset objects color
for(int i=0; i<objects_.size(); ++i)
@@ -288,188 +288,184 @@ void MainWindow::update()
objects_[i]->resetKptsColor();
}
if(camera_)
if(!image.empty())
{
IplImage * cvImage = camera_->takeImage();
if(cvImage)
IplImage iplImage = image;
QTime time;
time.start();
//Convert to grayscale
IplImage * imageGrayScale = 0;
if(iplImage.nChannels != 1 || iplImage.depth != IPL_DEPTH_8U)
{
QTime time;
time.start();
imageGrayScale = cvCreateImage(cvSize(iplImage.width,iplImage.height), IPL_DEPTH_8U, 1);
cvCvtColor(&iplImage, imageGrayScale, CV_BGR2GRAY);
}
cv::Mat img;
if(imageGrayScale)
{
img = cv::Mat(imageGrayScale);
}
else
{
img = cv::Mat(&iplImage);
}
//Convert to grayscale
IplImage * imageGrayScale = 0;
if(cvImage->nChannels != 1 || cvImage->depth != IPL_DEPTH_8U)
// EXTRACT KEYPOINTS
cv::FeatureDetector * detector = Settings::createFeaturesDetector();
std::vector<cv::KeyPoint> keypoints;
detector->detect(img, keypoints);
delete detector;
ui_->label_timeDetection->setText(QString::number(time.restart()));
// EXTRACT DESCRIPTORS
cv::Mat descriptors;
cv::DescriptorExtractor * extractor = Settings::createDescriptorsExtractor();
extractor->compute(img, keypoints, descriptors);
delete extractor;
if(keypoints.size() != descriptors.rows)
{
printf("ERROR : kpt=%lu != descriptors=%d\n", keypoints.size(), descriptors.rows);
}
if(imageGrayScale)
{
cvReleaseImage(&imageGrayScale);
}
ui_->label_timeExtraction->setText(QString::number(time.restart()));
// COMPARE
int alpha = 20*255/100;
if(!dataTree_.empty())
{
// CREATE INDEX
cv::Mat environment(descriptors.rows, descriptors.cols, CV_32F);
descriptors.convertTo(environment, CV_32F);
cv::flann::Index treeFlannIndex(environment, cv::flann::KDTreeIndexParams());
ui_->label_timeIndexing->setText(QString::number(time.restart()));
// DO NEAREST NEIGHBOR
int k = 2;
int emax = 64;
cv::Mat results(dataTree_.rows, k, CV_32SC1); // results index
cv::Mat dists(dataTree_.rows, k, CV_32FC1); // Distance results are CV_32FC1
treeFlannIndex.knnSearch(dataTree_, results, dists, k, cv::flann::SearchParams(emax) ); // maximum number of leafs checked
ui_->label_timeMatching->setText(QString::number(time.restart()));
// PROCESS RESULTS
ui_->imageView_source->setData(keypoints, cv::Mat(), &iplImage);
int j=0;
std::vector<cv::Point2f> mpts_1, mpts_2;
std::vector<int> indexes_1, indexes_2;
std::vector<uchar> outlier_mask;
for(unsigned int i=0; i<dataTree_.rows; ++i)
{
imageGrayScale = cvCreateImage(cvSize(cvImage->width,cvImage->height), IPL_DEPTH_8U, 1);
cvCvtColor(cvImage, imageGrayScale, CV_BGR2GRAY);
}
cv::Mat img;
if(imageGrayScale)
{
img = cv::Mat(imageGrayScale);
}
else
{
img = cv::Mat(cvImage);
}
// EXTRACT KEYPOINTS
cv::FeatureDetector * detector = Settings::createFeaturesDetector();
std::vector<cv::KeyPoint> keypoints;
detector->detect(img, keypoints);
delete detector;
ui_->label_timeDetection->setText(QString::number(time.restart()));
// EXTRACT DESCRIPTORS
cv::Mat descriptors;
cv::DescriptorExtractor * extractor = Settings::createDescriptorsExtractor();
extractor->compute(img, keypoints, descriptors);
delete extractor;
if(keypoints.size() != descriptors.rows)
{
printf("ERROR : kpt=%lu != descriptors=%d\n", keypoints.size(), descriptors.rows);
}
if(imageGrayScale)
{
cvReleaseImage(&imageGrayScale);
}
ui_->label_timeExtraction->setText(QString::number(time.restart()));
// COMPARE
int alpha = 20*255/100;
if(!dataTree_.empty())
{
// CREATE INDEX
cv::Mat environment(descriptors.rows, descriptors.cols, CV_32F);
descriptors.convertTo(environment, CV_32F);
cv::flann::Index treeFlannIndex(environment, cv::flann::KDTreeIndexParams());
ui_->label_timeIndexing->setText(QString::number(time.restart()));
// DO NEAREST NEIGHBOR
int k = 2;
int emax = 64;
cv::Mat results(dataTree_.rows, k, CV_32SC1); // results index
cv::Mat dists(dataTree_.rows, k, CV_32FC1); // Distance results are CV_32FC1
treeFlannIndex.knnSearch(dataTree_, results, dists, k, cv::flann::SearchParams(emax) ); // maximum number of leafs checked
ui_->label_timeMatching->setText(QString::number(time.restart()));
// PROCESS RESULTS
ui_->imageView_source->setData(keypoints, cv::Mat(), cvImage);
int j=0;
std::vector<cv::Point2f> mpts_1, mpts_2;
std::vector<int> indexes_1, indexes_2;
std::vector<uchar> outlier_mask;
for(unsigned int i=0; i<dataTree_.rows; ++i)
// Check if this descriptor matches with those of the objects
// Apply NNDR
if(dists.at<float>(i,0) <= Settings::getNearestNeighbor_nndrRatio().toFloat() * dists.at<float>(i,1))
{
// Check if this descriptor matches with those of the objects
// Apply NNDR
if(dists.at<float>(i,0) <= Settings::getNearestNeighbor_nndrRatio().toFloat() * dists.at<float>(i,1))
if(j>0)
{
if(j>0)
{
mpts_1.push_back(objects_.at(j)->keypoints().at(i-dataRange_.at(j-1)).pt);
indexes_1.push_back(i-dataRange_.at(j-1));
}
else
{
mpts_1.push_back(objects_.at(j)->keypoints().at(i).pt);
indexes_1.push_back(i);
}
mpts_2.push_back(ui_->imageView_source->keypoints().at(results.at<int>(i,0)).pt);
indexes_2.push_back(results.at<int>(i,0));
mpts_1.push_back(objects_.at(j)->keypoints().at(i-dataRange_.at(j-1)).pt);
indexes_1.push_back(i-dataRange_.at(j-1));
}
if(i+1 >= dataRange_.at(j))
else
{
QLabel * label = ui_->dockWidget_objects->findChild<QLabel*>(QString("%1detection").arg(objects_.at(j)->id()));
if(mpts_1.size() >= Settings::getHomography_minimumInliers().toInt())
mpts_1.push_back(objects_.at(j)->keypoints().at(i).pt);
indexes_1.push_back(i);
}
mpts_2.push_back(ui_->imageView_source->keypoints().at(results.at<int>(i,0)).pt);
indexes_2.push_back(results.at<int>(i,0));
}
if(i+1 >= dataRange_.at(j))
{
QLabel * label = ui_->dockWidget_objects->findChild<QLabel*>(QString("%1detection").arg(objects_.at(j)->id()));
if(mpts_1.size() >= Settings::getHomography_minimumInliers().toInt())
{
cv::Mat H = findHomography(mpts_1,
mpts_2,
cv::RANSAC,
Settings::getHomography_ransacReprojThr().toDouble(),
outlier_mask);
int inliers=0, outliers=0;
QColor color((Qt::GlobalColor)(j % 12 + 7 ));
color.setAlpha(alpha);
for(int k=0; k<mpts_1.size();++k)
{
if(outlier_mask.at(k))
{
++inliers;
}
else
{
++outliers;
}
}
// COLORIZE
if(inliers >= Settings::getHomography_minimumInliers().toInt())
{
cv::Mat H = findHomography(mpts_1,
mpts_2,
cv::RANSAC,
Settings::getHomography_ransacReprojThr().toDouble(),
outlier_mask);
int inliers=0, outliers=0;
QColor color((Qt::GlobalColor)(j % 12 + 7 ));
color.setAlpha(alpha);
for(int k=0; k<mpts_1.size();++k)
{
if(outlier_mask.at(k))
{
++inliers;
objects_.at(j)->setKptColor(indexes_1.at(k), color);
ui_->imageView_source->setKptColor(indexes_2.at(k), color);
}
else
{
++outliers;
objects_.at(j)->setKptColor(indexes_1.at(k), QColor(0,0,0,alpha));
}
}
// COLORIZE
if(inliers >= Settings::getHomography_minimumInliers().toInt())
{
for(int k=0; k<mpts_1.size();++k)
{
if(outlier_mask.at(k))
{
objects_.at(j)->setKptColor(indexes_1.at(k), color);
ui_->imageView_source->setKptColor(indexes_2.at(k), color);
}
else
{
objects_.at(j)->setKptColor(indexes_1.at(k), QColor(0,0,0,alpha));
}
}
label->setText(QString("%1 in %2 out").arg(inliers).arg(outliers));
QTransform hTransform(
H.at<double>(0,0), H.at<double>(1,0), H.at<double>(2,0),
H.at<double>(0,1), H.at<double>(1,1), H.at<double>(2,1),
H.at<double>(0,2), H.at<double>(1,2), H.at<double>(2,2));
QPen rectPen(color);
rectPen.setWidth(4);
QGraphicsRectItem * rectItem = new QGraphicsRectItem(objects_.at(j)->image().rect());
rectItem->setPen(rectPen);
rectItem->setTransform(hTransform);
ui_->imageView_source->addRect(rectItem);
label->setText(QString("%1 in %2 out").arg(inliers).arg(outliers));
QTransform hTransform(
H.at<double>(0,0), H.at<double>(1,0), H.at<double>(2,0),
H.at<double>(0,1), H.at<double>(1,1), H.at<double>(2,1),
H.at<double>(0,2), H.at<double>(1,2), H.at<double>(2,2));
QPen rectPen(color);
rectPen.setWidth(4);
QGraphicsRectItem * rectItem = new QGraphicsRectItem(objects_.at(j)->image().rect());
rectItem->setPen(rectPen);
rectItem->setTransform(hTransform);
ui_->imageView_source->addRect(rectItem);
}
else
{
label->setText(QString("Too low inliers (%1)").arg(inliers));
}
}
else
{
label->setText(QString("Too low matches (%1)").arg(mpts_1.size()));
label->setText(QString("Too low inliers (%1)").arg(inliers));
}
mpts_1.clear();
mpts_2.clear();
indexes_1.clear();
indexes_2.clear();
outlier_mask.clear();
++j;
}
else
{
label->setText(QString("Too low matches (%1)").arg(mpts_1.size()));
}
mpts_1.clear();
mpts_2.clear();
indexes_1.clear();
indexes_2.clear();
outlier_mask.clear();
++j;
}
}
else
{
ui_->imageView_source->setData(keypoints, cv::Mat(), cvImage);
}
//Update object pictures
for(int i=0; i<objects_.size(); ++i)
{
objects_[i]->update();
}
ui_->label_nfeatures->setText(QString::number(keypoints.size()));
ui_->imageView_source->update();
ui_->label_timeGui->setText(QString::number(time.restart()));
cvReleaseImage(&cvImage);
}
else
{
ui_->imageView_source->setData(keypoints, cv::Mat(), &iplImage);
}
//Update object pictures
for(int i=0; i<objects_.size(); ++i)
{
objects_[i]->update();
}
ui_->label_nfeatures->setText(QString::number(keypoints.size()));
ui_->imageView_source->update();
ui_->label_timeGui->setText(QString::number(time.restart()));
}
ui_->label_detectorDescriptorType->setText(QString("%1/%2").arg(Settings::currentDetectorType()).arg(Settings::currentDescriptorType()));
ui_->label_timeRefreshRate->setText(QString("(%1 Hz - %2 Hz)").arg(QString::number(1000/cameraTimer_.interval())).arg(QString::number(int(1000.0f/(float)(updateRate_.restart()) + 1))));
ui_->label_timeRefreshRate->setText(QString("(%1 Hz - %2 Hz)").arg(QString::number(Settings::getCamera_imageRate().toInt())).arg(QString::number(int(1000.0f/(float)(updateRate_.restart()) + 1))));
}
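
With the camera now handed to MainWindow (which reparents it and therefore owns it), the application entry point just builds whichever camera it wants and passes it in. A minimal sketch of such a main(), using the Qt 4 headers seen elsewhere in the project (the project's real main.cpp is not part of this diff):

#include <QtGui/QApplication>
#include "Camera.h"
#include "MainWindow.h"

int main(int argc, char * argv[])
{
	QApplication app(argc, argv);
	Camera * camera = new Camera();  // heap-allocated: MainWindow reparents and deletes it
	MainWindow window(camera);       // a ROS node could pass its own Camera subclass here
	window.show();
	return app.exec();
}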

View File

@@ -23,7 +23,7 @@ class MainWindow : public QMainWindow
Q_OBJECT
public:
MainWindow(QWidget * parent = 0);
MainWindow(Camera * camera = 0, QWidget * parent = 0);
virtual ~MainWindow();
protected:
@@ -35,7 +35,7 @@ private slots:
void stopCamera();
void loadObjects();
void saveObjects();
void update();
void update(const cv::Mat & image);
void updateData();
void removeObject(ObjWidget * object);
@@ -46,7 +46,6 @@ private:
Ui_mainWindow * ui_;
Camera * camera_;
QList<ObjWidget*> objects_;
QTimer cameraTimer_;
cv::Mat dataTree_;
QList<int> dataRange_;
QTime updateRate_;

View File

@@ -1,17 +1,3 @@
/*
* Object.cpp
*
* Created on: 2011-10-23
* Author: matlab
*/
/*
* VisualObject.h
*
* Created on: 2011-10-21
* Author: matlab
*/
#include "ObjWidget.h"
#include "KeypointItem.h"

View File

@@ -25,6 +25,11 @@ ParametersToolBox::~ParametersToolBox()
{
}
QWidget * ParametersToolBox::getParameterWidget(const QString & key)
{
return this->findChild<QWidget*>(key);
}
void ParametersToolBox::resetCurrentPage()
{
const QObjectList & children = this->widget(this->currentIndex())->children();
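
The new getParameterWidget() added above is a plain findChild() lookup by parameter key; callers cast the result to the concrete editor type, as the MainWindow diff does for the Camera/imageRate spin box with a C-style cast. A qobject_cast keeps that cast checked (illustrative variant, not what the commit uses):

// Checked variant of the spin-box lookup shown in the MainWindow diff above.
QSpinBox * rateBox = qobject_cast<QSpinBox*>(
		ui_->toolBox->getParameterWidget(Settings::kCamera_imageRate()));
if(rateBox)
{
	connect(rateBox, SIGNAL(editingFinished()), camera_, SLOT(updateImageRate()));
}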

View File

@@ -22,6 +22,7 @@ public:
virtual ~ParametersToolBox();
void setupUi();
QWidget * getParameterWidget(const QString & key);
private:
void addParameter(QVBoxLayout * layout, const QString & key, const QVariant & value);
@@ -36,7 +37,6 @@ private slots:
void changeParameter(const QString & value);
void changeParameter(const int & value);
void resetCurrentPage();
};
#endif /* PARAMETERSTOOLBOX_H_ */

View File

@@ -291,14 +291,6 @@ cv::DescriptorExtractor * Settings::createDescriptorsExtractor()
return extractor;
}
Camera * Settings::createCamera()
{
return new Camera(
getCamera_deviceId().toInt(),
getCamera_imageWidth().toInt(),
getCamera_imageHeight().toInt());
}
QString Settings::currentDetectorType()
{
int index = Settings::getDetector_Type().toString().split(':').first().toInt();

View File

@@ -136,8 +136,6 @@ public:
static QString currentDescriptorType();
static QString currentDetectorType();
static Camera * createCamera();
private:
Settings(){}