Search code examples
c++qtcamerastreamingqthread

Qt GUI thread is not responding when camera is streaming


I am using a Basler camera to stream images and analyze them at the same time. The program has a camera class, an image-detection class, and an image-acquisition class in a static-library subproject, plus a GUI that shows the stream in another subproject. However, while the camera is streaming, the GUI keeps going "not responding". What did I do wrong — do I need to use QThread in this scenario? If I have to use QThread, should I create it in the main GUI thread or in the Acquisition class? Camera class to grab the image stream from the Basler camera:

// Camera wrapper: owns the Basler camera handle and receives Pylon image
// callbacks.  NOTE(review): Pylon image-event callbacks arrive on a
// Pylon-managed grab thread, not on the thread this QObject lives in.
class ICam: public QObject
    ,public Pylon::CImageEventHandler  {
    Q_OBJECT
    public:
        ICam(QObject *parent = nullptr);
        // Wraps the most recently grabbed frame as a cv::Mat (see the
        // definition below: the Mat shares the internal bitmap buffer,
        // no deep copy is made).
        Mat GetOpencvImage();
        // Starts the camera's free-running grab loop.
        void StartContinuousGrabbing();
    signals:
        // Emitted after each frame has been copied into the internal bitmap.
        void OneImageFinishSignal();
    protected:
        // Pylon callback, invoked once per grabbed frame.
        virtual void OnImageGrabbed(Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& grabResult);
    private:
        CBaslerUniversalInstantCamera m_camera;
        // NOTE(review): m_ptr_grab_result, m_bitmap_image and clock are used
        // by the definitions below but are not declared here -- presumably
        // trimmed from this snippet; confirm against the full header.
};

// Pylon grab-thread callback: snapshots the grab result into member state
// under a lock, then notifies listeners.
// NOTE(review): this runs on Pylon's grab thread, so the emitted signal
// crosses threads.  If the connection resolves as direct (same-thread),
// the receiving slot -- including any heavy image processing -- executes
// on the grab thread; confirm the connection type used by receivers.
void ICam::OnImageGrabbed(Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& grabResult)
{
    clock.Lock();
    m_ptr_grab_result = grabResult;//Pass the captured image out
    m_bitmap_image.CopyImage(grabResult);
    clock.Unlock();
    emit OneImageFinishSignal();
}

// Returns the most recently grabbed frame as an OpenCV image.
//
// Fix vs. original: the returned cv::Mat merely WRAPPED m_bitmap_image's
// internal buffer -- the next OnImageGrabbed() overwrites that buffer from
// the grab thread while the caller may still be reading it.  We now take
// the same lock the writer takes and return a deep copy, so the caller
// owns its pixels.  Assumes 8-bit 3-channel data (CV_8UC3) -- TODO confirm
// the camera's configured pixel format.
Mat ICam::GetOpencvImage(){
    clock.Lock();
    Mat frame = cv::Mat(m_bitmap_image.GetHeight(), m_bitmap_image.GetWidth(),
                        CV_8UC3, (uint8_t *)m_bitmap_image.GetBuffer()).clone();
    clock.Unlock();
    return frame;
}

// Starts free-running acquisition.  GrabLoop_ProvidedByInstantCamera asks
// Pylon to run the grab loop in a Pylon-managed thread, which then invokes
// OnImageGrabbed() for every frame -- acquisition is off the GUI thread
// from here on.
void ICam::StartContinuousGrabbing(){
    m_camera.StartGrabbing( Pylon::GrabStrategy_OneByOne, Pylon::GrabLoop_ProvidedByInstantCamera);
}

Detect class to do image processing to detect the iris eye:

// Iris-detection helper: pure image processing, no camera dependencies.
class Detect : public QObject
{
    Q_OBJECT
    public:
       explicit Detect(QObject *parent = nullptr);
       // Locates the iris in img_input and returns its offset from the
       // image centre.
       // NOTE(review): the out-of-class definition below takes an extra
       // bool parameter ("show") that this declaration lacks -- the two
       // signatures must be reconciled for the file to link.
       cv::Point CalculateIrisOffset(cv::Mat img_input);
 };


// Detects the iris in img_input and returns its offset from the image
// centre.  NOTE(review): this definition takes a second bool parameter
// that the in-class declaration above lacks, and it returns
// `center_offset`, which is never declared in the elided body -- both are
// presumably artifacts of trimming the snippet; confirm against the full
// source.
cv::Point Detect::CalculateIrisOffset(cv::Mat img_input, bool show) {

    //Some Code to detect the iris

    return center_offset;
}

The Acquisition class contains an icam object of the ICam class and a detect object of the Detect class as member attributes. It receives a signal from the icam object when an image is grabbed and sends a signal to the GUI to display the image; at the same time, it calls the detect function of the Detect class to process the image:

// Mediator between the camera (ICam) and the GUI: re-emits the camera's
// per-frame signal after running iris detection on each grabbed image.
class Acquisition: public QObject
{
    Q_OBJECT
    public:
        Acquisition (QObject *parent = nullptr);
        // Forwards to ICam::StartContinuousGrabbing().
        void StartContinuousGrabbing();
        // Fetches the latest frame from the owned ICam.
        Mat GetOpenCVImageFromICam();
    signals:
        // Re-emitted towards the GUI once per processed frame.
        void OneImageFinishSignal();
    private slots:
        // Receives ICam's per-frame signal; runs detection, then re-emits.
        void OneImageFinishSlot();
    private:
        ICam *icam;      // created without a QObject parent in the ctor -- leak risk
        Detect *detect;  // same
};
// Constructs the owned camera wrapper and detector and wires the camera's
// per-frame signal to our detection slot.
//
// Fixes vs. original: the children are parented to `this` so Qt deletes
// them with the Acquisition (they were allocated with no parent and never
// freed), and the string-based SIGNAL/SLOT connect is replaced with the
// pointer-to-member form, which is checked at compile time.
Acquisition:: Acquisition(QObject *parent) : QObject(parent)
                                                    ,icam(new ICam(this))
                                                    ,detect(new Detect(this))
{
    connect(icam, &ICam::OneImageFinishSignal, this, &Acquisition::OneImageFinishSlot);
}

// Per-frame handler: fetch the newly grabbed image, run iris detection on
// it, then tell the GUI a frame is ready.
//
// Fix vs. original: the result was assigned to an undeclared name
// (`center_offset`) through a method Detect does not declare
// (`CalculateOffset`); both now use the names actually declared above
// (`center_iris_offset`, `CalculateIrisOffset`).
void Acquisition::OneImageFinishSlot(){
    cv::Mat img_input= icam-> GetOpencvImage ();
    cv::Point center_iris_offset = detect->CalculateIrisOffset(img_input);
    emit(OneImageFinishSignal());
}
void Acquisition::StartContinuousGrabbing(){
    this->icam->StartContinuousGrabbing();
}
// Fetches the latest frame from the owned camera wrapper.
//
// Fix vs. original: the definition was qualified with a class name that
// does not exist in this file (`CDLImageAcquisition`) and dereferenced a
// member spelled `icam_`; both now match the Acquisition class declared
// above.
Mat Acquisition::GetOpenCVImageFromICam(){
    return this->icam->GetOpencvImage();
}

Main GUI class:

// Main GUI: paints the live camera stream into a docked widget by
// intercepting that widget's paint events.
class MainWizard : public QWizard
{
    Q_OBJECT
    public:
        explicit MainWizard(QWidget *parent = nullptr);
    private slots:
        // Triggered once per processed frame; schedules a repaint.
        void OneImageFinishSlot();
        // Paints `image` onto `object`.
        void ShowImage(QWidget *object, Mat image);
    private:
        // Intercepts paint events of the streaming widget.
        virtual bool eventFilter(QObject *watched, QEvent *event);
        Acquisition *acquisition;
        // NOTE(review): the definitions below also use `ui` and
        // `m_mutex_lock`, which are not declared here -- presumably
        // trimmed from the snippet.
};

// Builds the UI, wires the acquisition pipeline, then starts streaming.
//
// Fix vs. original: the signal/slot connection is now made BEFORE grabbing
// is started, so frames emitted right after StartContinuousGrabbing()
// cannot arrive before anyone is listening (the original connected after
// starting).
MainWizard::MainWizard(QWidget *parent) :
    QWizard(parent),
    ui(new Ui::MainWizard), 
    acquisition(new Acquisition())
{
    ui->setupUi(this);
    ui->dock_cnt_continuous_grab->installEventFilter(this);//Install Qt's event filter
    connect(acquisition, SIGNAL(OneImageFinishSignal()), this, SLOT(OneImageFinishSlot()));
    acquisition ->StartContinuousGrabbing();
}

void MainWizard::OneImageFinishSlot(){
    ui->dock_cnt_continuous_grab->update();

}

// Intercepts paint events of the streaming widget and paints the latest
// camera frame into it.  Always returns false so normal event processing
// continues after our painting.
// NOTE(review): this fetches a frame from the camera on every repaint, on
// the GUI thread -- with the large frames described above, this is a
// likely contributor to the "not responding" symptom.
bool MainWizard::eventFilter(QObject *watched, QEvent *event)
{
  if (watched == ui->dock_cnt_continuous_grab && event->type() == QEvent::Paint)
  {
      cv::Mat opencv_image = acquisition->GetOpenCVImageFromICam();
      this->ShowImage(ui->dock_cnt_continuous_grab, opencv_image);
  }
  return false;
}

// Paints `image` scaled to fill `widget`.
//
// Fixes vs. original:
//  * the frame was drawn TWICE (drawImage, then an identical drawPixmap of
//    the same pixels on top), doubling the per-paint cost -- drawn once now;
//  * the QImage is built with the Mat's row stride (image.step), so frames
//    whose rows are padded no longer shear;
//  * the target rectangle uses the widget's size rather than the wizard's
//    (`this->size()`), so the frame fills the widget it is painted on.
// Assumes `image` is 8-bit 3-channel (CV_8UC3) -- TODO confirm the
// camera's pixel format upstream.
void MainWizard::ShowImage(QWidget *widget, Mat image)
{
    m_mutex_lock.lock();
    QPainter painter(widget);

    // Wrap the Mat's pixel buffer (no copy); stride keeps rows aligned.
    QImage img((const unsigned char *)(image.data), image.cols, image.rows,
               (int)image.step, QImage::Format_RGB888);

    QRectF target;
    target.setLeft(0);
    target.setTop(0);
    target.setSize(widget->size());

    QRectF source;
    source.setLeft(0);
    source.setTop(0);
    source.setSize(img.size());

    painter.drawImage(target, img, source);
    m_mutex_lock.unlock();
}

Solution

  • I finally fixed the problem by making some changes. I think the main problem was that the grabbed frames were too big for the program to analyze and display on the GUI at the same time: the initial grabbed image size is 4000×3096×3. I now divide the height and width by 8 before sending the frame through the signal, and divide each dimension by 2 again for image processing. Additionally, I emit a signal that carries the newly grabbed image instead of emitting an empty signal as before. On the GUI side, I use QGraphicsView and QGraphicsPixmapItem instead of the paint-event filter. Here is the detailed code:

    // Camera wrapper (revised): converts each grabbed frame to BGR and
    // ships a downscaled deep copy of the pixels inside the signal itself,
    // so receivers never touch camera-owned buffers.
    class ICam: public QObject
    ,public Pylon::CImageEventHandler  {
    Q_OBJECT
    public:
        ICam(QObject *parent = nullptr);
        void StartContinuousGrabbing();
    signals:
        // Carries a deep copy of the resized frame.
        // NOTE(review): for a queued (cross-thread) connection, Mat must be
        // registered with qRegisterMetaType -- confirm this is done.
        void OneImageFinishSignal(Mat img);
    protected:
        // Pylon callback, invoked on the grab thread once per frame.
        virtual void OnImageGrabbed(Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& grabResult);
    private:
        CBaslerUniversalInstantCamera m_camera;
        CPylonImage pylon_image;
        CImageFormatConverter format_converter;// in constructor: format_converter.OutputPixelFormat = PixelType_BGR8packed;
    };
    
    // Pylon grab-thread callback (revised): convert the frame to BGR,
    // downscale it 8x, and emit a deep copy with the signal.
    //
    // Fix vs. original: the original converted into `pylon_image` but then
    // resized an unrelated member (`opencv_image_`) that nothing in this
    // snippet ever filled from the conversion result, leaving the converted
    // buffer unused.  The converted buffer is now wrapped in a cv::Mat and
    // that is what gets resized -- and the wrap + resize happen under the
    // lock, since `pylon_image` is a member the next grab will overwrite.
    void ICam::OnImageGrabbed(Pylon::CInstantCamera& camera, const Pylon::CGrabResultPtr& grabResult)
    {
        clock.Lock();
        m_ptr_grab_result = grabResult;//Pass the captured image out
        m_bitmap_image.CopyImage(grabResult);
        format_converter.Convert(pylon_image, grabResult);

        // BGR8packed (see converter setup above) => 3-channel 8-bit.
        cv::Mat frame(pylon_image.GetHeight(), pylon_image.GetWidth(),
                      CV_8UC3, (uint8_t *)pylon_image.GetBuffer());
        cv::Mat img_resize;
        resize(frame, img_resize, cv::Size(frame.cols/8, frame.rows/8),
               0, 0, cv::INTER_AREA);
        clock.Unlock();

        //Tell the gui that there is a new image available
        emit OneImageFinishSignal(img_resize.clone());
     }
    
    
    // Starts free-running acquisition; Pylon runs the grab loop in its own
    // thread and calls OnImageGrabbed() above for every frame.
    void ICam::StartContinuousGrabbing(){
        m_camera.StartGrabbing( Pylon::GrabStrategy_OneByOne, Pylon::GrabLoop_ProvidedByInstantCamera);
    }
    

    Acquisition class:

    // Mediator (revised): the frame now travels inside the signal instead
    // of being pulled back out of the camera by the GUI.
    // NOTE(review): the GUI constructor below still calls
    // acquisition->StartContinuousGrabbing(), but that method is no longer
    // declared here -- presumably dropped while trimming the snippet;
    // confirm against the full header.
    class Acquisition: public QObject
    {
        Q_OBJECT
        public:
            Acquisition (QObject *parent = nullptr);
        signals:
            // Re-emitted per frame, carrying the downscaled image.
            void OneImageFinishSignal(Mat img);
        private slots:
            // Runs iris detection on the incoming frame, then re-emits it.
            void OneImageFinishSlot(Mat img_input);
        private:
            ICam *icam;
            Detect *detect;
    };
    // Wires the camera's per-frame signal to the detection slot.
    //
    // Fix vs. original: the SIGNAL/SLOT strings omitted the Mat parameter
    // ("OneImageFinishSignal()"), but the revised signal and slot both
    // carry a Mat -- string-based connects match on the normalized
    // signature, so the connection silently failed at runtime.  The
    // strings now spell the full signatures.  (For a queued cross-thread
    // connection, Mat must also be registered with qRegisterMetaType.)
    Acquisition:: Acquisition(QObject *parent) : QObject(parent)
                                                    ,icam(new ICam())
                                                    ,detect(new Detect(2))//resize by 2
    {

        connect(this->icam, SIGNAL(OneImageFinishSignal(Mat)), this, SLOT(OneImageFinishSlot(Mat)));
    }
    
    // Per-frame handler: run iris detection on the incoming (already
    // downscaled) frame, then forward the frame to the GUI.
    //
    // Fix vs. original: the result was stored into an undeclared name
    // (`center_offset`) via a method Detect does not declare
    // (`CalculateOffset`); both now use the names declared earlier in the
    // file (`center_iris_offset`, `CalculateIrisOffset`).
    void Acquisition::OneImageFinishSlot(Mat img_input){
        cv::Point center_iris_offset = detect->CalculateIrisOffset(img_input);
        emit OneImageFinishSignal(img_input);
    }
    

    Main GUI class:

    // Main GUI (revised): displays frames through a QGraphicsView /
    // QGraphicsPixmapItem pair instead of a paint-event filter.
    class MainWizard : public QWizard
    {
        Q_OBJECT
        public:
            explicit MainWizard(QWidget *parent = nullptr);
            // Converts a cv::Mat (8UC1 or 8UC3) to a QImage.
            QImage PutImage(const Mat& mat);
        private slots:
            // Receives each processed frame and shows it in the view.
            void OneImageFinishSlot(Mat img);
        private:
            QGraphicsScene* scene;              // owns the pixmap item
            QGraphicsPixmapItem* pixmap_item;   // updated once per frame
            QMutex m_mutex_lock;
            cv::Mat opencv_image;            
            Acquisition *acquisition;
    };
    
    // Builds the scene/item pair, wires the pipeline, then starts grabbing.
    //
    // Fixes vs. original: the scene is attached to the view here, once, at
    // construction time (instead of relying on the per-frame slot to do
    // it), and the signal/slot connection is made BEFORE grabbing starts
    // so the first frames cannot arrive before anyone is listening.
    MainWizard::MainWizard(QWidget *parent) :
        QWizard(parent),
        ui(new Ui::MainWizard), 
        acquisition(new Acquisition())
    {
        ui->setupUi(this);
        scene = new QGraphicsScene(this);
        pixmap_item = new QGraphicsPixmapItem();
        scene->addItem(pixmap_item);
        ui->gphv_continuous_grab->setScene(scene);

        connect(acquisition, SIGNAL(OneImageFinishSignal(Mat)), this, SLOT(OneImageFinishSlot(Mat)));
        acquisition ->StartContinuousGrabbing();
    }
    
    // Show the incoming frame: swap the freshly converted pixmap into the
    // scene's item, then repaint the view.
    void MainWizard::OneImageFinishSlot(Mat img){
        m_mutex_lock.lock();

        const QPixmap frame_pixmap = QPixmap::fromImage(PutImage(img));
        pixmap_item->setPixmap(frame_pixmap);
        scene->update();

        ui->gphv_continuous_grab->setScene(scene);
        ui->gphv_continuous_grab->update();

        m_mutex_lock.unlock();
    }
    
    
    /*Convert openCV image to QImage*/
    QImage MainWizard::PutImage(const Mat& mat)
    {
        // 8-bits unsigned, NO. OF CHANNELS=1
        if(mat.type()==CV_8UC1){
            // Set the color table (used to translate colour indexes to qRgb values)
            QVector<QRgb> colorTable;
            for (int i=0; i<256; i++)
                colorTable.push_back(qRgb(i,i,i));
            // Copy input Mat
            const uchar *qImageBuffer = (const uchar*)mat.data;
            // Create QImage with same dimensions as input Mat
            QImage img(qImageBuffer, mat.cols, mat.rows, QImage::Format_Indexed8);
            img.setColorTable(colorTable);
            return img;
        }
        // 8-bits unsigned, NO. OF CHANNELS=3
        if(mat.type()==CV_8UC3){
            // Copy input Mat
            const uchar *qImageBuffer = (const uchar*)mat.data;
            // Create QImage with same dimensions as input Mat
            QImage img(qImageBuffer, mat.cols, mat.rows, QImage::Format_RGB888);
            return img.rgbSwapped();
        }else{
            qDebug() << "ERROR: Mat could not be converted to QImage.";
            return QImage();
        }
    }