diff --git a/source/Miniscope-DAQ-QT-Software.pro b/source/Miniscope-DAQ-QT-Software.pro index 4d7421d..3af1c2f 100644 --- a/source/Miniscope-DAQ-QT-Software.pro +++ b/source/Miniscope-DAQ-QT-Software.pro @@ -79,6 +79,8 @@ win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../../../../openc #win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../../../../opencv-build420/install/x64/vc15/lib/ -lopencv_videoio_ffmpeg420_64 else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../../../../opencv-build420/install/x64/vc15/lib/ -lopencv_world420d +#LIBS += -L$$PWD/../../../../../../../opencv-build420/install/x64/vc15/lib/ -lopencv_world420 + INCLUDEPATH += $$PWD/../../../../../../../opencv-build420/install/include DEPENDPATH += $$PWD/../../../../../../../opencv-build420/install/include diff --git a/source/behaviorcam.cpp b/source/behaviorcam.cpp index 76b0520..f11f1be 100644 --- a/source/behaviorcam.cpp +++ b/source/behaviorcam.cpp @@ -43,7 +43,7 @@ BehaviorCam::BehaviorCam(QObject *parent, QJsonObject ucBehavCam) : // ------------------------- // Setup OpenCV camera stream - behavCamStream = new VideoStreamOCV; + behavCamStream = new VideoStreamOCV(nullptr, m_cBehavCam["width"].toInt(), m_cBehavCam["height"].toInt()); behavCamStream->setDeviceName(m_deviceName); behavCamStream->setStreamHeadOrientation(m_streamHeadOrientationState); @@ -122,6 +122,12 @@ void BehaviorCam::createView() vidDisplay = rootObject->findChild("vD"); vidDisplay->setMaxBuffer(FRAME_BUFFER_SIZE); + // Turn on or off saturation display + if (m_ucBehavCam["showSaturation"].toBool(true)) + vidDisplay->setShowSaturation(1); + else + vidDisplay->setShowSaturation(0); + QObject::connect(rootObject, SIGNAL( takeScreenShotSignal() ), this, SLOT( handleTakeScreenShotSignal() )); QObject::connect(rootObject, SIGNAL( vidPropChangedSignal(QString, double, double, double) ), diff --git a/source/miniscope.cpp b/source/miniscope.cpp index 492dcbb..e6ef23f 100644 --- 
a/source/miniscope.cpp +++ b/source/miniscope.cpp @@ -47,7 +47,7 @@ Miniscope::Miniscope(QObject *parent, QJsonObject ucMiniscope) : // ------------------------- // Setup OpenCV camera stream - miniscopeStream = new VideoStreamOCV; + miniscopeStream = new VideoStreamOCV(nullptr, m_cMiniscopes["width"].toInt(-1), m_cMiniscopes["height"].toInt(-1)); miniscopeStream->setDeviceName(m_deviceName); miniscopeStream->setStreamHeadOrientation(m_streamHeadOrientationState); @@ -141,6 +141,12 @@ void Miniscope::createView() vidDisplay = rootObject->findChild("vD"); vidDisplay->setMaxBuffer(FRAME_BUFFER_SIZE); + // Turn on or off show saturation display + if (m_ucMiniscope["showSaturation"].toBool(true)) + vidDisplay->setShowSaturation(1); + else + vidDisplay->setShowSaturation(0); + if (m_streamHeadOrientationState) bnoDisplay = rootObject->findChild("bno"); @@ -399,7 +405,6 @@ void Miniscope::sendNewFrame(){ // vidDisplay->setProperty("displayFrame", QImage("C:/Users/DBAharoni/Pictures/Miniscope/Logo/1.png")); int f = *m_acqFrameNum; cv::Mat tempMat1, tempMat2; - if (f > m_previousDisplayFrameNum) { m_previousDisplayFrameNum = f; QImage tempFrame2; @@ -434,8 +439,11 @@ void Miniscope::sendNewFrame(){ baselinePreviousTimeStamp = timeStampBuffer[f]; baselineFrameBufWritePos++; } - if (m_displatState == "Raw") + + if (m_displatState == "Raw") { + vidDisplay->setDisplayFrame(tempFrame2.copy()); + } else if (m_displatState == "dFF") { // TODO: Implement this better. I am sure it can be sped up a lot. 
Maybe do most of it in a shader tempMat2 = frameBuffer[f].clone(); diff --git a/source/videodisplay.cpp b/source/videodisplay.cpp index 55afdea..5d70311 100644 --- a/source/videodisplay.cpp +++ b/source/videodisplay.cpp @@ -34,6 +34,14 @@ void VideoDisplay::setDisplayFrame(QImage frame) { if (m_renderer) m_renderer->setDisplayFrame(m_displayFrame2.copy()); } + +void VideoDisplay::setShowSaturation(double value) +{ + m_showSaturation = value; + if (m_renderer) { + m_renderer->setShowSaturation(value); + } +} //void VideoDisplayRenderer::setDisplayFrame(QImage frame) { // m_displayFrame = frame; //} @@ -75,6 +83,7 @@ void VideoDisplay::sync() { if (!m_renderer) { m_renderer = new VideoDisplayRenderer(); + m_renderer->setShowSaturation(m_showSaturation); // m_renderer->setDisplayFrame(QImage("C:/Users/DBAharoni/Pictures/Miniscope/Logo/1.png")); connect(window(), &QQuickWindow::beforeRendering, m_renderer, &VideoDisplayRenderer::paint, Qt::DirectConnection); } @@ -140,7 +149,7 @@ void VideoDisplayRenderer::paint() m_program->setAttributeArray(1, GL_FLOAT, texcoord, 2); m_program->setUniformValue("alpha", (float) m_alpha); m_program->setUniformValue("beta", (float) m_beta); - m_program->setUniformValue("showSaturation", (float) 1); + m_program->setUniformValue("showSaturation", (float) m_showStaturation); glViewport(0, 0, m_viewportSize.width(), m_viewportSize.height()); diff --git a/source/videodisplay.h b/source/videodisplay.h index 0a28236..09f4032 100644 --- a/source/videodisplay.h +++ b/source/videodisplay.h @@ -22,7 +22,8 @@ class VideoDisplayRenderer : public QObject, protected QOpenGLFunctions m_texture(0), m_newFrame(false), m_alpha(1), - m_beta(0) + m_beta(0), + m_showStaturation(1) { } ~VideoDisplayRenderer(); @@ -32,6 +33,7 @@ class VideoDisplayRenderer : public QObject, protected QOpenGLFunctions void setWindow(QQuickWindow *window) { m_window = window; } void setAlpha(double a) {m_alpha = a;} void setBeta(double b) {m_beta = b;} + void 
setShowSaturation(double value) {m_showStaturation = value; } signals: void requestNewFrame(); @@ -50,6 +52,7 @@ public slots: double m_alpha; double m_beta; + double m_showStaturation; }; @@ -86,6 +89,7 @@ class VideoDisplay : public QQuickItem void setDisplayFrame(QImage frame); void setAlpha(double a) {m_renderer->setAlpha(a);} void setBeta(double b) {m_renderer->setBeta(b);} + void setShowSaturation(double value); signals: void tChanged(); @@ -111,6 +115,8 @@ private slots: int m_droppedFrameCount; QImage m_displayFrame2; VideoDisplayRenderer *m_renderer; + + double m_showSaturation; }; //! [2] diff --git a/source/videostreamocv.cpp b/source/videostreamocv.cpp index 8b074ec..ce38bd2 100644 --- a/source/videostreamocv.cpp +++ b/source/videostreamocv.cpp @@ -12,13 +12,16 @@ #include #include -VideoStreamOCV::VideoStreamOCV(QObject *parent) : +VideoStreamOCV::VideoStreamOCV(QObject *parent, int width, int height) : QObject(parent), m_deviceName(""), m_stopStreaming(false), m_streamHeadOrientationState(false), m_isColor(false), - m_trackExtTrigger(false) + m_trackExtTrigger(false), + m_expectedWidth(width), + m_expectedHeight(height), + m_connectionType("") { } @@ -34,12 +37,15 @@ int VideoStreamOCV::connect2Camera(int cameraID) { m_cameraID = cameraID; cam = new cv::VideoCapture; // Try connecting using DShow backend - if (cam->open(m_cameraID, cv::CAP_DSHOW)) + if (cam->open(m_cameraID, cv::CAP_DSHOW)) { connectionState = 1; + m_connectionType = "DSHOW"; + } else { // connecting again using defaulk backend if (cam->open(m_cameraID)) { connectionState = 2; + m_connectionType = "OTHER"; } } // qDebug() << "Camera capture backend is" << QString::fromStdString (cam->getBackendName()); @@ -92,13 +98,13 @@ void VideoStreamOCV::startStream() if (!cam->grab()) { sendMessage("Warning: " + m_deviceName + " grab frame failed. 
Attempting to reconnect."); if (cam->isOpened()) { - qDebug() << "Releasing cam" << m_cameraID; + qDebug() << "Grab failed: Releasing cam" << m_cameraID; cam->release(); qDebug() << "Released cam" << m_cameraID; } QThread::msleep(1000); - if (cam->open(m_cameraID)) { + if (attemptReconnect()) { // TODO: add some timeout here sendMessage("Warning: " + m_deviceName + " reconnected."); qDebug() << "Reconnect to camera" << m_cameraID; @@ -109,79 +115,90 @@ void VideoStreamOCV::startStream() if (!cam->retrieve(frame)) { sendMessage("Warning: " + m_deviceName + " retrieve frame failed. Attempting to reconnect."); if (cam->isOpened()) { - qDebug() << "Releasing cam" << m_cameraID; + qDebug() << "Retrieve failed: Releasing cam" << m_cameraID; cam->release(); qDebug() << "Released cam" << m_cameraID; } QThread::msleep(1000); - if (cam->open(m_cameraID)) { + if (attemptReconnect()) { // TODO: add some timeout here sendMessage("Warning: " + m_deviceName + " reconnected."); qDebug() << "Reconnect to camera" << m_cameraID; } } else { - if (m_isColor) { - frame.copyTo(frameBuffer[idx%frameBufferSize]); + // Let's make sure the frame acquired has the correct size. An openCV error seems to occur on cam reconnect due to a mismatch in size. + if (frame.cols != m_expectedWidth || frame.rows != m_expectedHeight) { + sendMessage("Warning: " + m_deviceName + " acquired frame has wrong size. [" + QString::number(frame.cols) + ", " + QString::number(frame.rows) + "]"); + qDebug() << "Wrong frame size for device" << m_cameraID; + + // This likely means the correct video stream crashed and openCV defaulted to a different video stream.
So lets disconnect and try to reconnect to the correct one + cam->release(); + } else { -// frame = cv::repeat(frame,4,4); - cv::cvtColor(frame, frameBuffer[idx%frameBufferSize], cv::COLOR_BGR2GRAY); - } -// qDebug() << "Frame Number:" << *m_acqFrameNum - cam->get(cv::CAP_PROP_CONTRAST); - - // frameBuffer[idx%frameBufferSize] = frame; - if (m_trackExtTrigger) { - if (extTriggerLast == -1) { - // first time grabbing trigger state. - extTriggerLast = cam->get(cv::CAP_PROP_GAMMA); + if (m_isColor) { + frame.copyTo(frameBuffer[idx%frameBufferSize]); } else { - extTrigger = cam->get(cv::CAP_PROP_GAMMA); - if (extTriggerLast != extTrigger) { - // State change - if (extTriggerLast == 0) { - // Went from 0 to 1 - emit extTriggered(true); - } - else { - // Went from 1 to 0 - emit extTriggered(false); + // frame = cv::repeat(frame,4,4); + cv::cvtColor(frame, frameBuffer[idx%frameBufferSize], cv::COLOR_BGR2GRAY); + } + // qDebug() << "Frame Number:" << *m_acqFrameNum - cam->get(cv::CAP_PROP_CONTRAST); + + // frameBuffer[idx%frameBufferSize] = frame; + if (m_trackExtTrigger) { + if (extTriggerLast == -1) { + // first time grabbing trigger state. 
+ extTriggerLast = cam->get(cv::CAP_PROP_GAMMA); + } + else { + extTrigger = cam->get(cv::CAP_PROP_GAMMA); + if (extTriggerLast != extTrigger) { + // State change + if (extTriggerLast == 0) { + // Went from 0 to 1 + emit extTriggered(true); + } + else { + // Went from 1 to 0 + emit extTriggered(false); + } } + extTriggerLast = extTrigger; } - extTriggerLast = extTrigger; } - } - if (m_streamHeadOrientationState) { - - // BNO output is a unit quaternion after 2^14 division - w = static_cast(cam->get(cv::CAP_PROP_SATURATION)); - x = static_cast(cam->get(cv::CAP_PROP_HUE)); - y = static_cast(cam->get(cv::CAP_PROP_GAIN)); - z = static_cast(cam->get(cv::CAP_PROP_BRIGHTNESS)); - norm = sqrt(w*w + x*x + y*y + z*z); - bnoBuffer[(idx%frameBufferSize)*4 + 0] = w/16384.0; - bnoBuffer[(idx%frameBufferSize)*4 + 1] = x/16384.0; - bnoBuffer[(idx%frameBufferSize)*4 + 2] = y/16384.0; - bnoBuffer[(idx%frameBufferSize)*4 + 3] = z/16384.0; -// qDebug() << QString::number(static_cast(cam->get(cv::CAP_PROP_SHARPNESS)),2) << norm << w << x << y << z ; - } - if (daqFrameNum != nullptr) { - *daqFrameNum = cam->get(cv::CAP_PROP_CONTRAST) - daqFrameNumOffset; -// qDebug() << cam->get(cv::CAP_PROP_CONTRAST);// *daqFrameNum; - if (*m_acqFrameNum == 0) // Used to initially sync daqFrameNum with acqFrameNum - daqFrameNumOffset = *daqFrameNum - 1; - } + if (m_streamHeadOrientationState) { + // BNO output is a unit quaternion after 2^14 division + w = static_cast(cam->get(cv::CAP_PROP_SATURATION)); + x = static_cast(cam->get(cv::CAP_PROP_HUE)); + y = static_cast(cam->get(cv::CAP_PROP_GAIN)); + z = static_cast(cam->get(cv::CAP_PROP_BRIGHTNESS)); + norm = sqrt(w*w + x*x + y*y + z*z); + bnoBuffer[(idx%frameBufferSize)*4 + 0] = w/16384.0; + bnoBuffer[(idx%frameBufferSize)*4 + 1] = x/16384.0; + bnoBuffer[(idx%frameBufferSize)*4 + 2] = y/16384.0; + bnoBuffer[(idx%frameBufferSize)*4 + 3] = z/16384.0; + // qDebug() << QString::number(static_cast(cam->get(cv::CAP_PROP_SHARPNESS)),2) << norm << w << x << y 
<< z ; + } + if (daqFrameNum != nullptr) { + *daqFrameNum = cam->get(cv::CAP_PROP_CONTRAST) - daqFrameNumOffset; + // qDebug() << cam->get(cv::CAP_PROP_CONTRAST);// *daqFrameNum; + if (*m_acqFrameNum == 0) // Used to initially sync daqFrameNum with acqFrameNum + daqFrameNumOffset = *daqFrameNum - 1; + } - m_acqFrameNum->operator++(); -// qDebug() << *m_acqFrameNum << *daqFrameNum; - idx++; - usedFrames->release(); + m_acqFrameNum->operator++(); + // qDebug() << *m_acqFrameNum << *daqFrameNum; + idx++; +// usedFrames->release(); - emit newFrameAvailable(m_deviceName, *m_acqFrameNum); + emit newFrameAvailable(m_deviceName, *m_acqFrameNum); + } } } + usedFrames->release(); } // Get any new events @@ -292,3 +309,16 @@ void VideoStreamOCV::sendCommands() } } + +bool VideoStreamOCV::attemptReconnect() +{ + if (m_connectionType == "DSHOW") { + if (cam->open(m_cameraID, cv::CAP_DSHOW)) + return true; + } + else if (m_connectionType == "OTHER") { + if (cam->open(m_cameraID)) + return true; + } + return false; +} diff --git a/source/videostreamocv.h b/source/videostreamocv.h index 70ef887..9d35f53 100644 --- a/source/videostreamocv.h +++ b/source/videostreamocv.h @@ -18,7 +18,7 @@ class VideoStreamOCV : public QObject { Q_OBJECT public: - explicit VideoStreamOCV(QObject *parent = nullptr); + explicit VideoStreamOCV(QObject *parent = nullptr, int width = 0, int height = 0); ~VideoStreamOCV(); // void setCameraID(int cameraID); void setBufferParameters(cv::Mat *frameBuf, qint64 *tsBuf, float *bnoBuf, @@ -45,6 +45,7 @@ public slots: private: void sendCommands(); + bool attemptReconnect(); int m_cameraID; QString m_deviceName; cv::VideoCapture *cam; @@ -67,6 +68,10 @@ public slots: bool m_trackExtTrigger; + int m_expectedWidth; + int m_expectedHeight; + + QString m_connectionType; }; diff --git a/userConfigs/UserConfigExample_BehavCam.json b/userConfigs/UserConfigExample_BehavCam.json index 4a56f19..4804775 100644 --- a/userConfigs/UserConfigExample_BehavCam.json +++ 
b/userConfigs/UserConfigExample_BehavCam.json @@ -28,6 +28,7 @@ "deviceName": "BehavCam 0", "deviceType": "WebCam", "deviceID": 0, + "showSaturation": true, "cameraCalibrationFileLocation": "", "compressionOptions": ["MJPG","MJ2C","XVID","FFV1"], "compression": "FFV1", diff --git a/userConfigs/UserConfigExample_V3_Miniscope.json b/userConfigs/UserConfigExample_V3_Miniscope.json index ca0b265..41efd25 100644 --- a/userConfigs/UserConfigExample_V3_Miniscope.json +++ b/userConfigs/UserConfigExample_V3_Miniscope.json @@ -30,6 +30,7 @@ "imageRegistration": "Off", "streamHeadOrientation": false, "deviceID": 1, + "showSaturation": true, "compressionOptions": ["MJPG","MJ2C","XVID","FFV1"], "compression": "FFV1", "framesPerFile": 1000, diff --git a/userConfigs/UserConfigExample_V3_Miniscope_Plus_BehavCam.json b/userConfigs/UserConfigExample_V3_Miniscope_Plus_BehavCam.json index d38808d..a0cb416 100644 --- a/userConfigs/UserConfigExample_V3_Miniscope_Plus_BehavCam.json +++ b/userConfigs/UserConfigExample_V3_Miniscope_Plus_BehavCam.json @@ -30,6 +30,7 @@ "imageRegistration": "Off", "streamHeadOrientation": false, "deviceID": 1, + "showSaturation": true, "compressionOptions": ["MJPG","MJ2C","XVID","FFV1"], "compression": "FFV1", "framesPerFile": 1000, @@ -47,6 +48,7 @@ "deviceName": "BehavCam 0", "deviceType": "WebCam", "deviceID": 0, + "showSaturation": true, "cameraCalibrationFileLocation": "", "compressionOptions": ["MJPG","MJ2C","XVID","FFV1"], "compression": "FFV1", diff --git a/userConfigs/UserConfigExample_V4_BNO_Miniscope.json b/userConfigs/UserConfigExample_V4_BNO_Miniscope.json index 35b3e71..a9d64b1 100644 --- a/userConfigs/UserConfigExample_V4_BNO_Miniscope.json +++ b/userConfigs/UserConfigExample_V4_BNO_Miniscope.json @@ -28,8 +28,9 @@ "deviceName": "Miniscope", "deviceType": "Miniscope_V4_BNO", "imageRegistration": "Off", - "streamHeadOrientation": false, + "streamHeadOrientation": true, "deviceID": 2, + "showSaturation": true, "compressionOptions": 
["MJPG","MJ2C","XVID","FFV1"], "compression": "FFV1", "framesPerFile": 1000, diff --git a/userConfigs/UserConfigExample_V4_BNO_Plus_BehavCam.json b/userConfigs/UserConfigExample_V4_BNO_Plus_BehavCam.json index 0929024..dcbfac3 100644 --- a/userConfigs/UserConfigExample_V4_BNO_Plus_BehavCam.json +++ b/userConfigs/UserConfigExample_V4_BNO_Plus_BehavCam.json @@ -29,7 +29,8 @@ "deviceType": "Miniscope_V4_BNO", "imageRegistration": "Off", "streamHeadOrientation": true, - "deviceID": 1, + "deviceID": 2, + "showSaturation": true, "compressionOptions": ["MJPG","MJ2C","XVID","FFV1"], "compression": "FFV1", "framesPerFile": 1000, @@ -47,6 +48,7 @@ "deviceName": "BehavCam 0", "deviceType": "WebCam", "deviceID": 0, + "showSaturation": false, "cameraCalibrationFileLocation": "", "compressionOptions": ["MJPG","MJ2C","XVID","FFV1"], "compression": "FFV1",