Skip to content

Commit

Permalink
Add saturation display control to user config. Add correct handling of main source of software crash!
Browse files Browse the repository at this point in the history

miniscopes and cameras in user config now support "showSaturation" bool value to turn on or off red saturation display.

Software now correctly reconnects to a video stream if it temporarily disconnects. This means you can now actually unplug the coax cable and replug it in and the software will continue running.
  • Loading branch information
daharoni committed Feb 21, 2020
1 parent 76d8270 commit c6bfe47
Show file tree
Hide file tree
Showing 12 changed files with 138 additions and 65 deletions.
2 changes: 2 additions & 0 deletions source/Miniscope-DAQ-QT-Software.pro
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,8 @@ win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../../../../openc
#win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../../../../opencv-build420/install/x64/vc15/lib/ -lopencv_videoio_ffmpeg420_64
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../../../../opencv-build420/install/x64/vc15/lib/ -lopencv_world420d

#LIBS += -L$$PWD/../../../../../../../opencv-build420/install/x64/vc15/lib/ -lopencv_world420

INCLUDEPATH += $$PWD/../../../../../../../opencv-build420/install/include
DEPENDPATH += $$PWD/../../../../../../../opencv-build420/install/include

Expand Down
8 changes: 7 additions & 1 deletion source/behaviorcam.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ BehaviorCam::BehaviorCam(QObject *parent, QJsonObject ucBehavCam) :
// -------------------------

// Setup OpenCV camera stream
behavCamStream = new VideoStreamOCV;
behavCamStream = new VideoStreamOCV(nullptr, m_cBehavCam["width"].toInt(), m_cBehavCam["height"].toInt());
behavCamStream->setDeviceName(m_deviceName);

behavCamStream->setStreamHeadOrientation(m_streamHeadOrientationState);
Expand Down Expand Up @@ -122,6 +122,12 @@ void BehaviorCam::createView()
vidDisplay = rootObject->findChild<VideoDisplay*>("vD");
vidDisplay->setMaxBuffer(FRAME_BUFFER_SIZE);

// Turn on or off saturation display
if (m_ucBehavCam["showSaturation"].toBool(true))
vidDisplay->setShowSaturation(1);
else
vidDisplay->setShowSaturation(0);

QObject::connect(rootObject, SIGNAL( takeScreenShotSignal() ),
this, SLOT( handleTakeScreenShotSignal() ));
QObject::connect(rootObject, SIGNAL( vidPropChangedSignal(QString, double, double, double) ),
Expand Down
14 changes: 11 additions & 3 deletions source/miniscope.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ Miniscope::Miniscope(QObject *parent, QJsonObject ucMiniscope) :
// -------------------------

// Setup OpenCV camera stream
miniscopeStream = new VideoStreamOCV;
miniscopeStream = new VideoStreamOCV(nullptr, m_cMiniscopes["width"].toInt(-1), m_cMiniscopes["height"].toInt(-1));
miniscopeStream->setDeviceName(m_deviceName);

miniscopeStream->setStreamHeadOrientation(m_streamHeadOrientationState);
Expand Down Expand Up @@ -141,6 +141,12 @@ void Miniscope::createView()
vidDisplay = rootObject->findChild<VideoDisplay*>("vD");
vidDisplay->setMaxBuffer(FRAME_BUFFER_SIZE);

// Turn on or off show saturation display
if (m_ucMiniscope["showSaturation"].toBool(true))
vidDisplay->setShowSaturation(1);
else
vidDisplay->setShowSaturation(0);

if (m_streamHeadOrientationState)
bnoDisplay = rootObject->findChild<QQuickItem*>("bno");

Expand Down Expand Up @@ -399,7 +405,6 @@ void Miniscope::sendNewFrame(){
// vidDisplay->setProperty("displayFrame", QImage("C:/Users/DBAharoni/Pictures/Miniscope/Logo/1.png"));
int f = *m_acqFrameNum;
cv::Mat tempMat1, tempMat2;

if (f > m_previousDisplayFrameNum) {
m_previousDisplayFrameNum = f;
QImage tempFrame2;
Expand Down Expand Up @@ -434,8 +439,11 @@ void Miniscope::sendNewFrame(){
baselinePreviousTimeStamp = timeStampBuffer[f];
baselineFrameBufWritePos++;
}
if (m_displatState == "Raw")

if (m_displatState == "Raw") {

vidDisplay->setDisplayFrame(tempFrame2.copy());
}
else if (m_displatState == "dFF") {
// TODO: Implement this better. I am sure it can be sped up a lot. Maybe do most of it in a shader
tempMat2 = frameBuffer[f].clone();
Expand Down
11 changes: 10 additions & 1 deletion source/videodisplay.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,14 @@ void VideoDisplay::setDisplayFrame(QImage frame) {
if (m_renderer)
m_renderer->setDisplayFrame(m_displayFrame2.copy());
}

// Enables (nonzero) or disables (zero) the red saturation overlay.
// The value is cached in m_showSaturation so a renderer created later
// (in sync()) can be initialized with the current setting.
void VideoDisplay::setShowSaturation(double value)
{
    m_showSaturation = value;
    // Forward immediately if the renderer already exists.
    if (m_renderer)
        m_renderer->setShowSaturation(m_showSaturation);
}
//void VideoDisplayRenderer::setDisplayFrame(QImage frame) {
// m_displayFrame = frame;
//}
Expand Down Expand Up @@ -75,6 +83,7 @@ void VideoDisplay::sync()
{
if (!m_renderer) {
m_renderer = new VideoDisplayRenderer();
m_renderer->setShowSaturation(m_showSaturation);
// m_renderer->setDisplayFrame(QImage("C:/Users/DBAharoni/Pictures/Miniscope/Logo/1.png"));
connect(window(), &QQuickWindow::beforeRendering, m_renderer, &VideoDisplayRenderer::paint, Qt::DirectConnection);
}
Expand Down Expand Up @@ -140,7 +149,7 @@ void VideoDisplayRenderer::paint()
m_program->setAttributeArray(1, GL_FLOAT, texcoord, 2);
m_program->setUniformValue("alpha", (float) m_alpha);
m_program->setUniformValue("beta", (float) m_beta);
m_program->setUniformValue("showSaturation", (float) 1);
m_program->setUniformValue("showSaturation", (float) m_showStaturation);

glViewport(0, 0, m_viewportSize.width(), m_viewportSize.height());

Expand Down
8 changes: 7 additions & 1 deletion source/videodisplay.h
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,8 @@ class VideoDisplayRenderer : public QObject, protected QOpenGLFunctions
m_texture(0),
m_newFrame(false),
m_alpha(1),
m_beta(0)
m_beta(0),
m_showStaturation(1)
{ }
~VideoDisplayRenderer();

Expand All @@ -32,6 +33,7 @@ class VideoDisplayRenderer : public QObject, protected QOpenGLFunctions
void setWindow(QQuickWindow *window) { m_window = window; }
void setAlpha(double a) {m_alpha = a;}
void setBeta(double b) {m_beta = b;}
void setShowSaturation(double value) {m_showStaturation = value; }

signals:
void requestNewFrame();
Expand All @@ -50,6 +52,7 @@ public slots:

double m_alpha;
double m_beta;
double m_showStaturation;


};
Expand Down Expand Up @@ -86,6 +89,7 @@ class VideoDisplay : public QQuickItem
void setDisplayFrame(QImage frame);
void setAlpha(double a) {m_renderer->setAlpha(a);}
void setBeta(double b) {m_renderer->setBeta(b);}
void setShowSaturation(double value);

signals:
void tChanged();
Expand All @@ -111,6 +115,8 @@ private slots:
int m_droppedFrameCount;
QImage m_displayFrame2;
VideoDisplayRenderer *m_renderer;

double m_showSaturation;
};
//! [2]

Expand Down
142 changes: 86 additions & 56 deletions source/videostreamocv.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,16 @@
#include <QDateTime>
#include <QThread>

VideoStreamOCV::VideoStreamOCV(QObject *parent) :
// Constructs an OpenCV-backed video stream worker.
// 'width' and 'height' record the frame size expected from this device so
// the acquisition loop can detect when OpenCV silently falls back to a
// different camera after a disconnect/reconnect cycle.
VideoStreamOCV::VideoStreamOCV(QObject *parent, int width, int height) :
    QObject(parent)
{
    m_deviceName = "";
    m_stopStreaming = false;
    m_streamHeadOrientationState = false;
    m_isColor = false;
    m_trackExtTrigger = false;
    m_expectedWidth = width;
    m_expectedHeight = height;
    m_connectionType = "";
}
Expand All @@ -34,12 +37,15 @@ int VideoStreamOCV::connect2Camera(int cameraID) {
m_cameraID = cameraID;
cam = new cv::VideoCapture;
// Try connecting using DShow backend
if (cam->open(m_cameraID, cv::CAP_DSHOW))
if (cam->open(m_cameraID, cv::CAP_DSHOW)) {
connectionState = 1;
m_connectionType = "DSHOW";
}
else {
// Try connecting again using the default backend
if (cam->open(m_cameraID)) {
connectionState = 2;
m_connectionType = "OTHER";
}
}
// qDebug() << "Camera capture backend is" << QString::fromStdString (cam->getBackendName());
Expand Down Expand Up @@ -92,13 +98,13 @@ void VideoStreamOCV::startStream()
if (!cam->grab()) {
sendMessage("Warning: " + m_deviceName + " grab frame failed. Attempting to reconnect.");
if (cam->isOpened()) {
qDebug() << "Releasing cam" << m_cameraID;
qDebug() << "Grab failed: Releasing cam" << m_cameraID;
cam->release();
qDebug() << "Released cam" << m_cameraID;
}
QThread::msleep(1000);

if (cam->open(m_cameraID)) {
if (attemptReconnect()) {
// TODO: add some timeout here
sendMessage("Warning: " + m_deviceName + " reconnected.");
qDebug() << "Reconnect to camera" << m_cameraID;
Expand All @@ -109,79 +115,90 @@ void VideoStreamOCV::startStream()
if (!cam->retrieve(frame)) {
sendMessage("Warning: " + m_deviceName + " retrieve frame failed. Attempting to reconnect.");
if (cam->isOpened()) {
qDebug() << "Releasing cam" << m_cameraID;
qDebug() << "Retieve failed: Releasing cam" << m_cameraID;
cam->release();
qDebug() << "Released cam" << m_cameraID;
}
QThread::msleep(1000);

if (cam->open(m_cameraID)) {
if (attemptReconnect()) {
// TODO: add some timeout here
sendMessage("Warning: " + m_deviceName + " reconnected.");
qDebug() << "Reconnect to camera" << m_cameraID;
}
}
else {
if (m_isColor) {
frame.copyTo(frameBuffer[idx%frameBufferSize]);
// Let's make sure the frame acquired has the correct size. An openCV error seems to occur on cam reconnect due to a mismatch in size.
if (frame.cols != m_expectedWidth || frame.rows != m_expectedHeight) {
sendMessage("Warning: " + m_deviceName + " acquired frame has wrong size. [" + QString::number(frame.cols) + ", " + QString::number(frame.rows) + "]");
qDebug() << "Wrong frame size for device" << m_cameraID;

// This likely means the correct video stream crashed and openCV defaulted to a different video stream. So lets disconnect and try to reconnect to the correct one
cam->release();

}
else {
// frame = cv::repeat(frame,4,4);
cv::cvtColor(frame, frameBuffer[idx%frameBufferSize], cv::COLOR_BGR2GRAY);
}
// qDebug() << "Frame Number:" << *m_acqFrameNum - cam->get(cv::CAP_PROP_CONTRAST);

// frameBuffer[idx%frameBufferSize] = frame;
if (m_trackExtTrigger) {
if (extTriggerLast == -1) {
// first time grabbing trigger state.
extTriggerLast = cam->get(cv::CAP_PROP_GAMMA);
if (m_isColor) {
frame.copyTo(frameBuffer[idx%frameBufferSize]);
}
else {
extTrigger = cam->get(cv::CAP_PROP_GAMMA);
if (extTriggerLast != extTrigger) {
// State change
if (extTriggerLast == 0) {
// Went from 0 to 1
emit extTriggered(true);
}
else {
// Went from 1 to 0
emit extTriggered(false);
// frame = cv::repeat(frame,4,4);
cv::cvtColor(frame, frameBuffer[idx%frameBufferSize], cv::COLOR_BGR2GRAY);
}
// qDebug() << "Frame Number:" << *m_acqFrameNum - cam->get(cv::CAP_PROP_CONTRAST);

// frameBuffer[idx%frameBufferSize] = frame;
if (m_trackExtTrigger) {
if (extTriggerLast == -1) {
// first time grabbing trigger state.
extTriggerLast = cam->get(cv::CAP_PROP_GAMMA);
}
else {
extTrigger = cam->get(cv::CAP_PROP_GAMMA);
if (extTriggerLast != extTrigger) {
// State change
if (extTriggerLast == 0) {
// Went from 0 to 1
emit extTriggered(true);
}
else {
// Went from 1 to 0
emit extTriggered(false);
}
}
extTriggerLast = extTrigger;
}
extTriggerLast = extTrigger;
}
}
if (m_streamHeadOrientationState) {

// BNO output is a unit quaternion after 2^14 division
w = static_cast<qint16>(cam->get(cv::CAP_PROP_SATURATION));
x = static_cast<qint16>(cam->get(cv::CAP_PROP_HUE));
y = static_cast<qint16>(cam->get(cv::CAP_PROP_GAIN));
z = static_cast<qint16>(cam->get(cv::CAP_PROP_BRIGHTNESS));
norm = sqrt(w*w + x*x + y*y + z*z);
bnoBuffer[(idx%frameBufferSize)*4 + 0] = w/16384.0;
bnoBuffer[(idx%frameBufferSize)*4 + 1] = x/16384.0;
bnoBuffer[(idx%frameBufferSize)*4 + 2] = y/16384.0;
bnoBuffer[(idx%frameBufferSize)*4 + 3] = z/16384.0;
// qDebug() << QString::number(static_cast<qint16>(cam->get(cv::CAP_PROP_SHARPNESS)),2) << norm << w << x << y << z ;
}
if (daqFrameNum != nullptr) {
*daqFrameNum = cam->get(cv::CAP_PROP_CONTRAST) - daqFrameNumOffset;
// qDebug() << cam->get(cv::CAP_PROP_CONTRAST);// *daqFrameNum;
if (*m_acqFrameNum == 0) // Used to initially sync daqFrameNum with acqFrameNum
daqFrameNumOffset = *daqFrameNum - 1;
}
if (m_streamHeadOrientationState) {
// BNO output is a unit quaternion after 2^14 division
w = static_cast<qint16>(cam->get(cv::CAP_PROP_SATURATION));
x = static_cast<qint16>(cam->get(cv::CAP_PROP_HUE));
y = static_cast<qint16>(cam->get(cv::CAP_PROP_GAIN));
z = static_cast<qint16>(cam->get(cv::CAP_PROP_BRIGHTNESS));
norm = sqrt(w*w + x*x + y*y + z*z);
bnoBuffer[(idx%frameBufferSize)*4 + 0] = w/16384.0;
bnoBuffer[(idx%frameBufferSize)*4 + 1] = x/16384.0;
bnoBuffer[(idx%frameBufferSize)*4 + 2] = y/16384.0;
bnoBuffer[(idx%frameBufferSize)*4 + 3] = z/16384.0;
// qDebug() << QString::number(static_cast<qint16>(cam->get(cv::CAP_PROP_SHARPNESS)),2) << norm << w << x << y << z ;
}
if (daqFrameNum != nullptr) {
*daqFrameNum = cam->get(cv::CAP_PROP_CONTRAST) - daqFrameNumOffset;
// qDebug() << cam->get(cv::CAP_PROP_CONTRAST);// *daqFrameNum;
if (*m_acqFrameNum == 0) // Used to initially sync daqFrameNum with acqFrameNum
daqFrameNumOffset = *daqFrameNum - 1;
}

m_acqFrameNum->operator++();
// qDebug() << *m_acqFrameNum << *daqFrameNum;
idx++;
usedFrames->release();
m_acqFrameNum->operator++();
// qDebug() << *m_acqFrameNum << *daqFrameNum;
idx++;
// usedFrames->release();

emit newFrameAvailable(m_deviceName, *m_acqFrameNum);
emit newFrameAvailable(m_deviceName, *m_acqFrameNum);
}
}
}
usedFrames->release();
}

// Get any new events
Expand Down Expand Up @@ -292,3 +309,16 @@ void VideoStreamOCV::sendCommands()
}

}

// Attempts to reopen the camera using the same capture backend that
// succeeded during the initial connection (see connect2Camera), so a
// temporarily unplugged device is reacquired through the identical path.
// Returns true on success; false if reopening failed or if the stream was
// never successfully connected in the first place (m_connectionType empty).
bool VideoStreamOCV::attemptReconnect()
{
    if (m_connectionType == "DSHOW")
        return cam->open(m_cameraID, cv::CAP_DSHOW);
    if (m_connectionType == "OTHER")
        return cam->open(m_cameraID);
    return false;
}
7 changes: 6 additions & 1 deletion source/videostreamocv.h
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ class VideoStreamOCV : public QObject
{
Q_OBJECT
public:
explicit VideoStreamOCV(QObject *parent = nullptr);
explicit VideoStreamOCV(QObject *parent = nullptr, int width = 0, int height = 0);
~VideoStreamOCV();
// void setCameraID(int cameraID);
void setBufferParameters(cv::Mat *frameBuf, qint64 *tsBuf, float *bnoBuf,
Expand All @@ -45,6 +45,7 @@ public slots:

private:
void sendCommands();
bool attemptReconnect();
int m_cameraID;
QString m_deviceName;
cv::VideoCapture *cam;
Expand All @@ -67,6 +68,10 @@ public slots:

bool m_trackExtTrigger;

int m_expectedWidth;
int m_expectedHeight;

QString m_connectionType;

};

Expand Down
1 change: 1 addition & 0 deletions userConfigs/UserConfigExample_BehavCam.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
"deviceName": "BehavCam 0",
"deviceType": "WebCam",
"deviceID": 0,
"showSaturation": true,
"cameraCalibrationFileLocation": "",
"compressionOptions": ["MJPG","MJ2C","XVID","FFV1"],
"compression": "FFV1",
Expand Down
Loading

0 comments on commit c6bfe47

Please sign in to comment.