diff --git a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm index 7e2f8722e0665cd749d025d30ebea169491aad52..0759702e825fd39f065f2dd096b65e7d04ec6bdb 100644 --- a/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm +++ b/src/plugins/avfoundation/mediaplayer/avfvideoframerenderer.mm @@ -126,8 +126,14 @@ QOpenGLFramebufferObject *AVFVideoFrameRenderer::initRenderer(AVPlayerLayer *lay //Get size from AVPlayerLayer m_targetSize = QSize(layer.bounds.size.width, layer.bounds.size.height); + QOpenGLContext *shareContext = !m_glContext && m_surface + ? qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>()) + : nullptr; + //Make sure we have an OpenGL context to make current - if (!QOpenGLContext::currentContext() && !m_glContext) { + if ((shareContext && shareContext != QOpenGLContext::currentContext()) + || (!QOpenGLContext::currentContext() && !m_glContext)) { + //Create Hidden QWindow surface to create context in this thread m_offscreenSurface = new QWindow(); m_offscreenSurface->setSurfaceType(QWindow::OpenGLSurface); @@ -135,12 +141,6 @@ QOpenGLFramebufferObject *AVFVideoFrameRenderer::initRenderer(AVPlayerLayer *lay m_offscreenSurface->setGeometry(0, 0, 1, 1); m_offscreenSurface->create(); - //Create OpenGL context and set share context from surface - QOpenGLContext *shareContext = 0; - if (m_surface) { - //QOpenGLContext *renderThreadContext = 0; - shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>()); - } m_glContext = new QOpenGLContext(); m_glContext->setFormat(m_offscreenSurface->requestedFormat()); diff --git a/src/plugins/directshow/camera/dscameracontrol.cpp b/src/plugins/directshow/camera/dscameracontrol.cpp index 8d923da9f8e7b58c467f3a8eb90e1044f11a26a5..7a2b46a2cfd0c033b3b52cc7669f19d0c025d970 100644 --- a/src/plugins/directshow/camera/dscameracontrol.cpp +++ 
b/src/plugins/directshow/camera/dscameracontrol.cpp @@ -51,8 +51,12 @@ DSCameraControl::DSCameraControl(QObject *parent) , m_captureMode(QCamera::CaptureStillImage) { m_session = qobject_cast<DSCameraSession*>(parent); - connect(m_session, SIGNAL(statusChanged(QCamera::Status)), - this, SIGNAL(statusChanged(QCamera::Status))); + connect(m_session, &DSCameraSession::statusChanged, + [&](QCamera::Status status) { + if (status == QCamera::UnloadedStatus) + m_state = QCamera::UnloadedState; + emit statusChanged(status); + }); connect(m_session, &DSCameraSession::cameraError, this, &DSCameraControl::error); } diff --git a/src/plugins/directshow/camera/dscamerasession.cpp b/src/plugins/directshow/camera/dscamerasession.cpp index ca3c47cb887f5bb012b9ec6545c2e03c7e23163b..c309359eda903ec84581f66a3454891502f63d01 100644 --- a/src/plugins/directshow/camera/dscamerasession.cpp +++ b/src/plugins/directshow/camera/dscamerasession.cpp @@ -76,6 +76,30 @@ DSCameraSession::DSCameraSession(QObject *parent) { connect(this, SIGNAL(statusChanged(QCamera::Status)), this, SLOT(updateReadyForCapture())); + + m_deviceLostEventTimer.setSingleShot(true); + connect(&m_deviceLostEventTimer, &QTimer::timeout, [&]() { + IMediaEvent *pEvent = com_cast<IMediaEvent>(m_filterGraph, IID_IMediaEvent); + if (!pEvent) + return; + + long eventCode; + LONG_PTR param1; + LONG_PTR param2; + while (pEvent->GetEvent(&eventCode, &param1, &param2, 0) == S_OK) { + switch (eventCode) { + case EC_DEVICE_LOST: + unload(); + break; + default: + break; + } + + pEvent->FreeEventParams(eventCode, param1, param2); + } + + pEvent->Release(); + }); } DSCameraSession::~DSCameraSession() @@ -208,8 +232,8 @@ QVariant DSCameraSession::imageProcessingParameter( QCameraImageProcessingControl::ProcessingParameter parameter) const { if (!m_graphBuilder) { - qWarning() << "failed to access to the graph builder"; - return QVariant(); + auto it = m_pendingImageProcessingParametrs.find(parameter); + return it != 
m_pendingImageProcessingParametrs.end() ? it.value() : QVariant(); } const QCameraImageProcessingControl::ProcessingParameter resultingParameter = @@ -249,7 +273,7 @@ void DSCameraSession::setImageProcessingParameter( const QVariant &value) { if (!m_graphBuilder) { - qWarning() << "failed to access to the graph builder"; + m_pendingImageProcessingParametrs.insert(parameter, value); return; } @@ -582,6 +606,10 @@ void DSCameraSession::onFrameAvailable(double time, const QByteArray &data) m_presentMutex.lock(); + // If no frames provided from ISampleGrabber for some time + // the device might be potentially unplugged. + m_deviceLostEventTimer.start(100); + // (We should be getting only RGB32 data) int stride = m_previewSize.width() * 4; @@ -960,6 +988,13 @@ void DSCameraSession::updateImageProcessingParametersInfos() } pVideoProcAmp->Release(); + + for (auto it = m_pendingImageProcessingParametrs.cbegin(); + it != m_pendingImageProcessingParametrs.cend(); + ++it) { + setImageProcessingParameter(it.key(), it.value()); + } + m_pendingImageProcessingParametrs.clear(); } bool DSCameraSession::connectGraph() diff --git a/src/plugins/directshow/camera/dscamerasession.h b/src/plugins/directshow/camera/dscamerasession.h index 07c3d9ef9ac4558d0630a2e2d04fcbf730e618dd..433db8994078c524c3f71b491c0ba8966a72e363 100644 --- a/src/plugins/directshow/camera/dscamerasession.h +++ b/src/plugins/directshow/camera/dscamerasession.h @@ -44,7 +44,7 @@ #include <QTime> #include <QUrl> #include <QMutex> - +#include <QTimer> #include <qcamera.h> #include <QtMultimedia/qvideoframe.h> #include <QtMultimedia/qabstractvideosurface.h> @@ -229,6 +229,9 @@ private: // Internal state QCamera::Status m_status; + QTimer m_deviceLostEventTimer; + + QMap<QCameraImageProcessingControl::ProcessingParameter, QVariant> m_pendingImageProcessingParametrs; friend class SampleGrabberCallbackPrivate; };