    DirectShow: Notify if camera has been unplugged · 1b58b96b
    Val Doroshchuk authored
    
    If the camera in use is disconnected, its state and status remain as
    if it were still active, and no events are sent. Previously the only
    way to recover was to unload and load the camera again manually.
    
    IMediaEvent provides a way to catch the device-removal notification
    via the EC_DEVICE_LOST event.
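    
    For reference, a minimal sketch of that polling approach, assuming only
    a valid IGraphBuilder *graph (the code below obtains the interface via
    its com_cast<IMediaEvent>() helper instead; deviceWasLost() is a
    hypothetical name used here for illustration):
    
        #include <dshow.h>
    
        // Drain the graph's event queue without blocking and report whether
        // a device-removal notification was queued.
        bool deviceWasLost(IGraphBuilder *graph)
        {
            IMediaEvent *event = nullptr;
            if (FAILED(graph->QueryInterface(IID_IMediaEvent,
                                             reinterpret_cast<void **>(&event))))
                return false;
    
            bool lost = false;
            long code;
            LONG_PTR param1, param2;
            while (event->GetEvent(&code, &param1, &param2, 0) == S_OK) { // 0: don't block
                if (code == EC_DEVICE_LOST)
                    lost = true;
                event->FreeEventParams(code, param1, param2); // always free the params
            }
            event->Release();
            return lost;
        }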
    
    Since ISampleGrabber is used to receive the buffers, a check is added:
    if no buffers have been received for some time, the event queue is
    queried for EC_DEVICE_LOST. If the device has been lost, the camera is
    unloaded.
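    
    In essence, the watchdog is a single-shot QTimer that every incoming
    sample pushes back; distilled from the constructor and the
    sample-grabber callback below (member names as in DSCameraSession,
    deviceWasLost() being the illustrative helper sketched above):
    
        // Restart the timer on each frame; if no frame arrives within the
        // interval, the timeout fires and the event queue is inspected.
        m_deviceLostEventTimer.setSingleShot(true);
        connect(&m_deviceLostEventTimer, &QTimer::timeout, [&]() {
            if (deviceWasLost(m_filterGraph))
                unload(); // drops the camera back to UnloadedStatus
        });
    
        // ... and in the ISampleGrabber buffer callback:
        m_deviceLostEventTimer.start(100); // 100 ms without frames triggers the check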
    
    Change-Id: I3a5edf00ce8ee25d8b06800fdad833a722bdba0d
    Task-number: QTBUG-68035
    Reviewed-by: VaL Doroshchuk <valentyn.doroshchuk@qt.io>
    Reviewed-by: Maurice Kalinowski <maurice.kalinowski@qt.io>
dscamerasession.cpp 39.70 KiB
/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
** $QT_END_LICENSE$
****************************************************************************/
#include <QtCore/qdebug.h>
#include <QFile>
#include <QtConcurrent/QtConcurrentRun>
#include <QtMultimedia/qabstractvideobuffer.h>
#include <QtMultimedia/qvideosurfaceformat.h>
#include <QtMultimedia/qcameraimagecapture.h>
#include <private/qmemoryvideobuffer_p.h>
#include "dscamerasession.h"
#include "dsvideorenderer.h"
#include "directshowsamplegrabber.h"
#include "directshowcameraglobal.h"
#include "directshowmediatype.h"
#include "directshowutils.h"
#include "directshowvideoprobecontrol.h"

QT_BEGIN_NAMESPACE

DSCameraSession::DSCameraSession(QObject *parent)
    : QObject(parent)
    , m_graphBuilder(nullptr)
    , m_filterGraph(nullptr)
    , m_sourceDeviceName(QLatin1String("default"))
    , m_sourceFilter(nullptr)
    , m_needsHorizontalMirroring(false)
    , m_previewSampleGrabber(nullptr)
    , m_nullRendererFilter(nullptr)
    , m_previewStarted(false)
    , m_surface(nullptr)
    , m_previewPixelFormat(QVideoFrame::Format_Invalid)
    , m_readyForCapture(false)
    , m_imageIdCounter(0)
    , m_currentImageId(-1)
    , m_captureDestinations(QCameraImageCapture::CaptureToFile)
    , m_videoProbeControl(nullptr)
    , m_status(QCamera::UnloadedStatus)
{
    connect(this, SIGNAL(statusChanged(QCamera::Status)),
            this, SLOT(updateReadyForCapture()));

    m_deviceLostEventTimer.setSingleShot(true);
    connect(&m_deviceLostEventTimer, &QTimer::timeout, [&]() {
        IMediaEvent *pEvent = com_cast<IMediaEvent>(m_filterGraph, IID_IMediaEvent);
        if (!pEvent)
            return;

        long eventCode;
        LONG_PTR param1;
        LONG_PTR param2;
        while (pEvent->GetEvent(&eventCode, &param1, &param2, 0) == S_OK) {
            switch (eventCode) {
            case EC_DEVICE_LOST:
                unload();
                break;
            default:
                break;
            }

            pEvent->FreeEventParams(eventCode, param1, param2);
        }

        pEvent->Release();
    });
}

DSCameraSession::~DSCameraSession()
{
    unload();
}

void DSCameraSession::setSurface(QAbstractVideoSurface* surface)
{
    m_surface = surface;
}

void DSCameraSession::setDevice(const QString &device)
{
    m_sourceDeviceName = device;
}

QCameraViewfinderSettings DSCameraSession::viewfinderSettings() const
{
    return m_status == QCamera::ActiveStatus
            ? m_actualViewfinderSettings
            : m_viewfinderSettings;
}

void DSCameraSession::setViewfinderSettings(const QCameraViewfinderSettings &settings)
{
    m_viewfinderSettings = settings;
}

qreal DSCameraSession::scaledImageProcessingParameterValue(
        const ImageProcessingParameterInfo &sourceValueInfo)
{
    if (sourceValueInfo.currentValue == sourceValueInfo.defaultValue) {
        return 0.0f;
    } else if (sourceValueInfo.currentValue < sourceValueInfo.defaultValue) {
        return ((sourceValueInfo.currentValue - sourceValueInfo.minimumValue)
                / qreal(sourceValueInfo.defaultValue - sourceValueInfo.minimumValue))
                + (-1.0f);
    } else {
        return ((sourceValueInfo.currentValue - sourceValueInfo.defaultValue)
                / qreal(sourceValueInfo.maximumValue - sourceValueInfo.defaultValue));
    }
}

qint32 DSCameraSession::sourceImageProcessingParameterValue(
        qreal scaledValue, const ImageProcessingParameterInfo &valueRange)
{
    if (qFuzzyIsNull(scaledValue)) {
        return valueRange.defaultValue;
    } else if (scaledValue < 0.0f) {
        return ((scaledValue - (-1.0f))
                * (valueRange.defaultValue - valueRange.minimumValue))
                + valueRange.minimumValue;
    } else {
        return (scaledValue * (valueRange.maximumValue - valueRange.defaultValue))
                + valueRange.defaultValue;
    }
}

static QCameraImageProcessingControl::ProcessingParameter searchRelatedResultingParameter(
        QCameraImageProcessingControl::ProcessingParameter sourceParameter)
{
    if (sourceParameter == QCameraImageProcessingControl::WhiteBalancePreset)
        return QCameraImageProcessingControl::ColorTemperature;
    return sourceParameter;
}

bool DSCameraSession::isImageProcessingParameterSupported(
        QCameraImageProcessingControl::ProcessingParameter parameter) const
{
    const QCameraImageProcessingControl::ProcessingParameter resultingParameter =
            searchRelatedResultingParameter(parameter);

    return m_imageProcessingParametersInfos.contains(resultingParameter);
}

bool DSCameraSession::isImageProcessingParameterValueSupported(
        QCameraImageProcessingControl::ProcessingParameter parameter,
        const QVariant &value) const
{
    const QCameraImageProcessingControl::ProcessingParameter resultingParameter =
            searchRelatedResultingParameter(parameter);

    QMap<QCameraImageProcessingControl::ProcessingParameter,
            ImageProcessingParameterInfo>::const_iterator sourceValueInfo =
            m_imageProcessingParametersInfos.constFind(resultingParameter);
    if (sourceValueInfo == m_imageProcessingParametersInfos.constEnd())
        return false;

    switch (parameter) {
    case QCameraImageProcessingControl::WhiteBalancePreset: {
        const QCameraImageProcessing::WhiteBalanceMode checkedValue =
                value.value<QCameraImageProcessing::WhiteBalanceMode>();
        // Supports only the Manual and the Auto values
        if (checkedValue != QCameraImageProcessing::WhiteBalanceManual
                && checkedValue != QCameraImageProcessing::WhiteBalanceAuto) {
            return false;
        }
    }
        break;

    case QCameraImageProcessingControl::ColorTemperature: {
        const qint32 checkedValue = value.toInt();
        if (checkedValue < (*sourceValueInfo).minimumValue
                || checkedValue > (*sourceValueInfo).maximumValue) {
            return false;
        }
    }
        break;

    case QCameraImageProcessingControl::ContrastAdjustment: // falling back
    case QCameraImageProcessingControl::SaturationAdjustment: // falling back
    case QCameraImageProcessingControl::BrightnessAdjustment: // falling back
    case QCameraImageProcessingControl::SharpeningAdjustment: {
        const qint32 sourceValue = sourceImageProcessingParameterValue(
                value.toReal(), (*sourceValueInfo));
        if (sourceValue < (*sourceValueInfo).minimumValue
                || sourceValue > (*sourceValueInfo).maximumValue)
            return false;
    }
        break;

    default:
        return false;
    }

    return true;
}

QVariant DSCameraSession::imageProcessingParameter(
        QCameraImageProcessingControl::ProcessingParameter parameter) const
{
    if (!m_graphBuilder) {
        auto it = m_pendingImageProcessingParametrs.find(parameter);
        return it != m_pendingImageProcessingParametrs.end()
                ? it.value()
                : QVariant();
    }

    const QCameraImageProcessingControl::ProcessingParameter resultingParameter =
            searchRelatedResultingParameter(parameter);

    QMap<QCameraImageProcessingControl::ProcessingParameter,
            ImageProcessingParameterInfo>::const_iterator sourceValueInfo =
            m_imageProcessingParametersInfos.constFind(resultingParameter);
    if (sourceValueInfo == m_imageProcessingParametersInfos.constEnd())
        return QVariant();

    switch (parameter) {
    case QCameraImageProcessingControl::WhiteBalancePreset:
        return QVariant::fromValue<QCameraImageProcessing::WhiteBalanceMode>(
                (*sourceValueInfo).capsFlags == VideoProcAmp_Flags_Auto
                        ? QCameraImageProcessing::WhiteBalanceAuto
                        : QCameraImageProcessing::WhiteBalanceManual);

    case QCameraImageProcessingControl::ColorTemperature:
        return QVariant::fromValue<qint32>((*sourceValueInfo).currentValue);

    case QCameraImageProcessingControl::ContrastAdjustment: // falling back
    case QCameraImageProcessingControl::SaturationAdjustment: // falling back
    case QCameraImageProcessingControl::BrightnessAdjustment: // falling back
    case QCameraImageProcessingControl::SharpeningAdjustment:
        return scaledImageProcessingParameterValue((*sourceValueInfo));

    default:
        return QVariant();
    }
}

void DSCameraSession::setImageProcessingParameter(
        QCameraImageProcessingControl::ProcessingParameter parameter,
        const QVariant &value)
{
    if (!m_graphBuilder) {
        m_pendingImageProcessingParametrs.insert(parameter, value);
        return;
    }

    const QCameraImageProcessingControl::ProcessingParameter resultingParameter =
            searchRelatedResultingParameter(parameter);

    QMap<QCameraImageProcessingControl::ProcessingParameter,
            ImageProcessingParameterInfo>::iterator sourceValueInfo =
            m_imageProcessingParametersInfos.find(resultingParameter);
    if (sourceValueInfo == m_imageProcessingParametersInfos.constEnd())
        return;

    LONG sourceValue = 0;
    LONG capsFlags = VideoProcAmp_Flags_Manual;

    switch (parameter) {
    case QCameraImageProcessingControl::WhiteBalancePreset: {
        const QCameraImageProcessing::WhiteBalanceMode checkedValue =
                value.value<QCameraImageProcessing::WhiteBalanceMode>();
        // Supports only the Manual and the Auto values
        if (checkedValue == QCameraImageProcessing::WhiteBalanceManual)
            capsFlags = VideoProcAmp_Flags_Manual;
        else if (checkedValue == QCameraImageProcessing::WhiteBalanceAuto)
            capsFlags = VideoProcAmp_Flags_Auto;
        else
            return;

        sourceValue = ((*sourceValueInfo).hasBeenExplicitlySet)
                ? (*sourceValueInfo).currentValue
                : (*sourceValueInfo).defaultValue;
    }
        break;

    case QCameraImageProcessingControl::ColorTemperature:
        sourceValue = value.isValid()
                ? value.value<qint32>()
                : (*sourceValueInfo).defaultValue;
        capsFlags = (*sourceValueInfo).capsFlags;
        break;

    case QCameraImageProcessingControl::ContrastAdjustment: // falling back
    case QCameraImageProcessingControl::SaturationAdjustment: // falling back
    case QCameraImageProcessingControl::BrightnessAdjustment: // falling back
    case QCameraImageProcessingControl::SharpeningAdjustment:
        if (value.isValid()) {
            sourceValue = sourceImageProcessingParameterValue(
                    value.toReal(), (*sourceValueInfo));
        } else {
            sourceValue = (*sourceValueInfo).defaultValue;
        }
        break;

    default:
        return;
    }

    IAMVideoProcAmp *pVideoProcAmp = NULL;
    HRESULT hr = m_graphBuilder->FindInterface(
                NULL,
                NULL,
                m_sourceFilter,
                IID_IAMVideoProcAmp,
                reinterpret_cast<void**>(&pVideoProcAmp)
                );

    if (FAILED(hr) || !pVideoProcAmp) {
        qWarning() << "failed to find the video proc amp";
        return;
    }

    hr = pVideoProcAmp->Set(
                (*sourceValueInfo).videoProcAmpProperty,
                sourceValue,
                capsFlags);

    pVideoProcAmp->Release();

    if (FAILED(hr)) {
        qWarning() << "failed to set the parameter value";
    } else {
        (*sourceValueInfo).capsFlags = capsFlags;
        (*sourceValueInfo).hasBeenExplicitlySet = true;
        (*sourceValueInfo).currentValue = sourceValue;
    }
}

bool DSCameraSession::getCameraControlInterface(IAMCameraControl **cameraControl) const
{
    if (!m_sourceFilter) {
        qCDebug(qtDirectShowPlugin, "getCameraControlInterface failed: No capture filter!");
        return false;
    }

    if (!cameraControl) {
        qCDebug(qtDirectShowPlugin, "getCameraControlInterface failed: Invalid out argument!");
        return false;
    }

    if (FAILED(m_sourceFilter->QueryInterface(IID_IAMCameraControl,
                                              reinterpret_cast<void **>(cameraControl)))) {
        qCDebug(qtDirectShowPlugin, "getCameraControlInterface failed: Querying camera control failed!");
        return false;
    }

    return true;
}

bool DSCameraSession::isCaptureDestinationSupported(QCameraImageCapture::CaptureDestinations destination) const
{
    return destination & (QCameraImageCapture::CaptureToFile | QCameraImageCapture::CaptureToBuffer);
}

QCameraImageCapture::CaptureDestinations DSCameraSession::captureDestination() const
{
    return m_captureDestinations;
}

void DSCameraSession::setCaptureDestination(QCameraImageCapture::CaptureDestinations destinations)
{
    if (m_captureDestinations == destinations)
        return;

    m_captureDestinations = destinations;
    Q_EMIT captureDestinationChanged(m_captureDestinations);
}

void DSCameraSession::addVideoProbe(DirectShowVideoProbeControl *probe)
{
    const QMutexLocker locker(&m_probeMutex);
    m_videoProbeControl = probe;
}

void DSCameraSession::removeVideoProbe(DirectShowVideoProbeControl *probe)
{
    Q_UNUSED(probe);
    Q_ASSERT(m_videoProbeControl == probe);

    const QMutexLocker locker(&m_probeMutex);
    m_videoProbeControl = nullptr;
}

bool DSCameraSession::load()
{
    unload();

    setStatus(QCamera::LoadingStatus);

    bool succeeded = createFilterGraph();
    if (succeeded)
        setStatus(QCamera::LoadedStatus);
    else
        setStatus(QCamera::UnavailableStatus);

    return succeeded;
}

bool DSCameraSession::unload()
{
    if (!m_graphBuilder)
        return false;

    if (!stopPreview())
        return false;

    setStatus(QCamera::UnloadingStatus);

    m_previewSampleGrabber->deleteLater();
    m_previewSampleGrabber = nullptr;

    m_needsHorizontalMirroring = false;
    m_supportedViewfinderSettings.clear();
    m_supportedFormats.clear();
    SAFE_RELEASE(m_sourceFilter);
    SAFE_RELEASE(m_nullRendererFilter);
    SAFE_RELEASE(m_filterGraph);
    SAFE_RELEASE(m_graphBuilder);

    setStatus(QCamera::UnloadedStatus);

    return true;
}

bool DSCameraSession::startPreview()
{
    if (m_previewStarted)
        return true;

    if (!m_graphBuilder)
        return false;

    setStatus(QCamera::StartingStatus);

    QString errorString;
    HRESULT hr = S_OK;
    IMediaControl* pControl = 0;

    if (!configurePreviewFormat()) {
        errorString = tr("Failed to configure preview format");
        goto failed;
    }

    if (!connectGraph()) {
        errorString = tr("Failed to connect graph");
        goto failed;
    }

    if (m_surface)
        m_surface->start(m_previewSurfaceFormat);

    hr = m_filterGraph->QueryInterface(IID_IMediaControl, (void**)&pControl);
    if (FAILED(hr)) {
        errorString = tr("Failed to get stream control");
        goto failed;
    }
    hr = pControl->Run();
    pControl->Release();

    if (FAILED(hr)) {
        errorString = tr("Failed to start");
        goto failed;
    }

    setStatus(QCamera::ActiveStatus);
    m_previewStarted = true;
    return true;

failed:
    // go back to a clean state
    if (m_surface && m_surface->isActive())
        m_surface->stop();
    disconnectGraph();
    setError(QCamera::CameraError, errorString);
    return false;
}

bool DSCameraSession::stopPreview()
{
    if (!m_previewStarted)
        return true;

    setStatus(QCamera::StoppingStatus);

    if (m_previewSampleGrabber)
        m_previewSampleGrabber->stop();

    QString errorString;

    IMediaControl* pControl = 0;
    HRESULT hr = m_filterGraph->QueryInterface(IID_IMediaControl,
                                               (void**)&pControl);
    if (FAILED(hr)) {
        errorString = tr("Failed to get stream control");
        goto failed;
    }

    hr = pControl->Stop();
    pControl->Release();
    if (FAILED(hr)) {
        errorString = tr("Failed to stop");
        goto failed;
    }

    disconnectGraph();

    m_sourceFormat.clear();

    m_previewStarted = false;
    setStatus(QCamera::LoadedStatus);
    return true;

failed:
    setError(QCamera::CameraError, errorString);
    return false;
}

void DSCameraSession::setError(int error, const QString &errorString)
{
    emit cameraError(error, errorString);
    setStatus(QCamera::UnloadedStatus);
}

void DSCameraSession::setStatus(QCamera::Status status)
{
    if (m_status == status)
        return;

    m_status = status;
    emit statusChanged(m_status);
}

bool DSCameraSession::isReadyForCapture()
{
    return m_readyForCapture;
}

void DSCameraSession::updateReadyForCapture()
{
    bool isReady = (m_status == QCamera::ActiveStatus && m_imageCaptureFileName.isEmpty());
    if (isReady != m_readyForCapture) {
        m_readyForCapture = isReady;
        emit readyForCaptureChanged(isReady);
    }
}

int DSCameraSession::captureImage(const QString &fileName)
{
    ++m_imageIdCounter;

    if (!m_readyForCapture) {
        emit captureError(m_imageIdCounter, QCameraImageCapture::NotReadyError,
                          tr("Camera not ready for capture"));
        return m_imageIdCounter;
    }

    m_imageCaptureFileName = m_fileNameGenerator.generateFileName(fileName,
                                                                  QMediaStorageLocation::Pictures,
                                                                  QLatin1String("IMG_"),
                                                                  QLatin1String("jpg"));

    updateReadyForCapture();

    m_captureMutex.lock();
    m_currentImageId = m_imageIdCounter;
    m_captureMutex.unlock();

    return m_imageIdCounter;
}

void DSCameraSession::onFrameAvailable(double time, const QByteArray &data)
{
    // !!! Not called on the main thread
    Q_UNUSED(time);

    m_presentMutex.lock();

    // If no frames provided from ISampleGrabber for some time
    // the device might be potentially unplugged.
    m_deviceLostEventTimer.start(100);

    // (We should be getting only RGB32 data)
    int stride = m_previewSize.width() * 4;

    // In case the source produces frames faster than we can display them,
    // only keep the most recent one
    m_currentFrame = QVideoFrame(new QMemoryVideoBuffer(data, stride),
                                 m_previewSize,
                                 m_previewPixelFormat);

    m_presentMutex.unlock();

    {
        const QMutexLocker locker(&m_probeMutex);
        if (m_currentFrame.isValid() && m_videoProbeControl)
            Q_EMIT m_videoProbeControl->videoFrameProbed(m_currentFrame);
    }

    // Image capture
    QMutexLocker locker(&m_captureMutex);
    if (m_currentImageId != -1 && !m_capturedFrame.isValid()) {
        m_capturedFrame = m_currentFrame;
        QMetaObject::invokeMethod(this, "imageExposed", Qt::QueuedConnection,
                                  Q_ARG(int, m_currentImageId));
    }

    QMetaObject::invokeMethod(this, "presentFrame", Qt::QueuedConnection);
}

void DSCameraSession::presentFrame()
{
    m_presentMutex.lock();

    if (m_currentFrame.isValid() && m_surface) {
        m_surface->present(m_currentFrame);
        m_currentFrame = QVideoFrame();
    }

    m_presentMutex.unlock();

    QImage captureImage;
    const int captureId = m_currentImageId;

    m_captureMutex.lock();

    if (m_capturedFrame.isValid()) {
        Q_ASSERT(m_previewPixelFormat == QVideoFrame::Format_RGB32);

        m_capturedFrame.map(QAbstractVideoBuffer::ReadOnly);

        captureImage = QImage(m_capturedFrame.bits(),
                              m_previewSize.width(), m_previewSize.height(),
                              QImage::Format_RGB32);

        captureImage = captureImage.mirrored(m_needsHorizontalMirroring); // also causes a deep copy of the data

        m_capturedFrame.unmap();

        QtConcurrent::run(this, &DSCameraSession::processCapturedImage,
                          m_currentImageId, m_captureDestinations, captureImage,
                          m_imageCaptureFileName);

        m_imageCaptureFileName.clear();
        m_currentImageId = -1;

        m_capturedFrame = QVideoFrame();
    }

    m_captureMutex.unlock();

    if (!captureImage.isNull())
        emit imageCaptured(captureId, captureImage);

    updateReadyForCapture();
}

void DSCameraSession::processCapturedImage(int id,
                                           QCameraImageCapture::CaptureDestinations captureDestinations,
                                           const QImage &image,
                                           const QString &path)
{
    if (captureDestinations & QCameraImageCapture::CaptureToFile) {
        if (image.save(path, "JPG")) {
            Q_EMIT imageSaved(id, path);
        } else {
            Q_EMIT captureError(id, QCameraImageCapture::ResourceError,
                                tr("Could not save image to file."));
        }
    }

    if (captureDestinations & QCameraImageCapture::CaptureToBuffer)
        Q_EMIT imageAvailable(id, QVideoFrame(image));
}

bool DSCameraSession::createFilterGraph()
{
    // Previously contained in <qedit.h>.
    static const CLSID cLSID_NullRenderer = { 0xC1F400A4, 0x3F08, 0x11d3,
                                              { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };

    QString errorString;
    HRESULT hr;
    IMoniker* pMoniker = NULL;
    ICreateDevEnum* pDevEnum = NULL;
    IEnumMoniker* pEnum = NULL;

    // Create the filter graph
    hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC,
                          IID_IGraphBuilder, (void**)&m_filterGraph);
    if (FAILED(hr)) {
        errorString = tr("Failed to create filter graph");
        goto failed;
    }

    // Create the capture graph builder
    hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                          IID_ICaptureGraphBuilder2, (void**)&m_graphBuilder);
    if (FAILED(hr)) {
        errorString = tr("Failed to create graph builder");
        goto failed;
    }

    // Attach the filter graph to the capture graph
    hr = m_graphBuilder->SetFiltergraph(m_filterGraph);
    if (FAILED(hr)) {
        errorString = tr("Failed to connect capture graph and filter graph");
        goto failed;
    }

    // Find the Capture device
    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
                          CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
                          reinterpret_cast<void**>(&pDevEnum));
    if (SUCCEEDED(hr)) {
        // Create an enumerator for the video capture category
        hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
        pDevEnum->Release();
        if (S_OK == hr) {
            pEnum->Reset();
            IMalloc *mallocInterface = 0;
            CoGetMalloc(1, (LPMALLOC*)&mallocInterface);
            // go through and find all video capture devices
            while (pEnum->Next(1, &pMoniker, NULL) == S_OK) {
                BSTR strName = 0;
                hr = pMoniker->GetDisplayName(NULL, NULL, &strName);
                if (SUCCEEDED(hr)) {
                    QString output = QString::fromWCharArray(strName);
                    mallocInterface->Free(strName);
                    if (m_sourceDeviceName.contains(output)) {
                        hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&m_sourceFilter);
                        if (SUCCEEDED(hr)) {
                            pMoniker->Release();
                            break;
                        }
                    }
                }
                pMoniker->Release();
            }
            mallocInterface->Release();
            if (NULL == m_sourceFilter) {
                if (m_sourceDeviceName.contains(QLatin1String("default"))) {
                    pEnum->Reset();
                    // still have to loop to discard bind to storage failure case
                    while (pEnum->Next(1, &pMoniker, NULL) == S_OK) {
                        IPropertyBag *pPropBag = 0;

                        hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
                                                     (void**)(&pPropBag));
                        if (FAILED(hr)) {
                            pMoniker->Release();
                            continue; // Don't panic yet
                        }

                        // No need to get the description, just grab it
                        hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&m_sourceFilter);
                        pPropBag->Release();
                        pMoniker->Release();
                        if (SUCCEEDED(hr)) {
                            break; // done, stop looping through
                        } else {
                            qWarning() << "Object bind failed";
                        }
                    }
                }
            }
            pEnum->Release();
        }
    }

    if (!m_sourceFilter) {
        errorString = tr("No capture device found");
        goto failed;
    }

    // Sample grabber filter
    if (!m_previewSampleGrabber) {
        m_previewSampleGrabber = new DirectShowSampleGrabber;
        connect(m_previewSampleGrabber, &DirectShowSampleGrabber::bufferAvailable,
                this, &DSCameraSession::onFrameAvailable);
    }

    // Null renderer. Input connected to the sample grabber's output. Simply
    // discard the samples it receives.
    hr = CoCreateInstance(cLSID_NullRenderer, NULL, CLSCTX_INPROC,
                          IID_IBaseFilter, (void**)&m_nullRendererFilter);
    if (FAILED(hr)) {
        errorString = tr("Failed to create null renderer");
        goto failed;
    }

    updateSourceCapabilities();

    return true;

failed:
    m_needsHorizontalMirroring = false;
    SAFE_RELEASE(m_sourceFilter);
    SAFE_RELEASE(m_nullRendererFilter);
    SAFE_RELEASE(m_filterGraph);
    SAFE_RELEASE(m_graphBuilder);
    setError(QCamera::CameraError, errorString);

    return false;
}

bool DSCameraSession::configurePreviewFormat()
{
    // Resolve viewfinder settings
    int settingsIndex = 0;
    QCameraViewfinderSettings resolvedViewfinderSettings;
    for (const QCameraViewfinderSettings &s : qAsConst(m_supportedViewfinderSettings)) {
        if ((m_viewfinderSettings.resolution().isEmpty()
                || m_viewfinderSettings.resolution() == s.resolution())
                && (qFuzzyIsNull(m_viewfinderSettings.minimumFrameRate())
                    || qFuzzyCompare((float)m_viewfinderSettings.minimumFrameRate(), (float)s.minimumFrameRate()))
                && (qFuzzyIsNull(m_viewfinderSettings.maximumFrameRate())
                    || qFuzzyCompare((float)m_viewfinderSettings.maximumFrameRate(), (float)s.maximumFrameRate()))
                && (m_viewfinderSettings.pixelFormat() == QVideoFrame::Format_Invalid
                    || m_viewfinderSettings.pixelFormat() == s.pixelFormat())
                && (m_viewfinderSettings.pixelAspectRatio().isEmpty()
                    || m_viewfinderSettings.pixelAspectRatio() == s.pixelAspectRatio())) {
            resolvedViewfinderSettings = s;
            break;
        }
        ++settingsIndex;
    }

    if (resolvedViewfinderSettings.isNull()) {
        qWarning("Invalid viewfinder settings");
        return false;
    }

    m_actualViewfinderSettings = resolvedViewfinderSettings;

    m_sourceFormat = m_supportedFormats[settingsIndex];
    // Set frame rate.
    // We don't care about the minimumFrameRate, DirectShow only allows to set an
    // average frame rate, so set that to the maximumFrameRate.
    VIDEOINFOHEADER *videoInfo = reinterpret_cast<VIDEOINFOHEADER*>(m_sourceFormat->pbFormat);
    videoInfo->AvgTimePerFrame = 10000000 / resolvedViewfinderSettings.maximumFrameRate();

    // We only support RGB32, if the capture source doesn't support
    // that format, the graph builder will automatically insert a
    // converter.
    if (m_surface && !m_surface->supportedPixelFormats(QAbstractVideoBuffer::NoHandle)
            .contains(QVideoFrame::Format_RGB32)) {
        qWarning() << "Video surface needs to support RGB32 pixel format";
        return false;
    }

    m_previewPixelFormat = QVideoFrame::Format_RGB32;
    m_previewSize = resolvedViewfinderSettings.resolution();
    m_previewSurfaceFormat = QVideoSurfaceFormat(m_previewSize,
                                                 m_previewPixelFormat,
                                                 QAbstractVideoBuffer::NoHandle);
    m_previewSurfaceFormat.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);

    HRESULT hr;
    IAMStreamConfig* pConfig = 0;
    hr = m_graphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                       m_sourceFilter, IID_IAMStreamConfig,
                                       (void**)&pConfig);
    if (FAILED(hr)) {
        qWarning() << "Failed to get config for capture device";
        return false;
    }

    hr = pConfig->SetFormat(&m_sourceFormat);
    pConfig->Release();
    if (FAILED(hr)) {
        qWarning() << "Unable to set video format on capture device";
        return false;
    }

    // Set sample grabber format
    static const AM_MEDIA_TYPE grabberFormat { MEDIATYPE_Video, MEDIASUBTYPE_ARGB32,
                                               0, 0, 0, FORMAT_VideoInfo, nullptr, 0, nullptr };
    if (!m_previewSampleGrabber->setMediaType(&grabberFormat))
        return false;

    m_previewSampleGrabber->start(DirectShowSampleGrabber::CallbackMethod::BufferCB);

    return true;
}

void DSCameraSession::updateImageProcessingParametersInfos()
{
    if (!m_graphBuilder) {
        qWarning() << "failed to access the graph builder";
        return;
    }

    IAMVideoProcAmp *pVideoProcAmp = NULL;
    const HRESULT hr = m_graphBuilder->FindInterface(
                NULL,
                NULL,
                m_sourceFilter,
                IID_IAMVideoProcAmp,
                reinterpret_cast<void**>(&pVideoProcAmp)
                );

    if (FAILED(hr) || !pVideoProcAmp) {
        qWarning() << "failed to find the video proc amp";
        return;
    }

    for (int property = VideoProcAmp_Brightness; property <= VideoProcAmp_Gain; ++property) {
        QCameraImageProcessingControl::ProcessingParameter processingParameter; // not initialized

        switch (property) {
        case VideoProcAmp_Brightness:
            processingParameter = QCameraImageProcessingControl::BrightnessAdjustment;
            break;
        case VideoProcAmp_Contrast:
            processingParameter = QCameraImageProcessingControl::ContrastAdjustment;
            break;
        case VideoProcAmp_Saturation:
            processingParameter = QCameraImageProcessingControl::SaturationAdjustment;
            break;
        case VideoProcAmp_Sharpness:
            processingParameter = QCameraImageProcessingControl::SharpeningAdjustment;
            break;
        case VideoProcAmp_WhiteBalance:
            processingParameter = QCameraImageProcessingControl::ColorTemperature;
            break;
        default: // unsupported or not yet implemented parameter
            continue;
        }

        ImageProcessingParameterInfo sourceValueInfo;
        LONG steppingDelta = 0;

        HRESULT hr = pVideoProcAmp->GetRange(
                    property,
                    &sourceValueInfo.minimumValue,
                    &sourceValueInfo.maximumValue,
                    &steppingDelta,
                    &sourceValueInfo.defaultValue,
                    &sourceValueInfo.capsFlags);

        if (FAILED(hr))
            continue;

        hr = pVideoProcAmp->Get(
                    property,
                    &sourceValueInfo.currentValue,
                    &sourceValueInfo.capsFlags);

        if (FAILED(hr))
            continue;

        sourceValueInfo.videoProcAmpProperty = static_cast<VideoProcAmpProperty>(property);

        m_imageProcessingParametersInfos.insert(processingParameter, sourceValueInfo);
    }

    pVideoProcAmp->Release();

    for (auto it = m_pendingImageProcessingParametrs.cbegin();
         it != m_pendingImageProcessingParametrs.cend();
         ++it) {
        setImageProcessingParameter(it.key(), it.value());
    }
    m_pendingImageProcessingParametrs.clear();
}

bool DSCameraSession::connectGraph()
{
    HRESULT hr = m_filterGraph->AddFilter(m_sourceFilter, L"Capture Filter");
    if (FAILED(hr)) {
        qWarning() << "failed to add capture filter to graph";
        return false;
    }

    if (FAILED(m_filterGraph->AddFilter(m_previewSampleGrabber->filter(), L"Sample Grabber"))) {
        qWarning() << "failed to add sample grabber to graph";
        return false;
    }

    hr = m_filterGraph->AddFilter(m_nullRendererFilter, L"Null Renderer");
    if (FAILED(hr)) {
        qWarning() << "failed to add null renderer to graph";
        return false;
    }

    hr = m_graphBuilder->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                      m_sourceFilter,
                                      m_previewSampleGrabber->filter(),
                                      m_nullRendererFilter);
    if (FAILED(hr)) {
        qWarning() << "Graph failed to connect filters" << hr;
        return false;
    }

    return true;
}

void DSCameraSession::disconnectGraph()
{
    // To avoid increasing the memory usage every time the graph is re-connected it's
    // important that all filters are released; also the ones added by the "Intelligent Connect".
    IEnumFilters *enumFilters = NULL;
    if (SUCCEEDED(m_filterGraph->EnumFilters(&enumFilters))) {
        IBaseFilter *filter = NULL;
        while (enumFilters->Next(1, &filter, NULL) == S_OK) {
            m_filterGraph->RemoveFilter(filter);
            enumFilters->Reset();
            filter->Release();
        }
        enumFilters->Release();
    }
}

static bool qt_frameRateRangeGreaterThan(const QCamera::FrameRateRange &r1, const QCamera::FrameRateRange &r2)
{
    return r1.maximumFrameRate > r2.maximumFrameRate;
}

void DSCameraSession::updateSourceCapabilities()
{
    HRESULT hr;
    AM_MEDIA_TYPE *pmt = NULL;
    VIDEOINFOHEADER *pvi = NULL;
    VIDEO_STREAM_CONFIG_CAPS scc;
    IAMStreamConfig* pConfig = 0;

    m_supportedViewfinderSettings.clear();
    m_needsHorizontalMirroring = false;
    m_supportedFormats.clear();
    m_imageProcessingParametersInfos.clear();

    IAMVideoControl *pVideoControl = 0;
    hr = m_graphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                       m_sourceFilter, IID_IAMVideoControl,
                                       (void**)&pVideoControl);
    if (FAILED(hr)) {
        qWarning() << "Failed to get the video control";
    } else {
        IPin *pPin = 0;
        if (!DirectShowUtils::getPin(m_sourceFilter, PINDIR_OUTPUT, &pPin, &hr)) {
            qWarning() << "Failed to get the pin for the video control";
        } else {
            long supportedModes;
            hr = pVideoControl->GetCaps(pPin, &supportedModes);
            if (FAILED(hr)) {
                qWarning() << "Failed to get the supported modes of the video control";
            } else if (supportedModes & VideoControlFlag_FlipHorizontal) {
                long mode;
                hr = pVideoControl->GetMode(pPin, &mode);
                if (FAILED(hr))
                    qWarning() << "Failed to get the mode of the video control";
                else if (supportedModes & VideoControlFlag_FlipHorizontal)
                    m_needsHorizontalMirroring = (mode & VideoControlFlag_FlipHorizontal);
            }
            pPin->Release();
        }
        pVideoControl->Release();
    }

    hr = m_graphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                       m_sourceFilter, IID_IAMStreamConfig,
                                       (void**)&pConfig);
    if (FAILED(hr)) {
        qWarning() << "failed to get config on capture device";
        return;
    }

    int iCount;
    int iSize;
    hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
    if (FAILED(hr)) {
        qWarning() << "failed to get capabilities";
        return;
    }

    for (int iIndex = 0; iIndex < iCount; ++iIndex) {
        hr = pConfig->GetStreamCaps(iIndex, &pmt, reinterpret_cast<BYTE*>(&scc));
        if (hr == S_OK) {
            QVideoFrame::PixelFormat pixelFormat = DirectShowMediaType::pixelFormatFromType(pmt);

            if (pmt->majortype == MEDIATYPE_Video
                    && pmt->formattype == FORMAT_VideoInfo
                    && pixelFormat != QVideoFrame::Format_Invalid) {
                pvi = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
                QSize resolution(pvi->bmiHeader.biWidth, pvi->bmiHeader.biHeight);

                QList<QCamera::FrameRateRange> frameRateRanges;

                if (pVideoControl) {
                    IPin *pPin = 0;
                    if (!DirectShowUtils::getPin(m_sourceFilter, PINDIR_OUTPUT, &pPin, &hr)) {
                        qWarning() << "Failed to get the pin for the video control";
                    } else {
                        long listSize = 0;
                        LONGLONG *frameRates = 0;
                        SIZE size = { resolution.width(), resolution.height() };
                        if (SUCCEEDED(pVideoControl->GetFrameRateList(pPin, iIndex, size,
                                                                      &listSize, &frameRates))) {
                            for (long i = 0; i < listSize; ++i) {
                                qreal fr = qreal(10000000) / frameRates[i];
                                frameRateRanges.append(QCamera::FrameRateRange(fr, fr));
                            }

                            // Make sure higher frame rates come first
                            std::sort(frameRateRanges.begin(), frameRateRanges.end(),
                                      qt_frameRateRangeGreaterThan);
                        }
                        pPin->Release();
                    }
                }

                if (frameRateRanges.isEmpty()) {
                    frameRateRanges.append(QCamera::FrameRateRange(qreal(10000000) / scc.MaxFrameInterval,
                                                                   qreal(10000000) / scc.MinFrameInterval));
                }

                for (const QCamera::FrameRateRange &frameRateRange : qAsConst(frameRateRanges)) {
                    QCameraViewfinderSettings settings;
                    settings.setResolution(resolution);
                    settings.setMinimumFrameRate(frameRateRange.minimumFrameRate);
                    settings.setMaximumFrameRate(frameRateRange.maximumFrameRate);
                    settings.setPixelFormat(pixelFormat);
                    settings.setPixelAspectRatio(1, 1);
                    m_supportedViewfinderSettings.append(settings);
                    m_supportedFormats.append(DirectShowMediaType(*pmt));
                }
            }
            DirectShowMediaType::deleteType(pmt);
        }
    }
    pConfig->Release();

    updateImageProcessingParametersInfos();
}

QT_END_NAMESPACE