mirror of https://github.com/gnif/LookingGlass.git (synced 2024-12-26 07:23:40 +00:00)
[host] move H264 out of DXGI into separate class
parent 48d3403c40
commit e4cdc58399
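In short, this commit moves the Media Foundation H264 encoder setup, the IMFAsyncCallback event plumbing and the encode/drain state out of the DXGI capture class and into the new MFT::H264 class (host/MFT/H264.h, host/MFT/H264.cpp), with the shared COM smart-pointer typedefs split out into host/Com.h. As a rough sketch of how a capture backend is expected to drive the new class, using only the interface added in this commit (the encoder is created with new MFT::H264() and set up via Initialize(device, width, height); the helper function below and its arguments are illustrative, not part of the commit):

  #include "MFT/H264.h"

  // Illustrative helper only: pumps the encoder once and copies out an encoded
  // frame if one is ready. Interface names are as declared in host/MFT/H264.h.
  static bool EncodePump(MFT::H264 * h264, ID3D11Texture2DPtr texture,
                         void * out, size_t outSize, unsigned int & outLen)
  {
    // Process() waits on the MFT's event loop and reports what it wants next.
    unsigned int events = h264->Process();
    if (events & MFT::H264_EVENT_ERROR)
      return false;

    // Feed a captured D3D11 texture when the encoder asks for input.
    if (events & MFT::H264_EVENT_NEEDS_DATA)
      if (!h264->ProvideFrame(texture))
        return false;

    // Copy an encoded H264 frame into the caller's buffer when one is available.
    if (events & MFT::H264_EVENT_HAS_DATA)
      if (!h264->GetFrame(out, outSize, outLen))
        return false;

    return true;
  }

This mirrors how DXGI::GrabFrameH264 uses the class in the diff below; the capture class now only owns an MFT::H264 * and no longer inherits IMFAsyncCallback itself.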
@@ -23,39 +23,7 @@ using namespace Capture;

#include "common/debug.h"
#include "common/memcpySSE.h"

#include <mfapi.h>
#include <wmcodecdsp.h>
#include <codecapi.h>
#include <mferror.h>
#include <evr.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>

#if __MINGW32__

EXTERN_GUID(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, 0xa634a91c, 0x822b, 0x41b9, 0xa4, 0x94, 0x4d, 0xe4, 0x64, 0x36, 0x12, 0xb0);
EXTERN_GUID(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, 0xf81da2c, 0xb537, 0x4672, 0xa8, 0xb2, 0xa6, 0x81, 0xb1, 0x73, 0x7, 0xa3);
EXTERN_GUID(MF_SA_D3D11_AWARE, 0x206b4fc8, 0xfcf9, 0x4c51, 0xaf, 0xe3, 0x97, 0x64, 0x36, 0x9e, 0x33, 0xa0);

#define METransformUnknown 600
#define METransformNeedInput 601
#define METransformHaveOutput 602
#define METransformDrainComplete 603
#define METransformMarker 604
#endif

template <class T> void SafeRelease(T **ppT)
{
  if (*ppT)
  {
    (*ppT)->Release();
    *ppT = NULL;
  }
}

DXGI::DXGI() :
  m_cRef(1),
  m_options(NULL),
  m_initialized(false),
  m_dxgiFactory(),
@@ -65,7 +33,6 @@ DXGI::DXGI() :
  m_texture(),
  m_pointer(NULL)
{
  MFStartup(MF_VERSION);
}

DXGI::~DXGI()
@@ -266,186 +233,19 @@ bool DXGI::InitRawCapture()

bool DXGI::InitH264Capture()
{
  HRESULT status;

  MFT_REGISTER_TYPE_INFO typeInfo;
  IMFActivate **activationPointers;
  UINT32 activationPointerCount;

  ID3D10MultithreadPtr mt(m_device);
  mt->SetMultithreadProtected(TRUE);
  SafeRelease(&mt);

  m_encodeEvent = CreateEvent(NULL, TRUE , FALSE, NULL);
  m_shutdownEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
  InitializeCriticalSection(&m_encodeCS);

  typeInfo.guidMajorType = MFMediaType_Video;
  typeInfo.guidSubtype = MFVideoFormat_H264;

  status = MFTEnumEx(
    MFT_CATEGORY_VIDEO_ENCODER,
    MFT_ENUM_FLAG_HARDWARE,
    NULL,
    &typeInfo,
    &activationPointers,
    &activationPointerCount
  );
  if (FAILED(status))
  m_h264 = new MFT::H264();
  if (!m_h264->Initialize(m_device, m_width, m_height))
  {
    DEBUG_WINERROR("Failed to enumerate encoder MFTs", status);
    delete m_h264;
    m_h264 = NULL;
    return false;
  }

  if (activationPointerCount == 0)
  {
    DEBUG_WINERROR("Hardware H264 MFT not available", status);
    return false;
  }

  {
    UINT32 nameLen = 0;
    activationPointers[0]->GetStringLength(MFT_FRIENDLY_NAME_Attribute, &nameLen);
    wchar_t * name = new wchar_t[nameLen+1];
    activationPointers[0]->GetString(MFT_FRIENDLY_NAME_Attribute, name, nameLen + 1, NULL);
    DEBUG_INFO("Using Encoder: %S", name);
    delete[] name;
  }

  m_mfActivation = activationPointers[0];
  CoTaskMemFree(activationPointers);

  status = m_mfActivation->ActivateObject(IID_PPV_ARGS(&m_mfTransform));
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to create H264 encoder MFT", status);
    return false;
  }

  IMFAttributesPtr attribs;
  m_mfTransform->GetAttributes(&attribs);
  attribs->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);
  attribs->SetUINT32(MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, TRUE);
  attribs->SetUINT32(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, TRUE);
  attribs->SetUINT32(MF_LOW_LATENCY, TRUE);

  UINT32 d3d11Aware = 0;
  UINT32 async = 0;
  attribs->GetUINT32(MF_TRANSFORM_ASYNC, &async);
  attribs->GetUINT32(MF_SA_D3D11_AWARE, &d3d11Aware);
  if (async)
    attribs->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE);
  SafeRelease(&attribs);

  status = m_mfTransform.QueryInterface(IID_PPV_ARGS(&m_mediaEventGen));
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to obtain th emedia event generator interface", status);
    return false;
  }

  status = m_mediaEventGen->BeginGetEvent(this, NULL);
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to set the begin get event", status);
    return false;
  }

  if (d3d11Aware)
  {
    MFCreateDXGIDeviceManager(&m_resetToken, &m_mfDeviceManager);
    status = m_mfDeviceManager->ResetDevice(m_device, m_resetToken);
    if (FAILED(status))
    {
      DEBUG_WINERROR("Failed to call reset device", status);
      return false;
    }

    status = m_mfTransform->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, ULONG_PTR(m_mfDeviceManager.GetInterfacePtr()));
    if (FAILED(status))
    {
      DEBUG_WINERROR("Failed to set the D3D manager", status);
      return false;
    }
  }

  IMFMediaTypePtr outType;
  MFCreateMediaType(&outType);

  outType->SetGUID  (MF_MT_MAJOR_TYPE             , MFMediaType_Video);
  outType->SetGUID  (MF_MT_SUBTYPE                , MFVideoFormat_H264);
  outType->SetUINT32(MF_MT_AVG_BITRATE            , 384*1000);
  outType->SetUINT32(MF_MT_INTERLACE_MODE         , MFVideoInterlace_Progressive);
  outType->SetUINT32(MF_MT_MPEG2_PROFILE          , eAVEncH264VProfile_High);
  outType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);

  MFSetAttributeSize (outType, MF_MT_FRAME_SIZE        , m_width, m_height);
  MFSetAttributeRatio(outType, MF_MT_FRAME_RATE        , 30, 1);
  MFSetAttributeRatio(outType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);

  status = m_mfTransform->SetOutputType(0, outType, 0);
  SafeRelease(&outType);
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to set the output media type on the H264 encoder MFT", status);
    return false;
  }

  IMFMediaTypePtr inType;
  MFCreateMediaType(&inType);

  inType->SetGUID  (MF_MT_MAJOR_TYPE             , MFMediaType_Video);
  inType->SetGUID  (MF_MT_SUBTYPE                , MFVideoFormat_NV12);
  inType->SetUINT32(MF_MT_INTERLACE_MODE         , MFVideoInterlace_Progressive);
  inType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);

  MFSetAttributeSize (inType, MF_MT_FRAME_SIZE        , m_width, m_height);
  MFSetAttributeRatio(inType, MF_MT_FRAME_RATE        , 30, 1);
  MFSetAttributeRatio(inType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);

  status = m_mfTransform->SetInputType(0, inType, 0);
  SafeRelease(&inType);
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to set the input media type on the H264 encoder MFT", status);
    return false;
  }

  m_mfTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
  m_mfTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
  m_mfTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);

#if 0
  status = MFTRegisterLocalByCLSID(
    __uuidof(CColorConvertDMO),
    MFT_CATEGORY_VIDEO_PROCESSOR,
    L"",
    MFT_ENUM_FLAG_SYNCMFT,
    0,
    NULL,
    0,
    NULL
  );
  if (FAILED(status))
  {
    DEBUG_ERROR("Failed to register color converter DSP");
    return false;
  }
#endif

  return true;
}

void DXGI::DeInitialize()
{
  if (m_mediaEventGen)
  {
    m_mfTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
    m_mfTransform->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
    while (WaitForSingleObject(m_shutdownEvent, INFINITE) != WAIT_OBJECT_0) {}
    m_mfTransform->DeleteInputStream(0);
  }

  ReleaseFrame();

  if (m_pointer)
@@ -461,10 +261,6 @@ void DXGI::DeInitialize()
    m_surfaceMapped = false;
  }

  SafeRelease(&m_mediaEventGen);
  SafeRelease(&m_mfTransform);
  SafeRelease(&m_mfDeviceManager);

  SafeRelease(&m_texture);
  SafeRelease(&m_dup);
  SafeRelease(&m_output);
@@ -472,21 +268,6 @@ void DXGI::DeInitialize()
  SafeRelease(&m_device);
  SafeRelease(&m_dxgiFactory);

  if (m_encodeEvent)
  {
    CloseHandle(m_encodeEvent );
    CloseHandle(m_shutdownEvent);
    m_encodeEvent = NULL;
    m_shutdownEvent = NULL;
    DeleteCriticalSection(&m_encodeCS);
  }

  if (m_mfActivation)
  {
    m_mfActivation->ShutdownObject();
    SafeRelease(&m_mfActivation);
  }

  m_initialized = false;
}

@@ -505,91 +286,6 @@ size_t DXGI::GetMaxFrameSize()
  return (m_width * m_height * 4);
}

STDMETHODIMP Capture::DXGI::Invoke(IMFAsyncResult * pAsyncResult)
{
  HRESULT status, evtStatus;
  MediaEventType meType = MEUnknown;
  IMFMediaEvent *pEvent = NULL;

  status = m_mediaEventGen->EndGetEvent(pAsyncResult, &pEvent);
  if (FAILED(status))
  {
    DEBUG_WINERROR("EndGetEvent", status);
    return status;
  }

  status = pEvent->GetStatus(&evtStatus);
  if (FAILED(status))
  {
    SafeRelease(&pEvent);
    DEBUG_WINERROR("GetStatus", status);
    return status;
  }

  if (FAILED(evtStatus))
  {
    SafeRelease(&pEvent);
    DEBUG_WINERROR("evtStatus", evtStatus);
    return evtStatus;
  }

  status = pEvent->GetType(&meType);
  if (FAILED(status))
  {
    SafeRelease(&pEvent);
    DEBUG_WINERROR("GetType", status);
    return status;
  }
  SafeRelease(&pEvent);

  switch (meType)
  {
    case METransformNeedInput:
      EnterCriticalSection(&m_encodeCS);
      m_encodeNeedsData = true;
      SetEvent(m_encodeEvent);
      LeaveCriticalSection(&m_encodeCS);
      break;

    case METransformHaveOutput:
      EnterCriticalSection(&m_encodeCS);
      m_encodeHasData = true;
      SetEvent(m_encodeEvent);
      LeaveCriticalSection(&m_encodeCS);
      break;

    case METransformDrainComplete:
    {
      status = m_mfTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
      if (FAILED(status))
      {
        DEBUG_WINERROR("MFT_MESSAGE_COMMAND_FLUSH", status);
        return status;
      }

      SetEvent(m_shutdownEvent);
      return S_OK;
    }

    case MEError:
      DEBUG_INFO("err");
      break;

    default:
      DEBUG_INFO("unk");
      break;
  }

  status = m_mediaEventGen->BeginGetEvent(this, NULL);
  if (FAILED(status))
  {
    DEBUG_WINERROR("BeginGetEvent", status);
    return status;
  }

  return status;
}

GrabStatus Capture::DXGI::GrabFrameTexture(struct FrameInfo & frame, struct CursorInfo & cursor, ID3D11Texture2DPtr & texture, bool & timeout)
{
  if (!m_initialized)
@@ -802,139 +498,43 @@ GrabStatus Capture::DXGI::GrabFrameH264(struct FrameInfo & frame, struct CursorI
{
  while(true)
  {
    // only reset the event if there isn't work pending
    EnterCriticalSection(&m_encodeCS);
    if (!m_encodeHasData && !m_encodeNeedsData)
      ResetEvent(m_encodeEvent);
    LeaveCriticalSection(&m_encodeCS);
    unsigned int events = m_h264->Process();
    if (events & MFT::H264_EVENT_ERROR)
      return GRAB_STATUS_ERROR;

    switch (WaitForSingleObject(m_encodeEvent, 1000))
    if (events & MFT::H264_EVENT_NEEDS_DATA)
    {
      case WAIT_FAILED:
        DEBUG_WINERROR("Wait for encode event failed", GetLastError());
        return GRAB_STATUS_ERROR;

      case WAIT_ABANDONED:
        DEBUG_ERROR("Wait abandoned");
        return GRAB_STATUS_ERROR;

      case WAIT_TIMEOUT:
        continue;

      case WAIT_OBJECT_0:
        break;
    }

    EnterCriticalSection(&m_encodeCS);

    HRESULT status;
    if (m_encodeNeedsData)
    {
      LeaveCriticalSection(&m_encodeCS);
      GrabStatus result;
      ID3D11Texture2DPtr src;
      ID3D11Texture2DPtr texture;
      bool timeout;

      while(true)
      result = GrabFrameTexture(frame, cursor, texture, timeout);
      if (timeout)
      {
        result = GrabFrameTexture(frame, cursor, src, timeout);
        if (result != GRAB_STATUS_OK)
        {
          ReleaseFrame();
          return result;
        }

        //FIXME: we should send the last frame again
        if (!timeout)
          break;
        // FIXME: this is wrong, we need to encode the last frame again
        return GRAB_STATUS_TIMEOUT;
      }

      // cursor data may be returned, only turn off the flag if we have a frame
      EnterCriticalSection(&m_encodeCS);
      m_encodeNeedsData = false;
      LeaveCriticalSection(&m_encodeCS);

      IMFMediaBufferPtr buffer;
      status = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), src, 0, FALSE, &buffer);
      SafeRelease(&src);
      if (FAILED(status))
      if (result != GRAB_STATUS_OK)
      {
        DEBUG_WINERROR("Failed to create DXGI surface buffer from texture", status);
        ReleaseFrame();
        return result;
      }

      if (!m_h264->ProvideFrame(texture))
        return GRAB_STATUS_ERROR;
    }

    IMF2DBufferPtr imfBuffer(buffer);
    DWORD length;
    imfBuffer->GetContiguousLength(&length);
    buffer->SetCurrentLength(length);
    SafeRelease(&imfBuffer);

    IMFSamplePtr sample;
    MFCreateSample(&sample);
    sample->AddBuffer(buffer);

    status = m_mfTransform->ProcessInput(0, sample, 0);
    if (FAILED(status))
    {
      DEBUG_WINERROR("Failed to process the input", status);
      return GRAB_STATUS_ERROR;
    }

    SafeRelease(&src );
    SafeRelease(&sample);
    SafeRelease(&buffer);

    SafeRelease(&texture);
    ReleaseFrame();
    EnterCriticalSection(&m_encodeCS);
    }

    if (m_encodeHasData)
    if (events & MFT::H264_EVENT_HAS_DATA)
    {
      m_encodeHasData = false;
      LeaveCriticalSection(&m_encodeCS);

      MFT_OUTPUT_STREAM_INFO streamInfo;
      status = m_mfTransform->GetOutputStreamInfo(0, &streamInfo);
      if (FAILED(status))
      {
        DEBUG_WINERROR("GetOutputStreamInfo", status);
        if (!m_h264->GetFrame(frame.buffer, frame.bufferSize, frame.pitch))
          return GRAB_STATUS_ERROR;
      }

      DWORD outStatus;
      MFT_OUTPUT_DATA_BUFFER outDataBuffer;
      outDataBuffer.dwStreamID = 0;
      outDataBuffer.dwStatus = 0;
      outDataBuffer.pEvents = NULL;
      outDataBuffer.pSample = NULL;

      status = m_mfTransform->ProcessOutput(0, 1, &outDataBuffer, &outStatus);
      if (FAILED(status))
      {
        DEBUG_WINERROR("ProcessOutput", status);
        return GRAB_STATUS_ERROR;
      }

      IMFMediaBufferPtr buffer;
      MFCreateAlignedMemoryBuffer((DWORD)frame.bufferSize, MF_128_BYTE_ALIGNMENT, &buffer);
      outDataBuffer.pSample->CopyToBuffer(buffer);
      SafeRelease(&outDataBuffer.pEvents);
      SafeRelease(&outDataBuffer.pSample);

      BYTE *pixels;
      DWORD maxLen, curLen;
      buffer->Lock(&pixels, &maxLen, &curLen);
      memcpySSE(frame.buffer, pixels, curLen);
      buffer->Unlock();
      SafeRelease(&buffer);

      frame.stride = 0;
      frame.pitch = curLen;

      return GRAB_STATUS_OK;
    }

    LeaveCriticalSection(&m_encodeCS);
  }
}

@@ -20,41 +20,17 @@ Place, Suite 330, Boston, MA 02111-1307 USA
#pragma once

#include "ICapture.h"
#include "Com.h"
#include "MFT/H264.h"

#define W32_LEAN_AND_MEAN
#include <windows.h>
#include <shlwapi.h>
#include <dxgi1_2.h>
#include <d3d11.h>
#include <mftransform.h>
#include <stdio.h>
#include <comdef.h>

_COM_SMARTPTR_TYPEDEF(IDXGIFactory1         , __uuidof(IDXGIFactory1         ));
_COM_SMARTPTR_TYPEDEF(ID3D11Device          , __uuidof(ID3D11Device          ));
_COM_SMARTPTR_TYPEDEF(ID3D11DeviceContext   , __uuidof(ID3D11DeviceContext   ));
_COM_SMARTPTR_TYPEDEF(ID3D10Multithread     , __uuidof(ID3D10Multithread     ));
_COM_SMARTPTR_TYPEDEF(IDXGIDevice           , __uuidof(IDXGIDevice           ));
_COM_SMARTPTR_TYPEDEF(IDXGIOutput1          , __uuidof(IDXGIOutput1          ));
_COM_SMARTPTR_TYPEDEF(IDXGIOutput           , __uuidof(IDXGIOutput           ));
_COM_SMARTPTR_TYPEDEF(IDXGIAdapter1         , __uuidof(IDXGIAdapter1         ));
_COM_SMARTPTR_TYPEDEF(IDXGIOutputDuplication, __uuidof(IDXGIOutputDuplication));
_COM_SMARTPTR_TYPEDEF(ID3D11Texture2D       , __uuidof(ID3D11Texture2D       ));
_COM_SMARTPTR_TYPEDEF(IDXGIResource         , __uuidof(IDXGIResource         ));

_COM_SMARTPTR_TYPEDEF(IMFActivate           , __uuidof(IMFActivate           ));
_COM_SMARTPTR_TYPEDEF(IMFAttributes         , __uuidof(IMFAttributes         ));
_COM_SMARTPTR_TYPEDEF(IMFDXGIDeviceManager  , __uuidof(IMFDXGIDeviceManager  ));
_COM_SMARTPTR_TYPEDEF(IMFTransform          , __uuidof(IMFTransform          ));
_COM_SMARTPTR_TYPEDEF(IMFMediaEventGenerator, __uuidof(IMFMediaEventGenerator));
_COM_SMARTPTR_TYPEDEF(IMFMediaType          , __uuidof(IMFMediaType          ));
_COM_SMARTPTR_TYPEDEF(IMFSample             , __uuidof(IMFSample             ));
_COM_SMARTPTR_TYPEDEF(IMFMediaBuffer        , __uuidof(IMFMediaBuffer        ));
_COM_SMARTPTR_TYPEDEF(IMF2DBuffer           , __uuidof(IMF2DBuffer           ));

namespace Capture
{
  class DXGI : public ICapture, public IMFAsyncCallback
  class DXGI : public ICapture
  {
  public:
    DXGI();
@@ -81,37 +57,6 @@ namespace Capture
    size_t GetMaxFrameSize();
    enum GrabStatus GrabFrame(struct FrameInfo & frame, struct CursorInfo & cursor);

    /*
      Junk needed for the horrid IMFAsyncCallback interface
    */
    STDMETHODIMP QueryInterface(REFIID riid, void ** ppv)
    {
      if (riid == __uuidof(IUnknown) || riid == __uuidof(IMFAsyncCallback)) {
        *ppv = static_cast<IMFAsyncCallback*>(this);
        AddRef();
        return S_OK;
      } else {
        *ppv = NULL;
        return E_NOINTERFACE;
      }
    }

    STDMETHODIMP_(ULONG) AddRef()
    {
      return InterlockedIncrement(&m_cRef);
    }

    STDMETHODIMP_(ULONG) Release()
    {
      long cRef = InterlockedDecrement(&m_cRef);
      if (!cRef)
        delete this;
      return cRef;
    }

    STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue) { return E_NOTIMPL; }
    STDMETHODIMP Invoke(IMFAsyncResult *pAsyncResult);

  private:
    bool InitRawCapture();
    bool InitH264Capture();
@@ -121,7 +66,6 @@ namespace Capture
    GrabStatus GrabFrameRaw (struct FrameInfo & frame, struct CursorInfo & cursor);
    GrabStatus GrabFrameH264 (struct FrameInfo & frame, struct CursorInfo & cursor);

    long m_cRef;
    CaptureOptions * m_options;

    bool m_initialized;
@@ -139,18 +83,7 @@ namespace Capture
    ID3D11Texture2DPtr m_texture;
    D3D11_MAPPED_SUBRESOURCE m_mapping;
    bool m_surfaceMapped;

    HANDLE m_encodeEvent;
    HANDLE m_shutdownEvent;
    bool m_encodeNeedsData;
    bool m_encodeHasData;
    CRITICAL_SECTION m_encodeCS;

    UINT m_resetToken;
    IMFDXGIDeviceManagerPtr m_mfDeviceManager;
    IMFActivatePtr m_mfActivation;
    IMFTransformPtr m_mfTransform;
    IMFMediaEventGeneratorPtr m_mediaEventGen;
    MFT::H264 * m_h264;

    BYTE * m_pointer;
    UINT m_pointerBufSize;

host/Com.h (new file, 56 lines)
@@ -0,0 +1,56 @@
/*
Looking Glass - KVM FrameRelay (KVMFR) Client
Copyright (C) 2017 Geoffrey McRae <geoff@hostfission.com>
https://looking-glass.hostfission.com

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA
*/

#pragma once
#include <comdef.h>
#include <dxgi1_2.h>
#include <d3d11.h>
#include <mftransform.h>

_COM_SMARTPTR_TYPEDEF(IDXGIFactory1         , __uuidof(IDXGIFactory1         ));
_COM_SMARTPTR_TYPEDEF(ID3D11Device          , __uuidof(ID3D11Device          ));
_COM_SMARTPTR_TYPEDEF(ID3D11DeviceContext   , __uuidof(ID3D11DeviceContext   ));
_COM_SMARTPTR_TYPEDEF(IDXGIDevice           , __uuidof(IDXGIDevice           ));
_COM_SMARTPTR_TYPEDEF(IDXGIOutput1          , __uuidof(IDXGIOutput1          ));
_COM_SMARTPTR_TYPEDEF(IDXGIOutput           , __uuidof(IDXGIOutput           ));
_COM_SMARTPTR_TYPEDEF(IDXGIAdapter1         , __uuidof(IDXGIAdapter1         ));
_COM_SMARTPTR_TYPEDEF(IDXGIOutputDuplication, __uuidof(IDXGIOutputDuplication));
_COM_SMARTPTR_TYPEDEF(ID3D11Texture2D       , __uuidof(ID3D11Texture2D       ));
_COM_SMARTPTR_TYPEDEF(IDXGIResource         , __uuidof(IDXGIResource         ));

_COM_SMARTPTR_TYPEDEF(ID3D10Multithread     , __uuidof(ID3D10Multithread     ));
_COM_SMARTPTR_TYPEDEF(IMFActivate           , __uuidof(IMFActivate           ));
_COM_SMARTPTR_TYPEDEF(IMFAttributes         , __uuidof(IMFAttributes         ));
_COM_SMARTPTR_TYPEDEF(IMFDXGIDeviceManager  , __uuidof(IMFDXGIDeviceManager  ));
_COM_SMARTPTR_TYPEDEF(IMFTransform          , __uuidof(IMFTransform          ));
_COM_SMARTPTR_TYPEDEF(IMFMediaEventGenerator, __uuidof(IMFMediaEventGenerator));
_COM_SMARTPTR_TYPEDEF(IMFMediaType          , __uuidof(IMFMediaType          ));
_COM_SMARTPTR_TYPEDEF(IMFSample             , __uuidof(IMFSample             ));
_COM_SMARTPTR_TYPEDEF(IMFMediaBuffer        , __uuidof(IMFMediaBuffer        ));
_COM_SMARTPTR_TYPEDEF(IMF2DBuffer           , __uuidof(IMF2DBuffer           ));


template <class T> void SafeRelease(T **ppT)
{
  if (*ppT)
  {
    (*ppT)->Release();
    *ppT = NULL;
  }
}
host/MFT/H264.cpp (new file, 464 lines)
@@ -0,0 +1,464 @@
/*
Looking Glass - KVM FrameRelay (KVMFR) Client
Copyright (C) 2017 Geoffrey McRae <geoff@hostfission.com>
https://looking-glass.hostfission.com

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA
*/

#include "MFT/H264.h"

#include "common/debug.h"
#include "common/memcpySSE.h"

#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#include <wmcodecdsp.h>
#include <codecapi.h>
#include <mferror.h>
#include <evr.h>

using namespace MFT;

#if __MINGW32__
EXTERN_GUID(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, 0xa634a91c, 0x822b, 0x41b9, 0xa4, 0x94, 0x4d, 0xe4, 0x64, 0x36, 0x12, 0xb0);
EXTERN_GUID(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, 0xf81da2c, 0xb537, 0x4672, 0xa8, 0xb2, 0xa6, 0x81, 0xb1, 0x73, 0x07, 0xa3);
EXTERN_GUID(MF_SA_D3D11_AWARE, 0x206b4fc8, 0xfcf9, 0x4c51, 0xaf, 0xe3, 0x97, 0x64, 0x36, 0x9e, 0x33, 0xa0);

#define METransformUnknown 600
#define METransformNeedInput 601
#define METransformHaveOutput 602
#define METransformDrainComplete 603
#define METransformMarker 604
#endif

MFT::H264::H264() :
  m_cRef(1)
{
  MFStartup(MF_VERSION);
}

MFT::H264::~H264()
{
  DeInitialize();
}

bool MFT::H264::Initialize(ID3D11DevicePtr device, unsigned int width, unsigned int height)
{
  DeInitialize();

  HRESULT status;

  MFT_REGISTER_TYPE_INFO typeInfo;
  IMFActivate **activationPointers;
  UINT32 activationPointerCount;

  m_device = device;
  m_width  = width;
  m_height = height;

  m_encodeEvent = CreateEvent(NULL, TRUE , FALSE, NULL);
  m_shutdownEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
  InitializeCriticalSection(&m_encodeCS);

  ID3D10MultithreadPtr mt(m_device);
  mt->SetMultithreadProtected(TRUE);
  SafeRelease(&mt);

  typeInfo.guidMajorType = MFMediaType_Video;
  typeInfo.guidSubtype = MFVideoFormat_H264;

  status = MFTEnumEx(
    MFT_CATEGORY_VIDEO_ENCODER,
    MFT_ENUM_FLAG_HARDWARE,
    NULL,
    &typeInfo,
    &activationPointers,
    &activationPointerCount
  );
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to enumerate encoder MFTs", status);
    return false;
  }

  if (activationPointerCount == 0)
  {
    DEBUG_WINERROR("Hardware H264 MFT not available", status);
    return false;
  }

  {
    UINT32 nameLen = 0;
    activationPointers[0]->GetStringLength(MFT_FRIENDLY_NAME_Attribute, &nameLen);
    wchar_t * name = new wchar_t[nameLen + 1];
    activationPointers[0]->GetString(MFT_FRIENDLY_NAME_Attribute, name, nameLen + 1, NULL);
    DEBUG_INFO("Using Encoder: %S", name);
    delete[] name;
  }

  m_mfActivation = activationPointers[0];
  CoTaskMemFree(activationPointers);

  status = m_mfActivation->ActivateObject(IID_PPV_ARGS(&m_mfTransform));
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to create H264 encoder MFT", status);
    return false;
  }

  IMFAttributesPtr attribs;
  m_mfTransform->GetAttributes(&attribs);
  attribs->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);
  attribs->SetUINT32(MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, TRUE);
  attribs->SetUINT32(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, TRUE);
  attribs->SetUINT32(MF_LOW_LATENCY, TRUE);

  UINT32 d3d11Aware = 0;
  UINT32 async = 0;
  attribs->GetUINT32(MF_TRANSFORM_ASYNC, &async);
  attribs->GetUINT32(MF_SA_D3D11_AWARE, &d3d11Aware);
  if (async)
    attribs->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE);
  SafeRelease(&attribs);

  status = m_mfTransform.QueryInterface(IID_PPV_ARGS(&m_mediaEventGen));
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to obtain th emedia event generator interface", status);
    return false;
  }

  status = m_mediaEventGen->BeginGetEvent(this, NULL);
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to set the begin get event", status);
    return false;
  }

  if (d3d11Aware)
  {
    MFCreateDXGIDeviceManager(&m_resetToken, &m_mfDeviceManager);
    status = m_mfDeviceManager->ResetDevice(m_device, m_resetToken);
    if (FAILED(status))
    {
      DEBUG_WINERROR("Failed to call reset device", status);
      return false;
    }

    status = m_mfTransform->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, ULONG_PTR(m_mfDeviceManager.GetInterfacePtr()));
    if (FAILED(status))
    {
      DEBUG_WINERROR("Failed to set the D3D manager", status);
      return false;
    }
  }

  IMFMediaTypePtr outType;
  MFCreateMediaType(&outType);

  outType->SetGUID  (MF_MT_MAJOR_TYPE             , MFMediaType_Video);
  outType->SetGUID  (MF_MT_SUBTYPE                , MFVideoFormat_H264);
  outType->SetUINT32(MF_MT_AVG_BITRATE            , 384 * 1000);
  outType->SetUINT32(MF_MT_INTERLACE_MODE         , MFVideoInterlace_Progressive);
  outType->SetUINT32(MF_MT_MPEG2_PROFILE          , eAVEncH264VProfile_High);
  outType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);

  MFSetAttributeSize (outType, MF_MT_FRAME_SIZE        , m_width, m_height);
  MFSetAttributeRatio(outType, MF_MT_FRAME_RATE        , 30     , 1       );
  MFSetAttributeRatio(outType, MF_MT_PIXEL_ASPECT_RATIO, 1      , 1       );

  status = m_mfTransform->SetOutputType(0, outType, 0);
  SafeRelease(&outType);
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to set the output media type on the H264 encoder MFT", status);
    return false;
  }

  IMFMediaTypePtr inType;
  MFCreateMediaType(&inType);

  inType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  inType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
  inType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
  inType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);

  MFSetAttributeSize(inType, MF_MT_FRAME_SIZE, m_width, m_height);
  MFSetAttributeRatio(inType, MF_MT_FRAME_RATE, 30, 1);
  MFSetAttributeRatio(inType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);

  status = m_mfTransform->SetInputType(0, inType, 0);
  SafeRelease(&inType);
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to set the input media type on the H264 encoder MFT", status);
    return false;
  }

  m_mfTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
  m_mfTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
  m_mfTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);

  return true;
}

void MFT::H264::DeInitialize()
{
  if (m_mediaEventGen)
  {
    m_mfTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
    m_mfTransform->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
    while (WaitForSingleObject(m_shutdownEvent, INFINITE) != WAIT_OBJECT_0) {}
    m_mfTransform->DeleteInputStream(0);
  }

  SafeRelease(&m_mediaEventGen);
  SafeRelease(&m_mfTransform);
  SafeRelease(&m_mfDeviceManager);

  if (m_encodeEvent)
  {
    CloseHandle(m_encodeEvent );
    CloseHandle(m_shutdownEvent);
    m_encodeEvent = NULL;
    m_shutdownEvent = NULL;
    DeleteCriticalSection(&m_encodeCS);
  }

  if (m_mfActivation)
  {
    m_mfActivation->ShutdownObject();
    SafeRelease(&m_mfActivation);
  }
}

unsigned int MFT::H264::Process()
{
  while(true)
  {
    // only reset the event if there isn't work pending
    EnterCriticalSection(&m_encodeCS);
    if (!m_encodeHasData && !m_encodeNeedsData)
      ResetEvent(m_encodeEvent);
    LeaveCriticalSection(&m_encodeCS);

    switch (WaitForSingleObject(m_encodeEvent, 1000))
    {
      case WAIT_FAILED:
        DEBUG_WINERROR("Wait for encode event failed", GetLastError());
        return H264_EVENT_ERROR;

      case WAIT_ABANDONED:
        DEBUG_ERROR("Wait abandoned");
        return H264_EVENT_ERROR;

      case WAIT_TIMEOUT:
        continue;

      case WAIT_OBJECT_0:
        break;
    }

    unsigned int events = 0;
    EnterCriticalSection(&m_encodeCS);
    if (m_encodeNeedsData) events |= H264_EVENT_NEEDS_DATA;
    if (m_encodeHasData  ) events |= H264_EVENT_HAS_DATA;
    LeaveCriticalSection(&m_encodeCS);

    return events;
  }

  return H264_EVENT_ERROR;
}

bool MFT::H264::ProvideFrame(ID3D11Texture2DPtr texture)
{
  EnterCriticalSection(&m_encodeCS);
  if (!m_encodeNeedsData)
  {
    LeaveCriticalSection(&m_encodeCS);
    return false;
  }
  m_encodeNeedsData = false;
  LeaveCriticalSection(&m_encodeCS);

  HRESULT status;
  IMFMediaBufferPtr buffer;
  status = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), texture, 0, FALSE, &buffer);
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to create DXGI surface buffer from texture", status);
    return false;
  }

  IMF2DBufferPtr imfBuffer(buffer);
  DWORD length;
  imfBuffer->GetContiguousLength(&length);
  buffer->SetCurrentLength(length);
  SafeRelease(&imfBuffer);

  IMFSamplePtr sample;
  MFCreateSample(&sample);
  sample->AddBuffer(buffer);

  status = m_mfTransform->ProcessInput(0, sample, 0);
  if (FAILED(status))
  {
    DEBUG_WINERROR("Failed to process the input", status);
    return false;
  }

  SafeRelease(&sample);
  SafeRelease(&buffer);
  return true;
}

bool MFT::H264::GetFrame(void * buffer, const size_t bufferSize, unsigned int & dataLen)
{
  EnterCriticalSection(&m_encodeCS);
  if (!m_encodeHasData)
  {
    LeaveCriticalSection(&m_encodeCS);
    return false;
  }

  m_encodeHasData = false;
  LeaveCriticalSection(&m_encodeCS);

  HRESULT status;
  MFT_OUTPUT_STREAM_INFO streamInfo;
  status = m_mfTransform->GetOutputStreamInfo(0, &streamInfo);
  if (FAILED(status))
  {
    DEBUG_WINERROR("GetOutputStreamInfo", status);
    return false;
  }

  DWORD outStatus;
  MFT_OUTPUT_DATA_BUFFER outDataBuffer;
  outDataBuffer.dwStreamID = 0;
  outDataBuffer.dwStatus = 0;
  outDataBuffer.pEvents = NULL;
  outDataBuffer.pSample = NULL;

  status = m_mfTransform->ProcessOutput(0, 1, &outDataBuffer, &outStatus);
  if (FAILED(status))
  {
    DEBUG_WINERROR("ProcessOutput", status);
    return false;
  }

  IMFMediaBufferPtr mb;
  MFCreateAlignedMemoryBuffer((DWORD)bufferSize, MF_128_BYTE_ALIGNMENT, &mb);
  outDataBuffer.pSample->CopyToBuffer(mb);
  SafeRelease(&outDataBuffer.pEvents);
  SafeRelease(&outDataBuffer.pSample);

  BYTE *pixels;
  DWORD maxLen, curLen;
  mb->Lock(&pixels, &maxLen, &curLen);
  memcpySSE(buffer, pixels, curLen);
  mb->Unlock();
  SafeRelease(&mb);

  dataLen = curLen;
  return true;
}

STDMETHODIMP MFT::H264::Invoke(IMFAsyncResult * pAsyncResult)
{
  HRESULT status, evtStatus;
  MediaEventType meType = MEUnknown;
  IMFMediaEvent *pEvent = NULL;

  status = m_mediaEventGen->EndGetEvent(pAsyncResult, &pEvent);
  if (FAILED(status))
  {
    DEBUG_WINERROR("EndGetEvent", status);
    return status;
  }

  status = pEvent->GetStatus(&evtStatus);
  if (FAILED(status))
  {
    SafeRelease(&pEvent);
    DEBUG_WINERROR("GetStatus", status);
    return status;
  }

  if (FAILED(evtStatus))
  {
    SafeRelease(&pEvent);
    DEBUG_WINERROR("evtStatus", evtStatus);
    return evtStatus;
  }

  status = pEvent->GetType(&meType);
  if (FAILED(status))
  {
    SafeRelease(&pEvent);
    DEBUG_WINERROR("GetType", status);
    return status;
  }
  SafeRelease(&pEvent);

  switch (meType)
  {
    case METransformNeedInput:
      EnterCriticalSection(&m_encodeCS);
      m_encodeNeedsData = true;
      SetEvent(m_encodeEvent);
      LeaveCriticalSection(&m_encodeCS);
      break;

    case METransformHaveOutput:
      EnterCriticalSection(&m_encodeCS);
      m_encodeHasData = true;
      SetEvent(m_encodeEvent);
      LeaveCriticalSection(&m_encodeCS);
      break;

    case METransformDrainComplete:
    {
      status = m_mfTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
      if (FAILED(status))
      {
        DEBUG_WINERROR("MFT_MESSAGE_COMMAND_FLUSH", status);
        return status;
      }

      SetEvent(m_shutdownEvent);
      return S_OK;
    }

    case MEError:
      DEBUG_INFO("err");
      break;

    default:
      DEBUG_INFO("unk");
      break;
  }

  status = m_mediaEventGen->BeginGetEvent(this, NULL);
  if (FAILED(status))
  {
    DEBUG_WINERROR("BeginGetEvent", status);
    return status;
  }

  return status;
}
host/MFT/H264.h (new file, 101 lines)
@@ -0,0 +1,101 @@
/*
Looking Glass - KVM FrameRelay (KVMFR) Client
Copyright (C) 2017 Geoffrey McRae <geoff@hostfission.com>
https://looking-glass.hostfission.com

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA
*/

#pragma once

#define W32_LEAN_AND_MEAN
#include <windows.h>
#include <shlwapi.h>
#include <stdio.h>

#include "Com.h"

namespace MFT
{
  enum H264_Event
  {
    H264_EVENT_ENCODE     = 0x01,
    H264_EVENT_NEEDS_DATA = 0x04,
    H264_EVENT_HAS_DATA   = 0x08,
    H264_EVENT_ERROR      = 0x10
  };

  class H264: public IMFAsyncCallback
  {
  public:
    H264();
    ~H264();
    bool Initialize(ID3D11DevicePtr device, unsigned int width, unsigned int height);
    void DeInitialize();
    unsigned int Process();
    bool ProvideFrame(ID3D11Texture2DPtr texture);
    bool GetFrame(void * buffer, const size_t bufferSize, unsigned int & dataLen);

    ID3D11DevicePtr m_device;
    unsigned int m_width;
    unsigned int m_height;

    HANDLE m_encodeEvent;
    HANDLE m_shutdownEvent;
    bool m_encodeNeedsData;
    bool m_encodeHasData;
    CRITICAL_SECTION m_encodeCS;

    UINT m_resetToken;
    IMFDXGIDeviceManagerPtr m_mfDeviceManager;
    IMFActivatePtr m_mfActivation;
    IMFTransformPtr m_mfTransform;
    IMFMediaEventGeneratorPtr m_mediaEventGen;

    /*
      Junk needed for the horrid IMFAsyncCallback interface
    */
    STDMETHODIMP QueryInterface(REFIID riid, void ** ppv)
    {
      if (riid == __uuidof(IUnknown) || riid == __uuidof(IMFAsyncCallback)) {
        *ppv = static_cast<IMFAsyncCallback*>(this);
        AddRef();
        return S_OK;
      }
      else {
        *ppv = NULL;
        return E_NOINTERFACE;
      }
    }

    STDMETHODIMP_(ULONG) AddRef()
    {
      return InterlockedIncrement(&m_cRef);
    }

    STDMETHODIMP_(ULONG) Release()
    {
      long cRef = InterlockedDecrement(&m_cRef);
      if (!cRef)
        delete this;
      return cRef;
    }

    STDMETHODIMP GetParameters(DWORD *pdwFlags, DWORD *pdwQueue) { return E_NOTIMPL; }
    STDMETHODIMP Invoke(IMFAsyncResult *pAsyncResult);

  private:
    long m_cRef;
  };
};
@@ -336,6 +336,7 @@
    <ClCompile Include="CrashHandler.cpp" />
    <ClCompile Include="ivshmem.cpp" />
    <ClCompile Include="main.cpp" />
    <ClCompile Include="MFT\H264.cpp" />
    <ClCompile Include="Service.cpp" />
    <ClCompile Include="TraceUtil.cpp" />
  </ItemGroup>
@@ -343,9 +344,11 @@
    <ClInclude Include="CaptureFactory.h" />
    <ClInclude Include="Capture\DXGI.h" />
    <ClInclude Include="Capture\NvFBC.h" />
    <ClInclude Include="Com.h" />
    <ClInclude Include="CrashHandler.h" />
    <ClInclude Include="ICapture.h" />
    <ClInclude Include="ivshmem.h" />
    <ClInclude Include="MFT\H264.h" />
    <ClInclude Include="Service.h" />
    <ClInclude Include="TraceUtil.h" />
    <ClInclude Include="Util.h" />

@@ -19,6 +19,12 @@
    <Filter Include="Source Files\Capture">
      <UniqueIdentifier>{0a865d22-907e-44ea-a230-ad7ede7edeb0}</UniqueIdentifier>
    </Filter>
    <Filter Include="Source Files\MFT">
      <UniqueIdentifier>{bd9c6e76-f398-49eb-acfb-3f50cd99724c}</UniqueIdentifier>
    </Filter>
    <Filter Include="Header Files\MFT">
      <UniqueIdentifier>{fead4000-1954-4480-8ee7-b817d7042761}</UniqueIdentifier>
    </Filter>
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="ivshmem.cpp">
@@ -45,6 +51,9 @@
    <ClCompile Include="TraceUtil.cpp">
      <Filter>Source Files</Filter>
    </ClCompile>
    <ClCompile Include="MFT\H264.cpp">
      <Filter>Source Files\MFT</Filter>
    </ClCompile>
  </ItemGroup>
  <ItemGroup>
    <ClInclude Include="ivshmem.h">
@@ -74,6 +83,12 @@
    <ClInclude Include="TraceUtil.h">
      <Filter>Header Files</Filter>
    </ClInclude>
    <ClInclude Include="MFT\H264.h">
      <Filter>Header Files\MFT</Filter>
    </ClInclude>
    <ClInclude Include="Com.h">
      <Filter>Header Files</Filter>
    </ClInclude>
  </ItemGroup>
  <ItemGroup>
    <MASM Include="..\common\memcpySSE.asm">