Hi All,
I'm having a problem with the HEVC decoder using Windows MFT and DirectX 9 on the newest Intel driver for the Intel HD Graphics 530 GPU (CPU i7-6700K). The problem occurs only with 10-bit HEVC video; 8-bit video decodes perfectly.
I've tested the decoder's behaviour on previous driver versions as well as on the Intel HD Graphics 620 (Kaby Lake):
Intel HD Graphics 620 - 2017-04-21 (21.20.16.4664)
Intel HD Graphics 530 - 2015-12-15 (20.19.15.4352)
Intel HD Graphics 530 - 2017-05-17 (21.20.16.4678)
and it works with no problems on all of them.
Unfortunately, on version "Intel HD Graphics 530 - 2017-08-13 (22.20.16.4771)" the decoder hangs.
I'm attaching a zipped test app (MSVC2015 project with test files). The test app is a modified H.264 sample app: it decodes HEVC frames and dumps them to a raw P010 frames file. The app hangs on this line:
hr = pdxSurfaceCopy->LockRect(&rect, NULL, D3DLOCK_READONLY);
It seems that after a "ProcessOutput" call, the app hangs as soon as I do anything on the DirectX 9 device.
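For context, that LockRect call sits inside dumpSurfaceToFile, right after the StretchRect copy into the offscreen surface (excerpt from the attached code):
// copy content to copy surface using StretchRect
hr = device->StretchRect(pdxSurface, NULL, pdxSurfaceCopy, NULL, D3DTEXF_NONE);
CHECK_HR(hr, "StretchRect");
// lock copy surface rectangle - with 10-bit HEVC on 22.20.16.4771 this call never returns
hr = pdxSurfaceCopy->LockRect(&rect, NULL, D3DLOCK_READONLY);
CHECK_HR(hr, "pdxSurfaceCopy->LockRect");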
The problem can also be reproduced when, after a single successful "ProcessOutput", the "dumpSurfaceToFile(pSurface, outputBuffer);" call is replaced with these few lines:
IDirect3DDevice9* device = NULL;
pSurface->GetDevice(&device);
device->ColorFill(pSurface, NULL, D3DCOLOR_ARGB(100, 100, 100, 100));
The next call to "ProcessOutput" then freezes.
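For clarity, this is roughly how the S_OK branch of the ProcessOutput loop looks with that substitution (a sketch based on the full source below, not the exact attached code):
case S_OK:
{
    IDirect3DSurface9 *pSurface = NULL;

    // get the media buffer and the underlying DXVA2 surface
    hr = outputDataBuffer.pSample->GetBufferByIndex(0, &pBuffer);
    CHECK_HR(hr, "GetBufferByIndex failed");
    hr = MFGetService(pBuffer, MR_BUFFER_SERVICE, IID_PPV_ARGS(&pSurface));
    CHECK_HR(hr, "MFGetService allocate surface failed");

    // instead of dumpSurfaceToFile(pSurface, outputBuffer), just touch the D3D9 device:
    IDirect3DDevice9* device = NULL;
    pSurface->GetDevice(&device);
    device->ColorFill(pSurface, NULL, D3DCOLOR_ARGB(100, 100, 100, 100));

    outputDataBuffer.pSample->Release();
    pBuffer->Release();
    sampleCount++;
    break;
}
// the next ProcessOutput call after this branch never returns on 22.20.16.4771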
Here's the decoder code:
// MftTestApp.cpp : Defines the entry point for the console application.
//

#include "stdafx.h"

/// Filename: MFMP4ToYUVWithMFT.cpp
///
/// Description:
/// This file contains a C++ console application that reads H265 encoded video frames from an mp4 file and decodes
/// them to a YUV pixel format and dumps them to an output file stream.
///
/// To convert the raw yuv data dumped at the end of this sample use the ffmpeg command below:
/// ffmpeg -vcodec rawvideo -s 640x360 -pix_fmt yuv420p -i rawframes.yuv -vframes 1 output.jpeg
/// ffmpeg -vcodec rawvideo -s 640x360 -pix_fmt yuv420p -i rawframes.yuv out.avi
///
/// History:
/// 08 Mar 2015 Aaron Clauson (aaron@sipsorcery.com) Created.
///
/// License: Public

#include <stdio.h>
#include <tchar.h>
#include <evr.h>
#include <mfapi.h>
#include <mfplay.h>
#include <mfreadwrite.h>
#include <mferror.h>
#include <wmcodecdsp.h>
#include <Mfidl.h>
#include <fstream>
#include <codecapi.h>
#include <D3d9.h>
#include <Dxva2api.h>

//#define _8BIT_VIDEO_TEST
#define _10BIT_VIDEO_TEST

#ifdef _8BIT_VIDEO_TEST
const int SAMPLE_COUNT = 10;
const int VIDEO_SAMPLE_WIDTH = 1920;
const int VIDEO_SAMPLE_HEIGHT = 1080;
const int VIDEO_SAMPLE_HEIGHT_BIT_DEPTH = 8;
char* OUTPUT_FILE = "frames8bit.bin";
LPCWSTR INPUT_FILE = L"jellyfish8bit.mp4";
#endif

#ifdef _10BIT_VIDEO_TEST
const int SAMPLE_COUNT = 10;
const int VIDEO_SAMPLE_WIDTH = 1920;
const int VIDEO_SAMPLE_HEIGHT = 1080;
const int VIDEO_SAMPLE_HEIGHT_BIT_DEPTH = 10;
char* OUTPUT_FILE = "frames10bit.bin";
LPCWSTR INPUT_FILE = L"jellyfish10bit.mp4";
#endif

void CHECK_HR(HRESULT hr, char* str)
{
    if (hr != S_OK)
    {
        printf(str);
        printf("Error: %.2X.\n", hr);
        exit(-1);
    }
}

void setInputType(IMFTransform* pDecoderTransform)
{
    GUID preferredFormats[2] = { MFVideoFormat_HEVC_ES, MFVideoFormat_HEVC };
    for (int i = 0; i < 2; ++i)
    {
        int idx = 0;
        while (true)
        {
            IMFMediaType* availableInputType;
            HRESULT hr = pDecoderTransform->GetInputAvailableType(0, idx++, &availableInputType);
            CHECK_HR(hr, "GetInputAvailableType Failed");
            GUID inputFormat;
            availableInputType->GetGUID(MF_MT_SUBTYPE, &inputFormat);
            if (IsEqualGUID(inputFormat, preferredFormats[i]))
            {
                MFSetAttributeSize(availableInputType, MF_MT_FRAME_SIZE, VIDEO_SAMPLE_WIDTH, VIDEO_SAMPLE_HEIGHT);
                availableInputType->SetUINT32(MF_MT_MPEG2_PROFILE, 2/*eAVEncH265VProfile_Main_420_10*/);
                hr = pDecoderTransform->SetInputType(0, availableInputType, 0);
                CHECK_HR(hr, "SetInputType failed");
                return;
            }
        }
    }
}

void setOutputType(IMFTransform* pDecoderTransform)
{
    GUID prefferedFormats[3] = { MFVideoFormat_P010, MFVideoFormat_NV12, MFVideoFormat_I420 };
    for (int i = 0; i < 3; ++i)
    {
        int idx = 0;
        while (true)
        {
            IMFMediaType* availableOutputType = NULL;
            HRESULT hr = pDecoderTransform->GetOutputAvailableType(0, idx++, &availableOutputType);
            if (MF_E_NO_MORE_TYPES == hr)
            {
                break;
            }
            CHECK_HR(hr, "GetOutputAvailableType Failed");
            GUID outputFormat;
            availableOutputType->GetGUID(MF_MT_SUBTYPE, &outputFormat);
            if (IsEqualGUID(outputFormat, prefferedFormats[i]))
            {
                hr = pDecoderTransform->SetOutputType(0, availableOutputType, 0);
                CHECK_HR(hr, "SetOutputType Failed");
                return;
            }
        }
    }
}

void dumpSurfaceToFile(IDirect3DSurface9 *pdxSurface, std::ofstream& outputBuffer)
{
    HRESULT hr = S_OK;
    D3DLOCKED_RECT rect;
    IDirect3DSurface9 *pdxSurfaceCopy = NULL;
    D3DSURFACE_DESC sourceDesc;
    IDirect3DDevice9* device = NULL;
    pdxSurface->GetDevice(&device);
    hr = pdxSurface->GetDesc(&sourceDesc);
    CHECK_HR(hr, "Can't get source desc");

    // create second surface used as copy
    // we need to do it via stretch rect to avoid artefacts on Win8 and Intel HD 4600
    hr = device->CreateOffscreenPlainSurface(sourceDesc.Width, sourceDesc.Height, sourceDesc.Format, D3DPOOL_DEFAULT, &pdxSurfaceCopy, NULL);
    CHECK_HR(hr, "CreateOffscreenPlainSurface");

    // copy content to copy surface using StretchRect
    hr = device->StretchRect(pdxSurface, NULL, pdxSurfaceCopy, NULL, D3DTEXF_NONE);
    CHECK_HR(hr, "StretchRect");

    // lock copy surface rectangle
    hr = pdxSurfaceCopy->LockRect(&rect, NULL, D3DLOCK_READONLY);
    CHECK_HR(hr, "pdxSurfaceCopy->LockRect");

    // copy data from rectangle
    const int nv12FrameHeight = VIDEO_SAMPLE_HEIGHT + (VIDEO_SAMPLE_HEIGHT >> 1);
    size_t rowSize = VIDEO_SAMPLE_WIDTH * (VIDEO_SAMPLE_HEIGHT_BIT_DEPTH == 8 ? 1 : 2);
    unsigned char* ptr = static_cast<unsigned char*>(rect.pBits);
    for (size_t row = 0; row < nv12FrameHeight; ++row)
    {
        if (ptr && rowSize > 0)
        {
            outputBuffer.write((const char*)ptr, rowSize);
            //fwrite(ptr, rowSize, 1, dumpFile);
        }
        ptr += rect.Pitch;
    }

    // unlock surface rect
    hr = pdxSurfaceCopy->UnlockRect();

    // release copy surface if allocated
    if (pdxSurfaceCopy)
    {
        pdxSurfaceCopy->Release();
    }
}

int _tmain(int argc, _TCHAR* argv[])
{
    std::ofstream outputBuffer(OUTPUT_FILE, std::ios::out | std::ios::binary);

    IMFSourceResolver *pSourceResolver = NULL;
    IUnknown* uSource = NULL;
    IMFMediaSource *mediaFileSource = NULL;
    IMFAttributes *pVideoReaderAttributes = NULL;
    IMFSourceReader *pSourceReader = NULL;
    MF_OBJECT_TYPE ObjectType = MF_OBJECT_INVALID;
    IMFMediaType *pFileVideoMediaType = NULL;
    IUnknown *spDecTransformUnk = NULL;
    IMFTransform *pDecoderTransform = NULL; // This is H265 Decoder MFT.
    IMFMediaType *pDecInputMediaType = NULL, *pDecOutputMediaType = NULL;
    DWORD mftStatus = 0;

    CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
    MFStartup(MF_VERSION);

    // Set up the reader for the file.
    CHECK_HR(MFCreateSourceResolver(&pSourceResolver), "MFCreateSourceResolver failed.\n");

    CHECK_HR(pSourceResolver->CreateObjectFromURL(
        INPUT_FILE,                // URL of the source.
        MF_RESOLUTION_MEDIASOURCE, // Create a source object.
        NULL,                      // Optional property store.
        &ObjectType,               // Receives the created object type.
        &uSource                   // Receives a pointer to the media source.
        ), "Failed to create media source resolver for file.\n");

    CHECK_HR(uSource->QueryInterface(IID_PPV_ARGS(&mediaFileSource)), "Failed to create media file source.\n");

    CHECK_HR(MFCreateAttributes(&pVideoReaderAttributes, 2), "Failed to create attributes object for video reader.\n");

    CHECK_HR(pVideoReaderAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID), "Failed to set dev source attribute type for reader config.\n");

    CHECK_HR(pVideoReaderAttributes->SetUINT32(MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, 1), "Failed to set enable video processing attribute type for reader config.\n");

    CHECK_HR(MFCreateSourceReaderFromMediaSource(mediaFileSource, pVideoReaderAttributes, &pSourceReader), "Error creating media source reader.\n");

    CHECK_HR(pSourceReader->GetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, &pFileVideoMediaType), "Error retrieving current media type from first video stream.\n");

    // Create H.265 decoder.
    static const MFT_REGISTER_TYPE_INFO InputTypeInformation = { MFMediaType_Video, MFVideoFormat_HEVC };
    IMFActivate** ppActivates;
    UINT32 nActivateCount = 0;
    SUCCEEDED(MFTEnumEx(MFT_CATEGORY_VIDEO_DECODER, 0, &InputTypeInformation, NULL, &ppActivates, &nActivateCount));
    if (nActivateCount > 0)
    {
        SUCCEEDED(ppActivates[0]->ActivateObject(__uuidof(IMFTransform), (VOID**)&spDecTransformUnk));
    }

    CHECK_HR(spDecTransformUnk->QueryInterface(IID_PPV_ARGS(&pDecoderTransform)), "Failed to get IMFTransform interface from H265 decoder MFT object.\n");

    MFCreateMediaType(&pDecInputMediaType);
    CHECK_HR(pFileVideoMediaType->CopyAllItems(pDecInputMediaType), "Error copying media type attributes to decoder input media type.\n");
    CHECK_HR(pDecoderTransform->SetInputType(0, pDecInputMediaType, 0), "Failed to set input media type on H.265 decoder MFT.\n");

    MFCreateMediaType(&pDecOutputMediaType);

    // Set DirectX manager
    HRESULT hr = S_OK;
    UINT resetToken = 0;
    D3DPRESENT_PARAMETERS param;
    IDirect3D9Ex* d3D9;
    IDirect3DDevice9Ex* device;
    IDirect3DDeviceManager9* deviceManager;

    memset(&param, 0, sizeof(D3DPRESENT_PARAMETERS));
    param.Windowed = TRUE;
    param.SwapEffect = D3DSWAPEFFECT_DISCARD;
    param.hDeviceWindow = ::GetShellWindow();
    param.BackBufferFormat = D3DFMT_X8R8G8B8;

    hr = Direct3DCreate9Ex(D3D_SDK_VERSION, &d3D9);
    CHECK_HR(hr, "Cannot create Direct");

    hr = d3D9->CreateDeviceEx(
        D3DADAPTER_DEFAULT,
        D3DDEVTYPE_HAL,
        param.hDeviceWindow,
        D3DCREATE_HARDWARE_VERTEXPROCESSING | D3DCREATE_MULTITHREADED,
        &param,
        NULL,
        &device);
    CHECK_HR(hr, "Unable to create D3D9 device");

    hr = DXVA2CreateDirect3DDeviceManager9(&resetToken, &deviceManager);
    CHECK_HR(hr, "Unable to create D3D9 device manager");

    hr = deviceManager->ResetDevice(device, resetToken);
    CHECK_HR(hr, "Unable to reset device");

    hr = pDecoderTransform->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, (ULONG_PTR)deviceManager);
    CHECK_HR(hr, "Unable to set D3D Manager");

    // Set Input
    setInputType(pDecoderTransform);

    // Set Output
    setOutputType(pDecoderTransform);

    CHECK_HR(pDecoderTransform->GetInputStatus(0, &mftStatus), "Failed to get input status from H.265 decoder MFT.\n");

    if (MFT_INPUT_STATUS_ACCEPT_DATA != mftStatus)
    {
        printf("H.265 decoder MFT is not accepting data.\n");
        goto done;
    }

    CHECK_HR(pDecoderTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL), "Failed to process FLUSH command on H.265 decoder MFT.\n");
    CHECK_HR(pDecoderTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL), "Failed to process BEGIN_STREAMING command on H.265 decoder MFT.\n");
    CHECK_HR(pDecoderTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL), "Failed to process START_OF_STREAM command on H.265 decoder MFT.\n");

    // Ready to start processing frames.
    MFT_OUTPUT_DATA_BUFFER outputDataBuffer;
    DWORD processOutputStatus = 0;
    IMFSample *videoSample = NULL, *reConstructedVideoSample = NULL;
    DWORD streamIndex, flags, bufferCount;
    LONGLONG llVideoTimeStamp = 0, llSampleDuration = 0, yuvVideoTimeStamp = 0, yuvSampleDuration = 0;
    HRESULT mftProcessInput = S_OK;
    HRESULT mftProcessOutput = S_OK;
    MFT_OUTPUT_STREAM_INFO StreamInfo;
    IMFSample *mftOutSample = NULL;
    IMFMediaBuffer *pBuffer = NULL, *reConstructedBuffer = NULL;
    int sampleCount = 0;
    DWORD mftOutFlags;
    DWORD sampleFlags;

    memset(&outputDataBuffer, 0, sizeof outputDataBuffer);

    while (sampleCount <= SAMPLE_COUNT)
    {
        CHECK_HR(pSourceReader->ReadSample(
            MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            0,                  // Flags.
            &streamIndex,       // Receives the actual stream index.
            &flags,             // Receives status flags.
            &llVideoTimeStamp,  // Receives the time stamp.
            &videoSample        // Receives the sample or NULL.
            ), "Error reading video sample.");

        if (flags & MF_SOURCE_READERF_STREAMTICK)
        {
            printf("Stream tick.\n");
        }

        if (videoSample)
        {
            printf("Processing sample %i.\n", sampleCount);

            CHECK_HR(videoSample->SetSampleTime(llVideoTimeStamp), "Error setting the video sample time.\n");
            CHECK_HR(videoSample->GetSampleDuration(&llSampleDuration), "Error getting video sample duration.\n");
            videoSample->GetSampleFlags(&sampleFlags);
            videoSample->GetBufferCount(&bufferCount);

            printf("Sample time %I64d, sample duration %I64d, sample flags %d, buffer count %i.\n", llVideoTimeStamp, llSampleDuration, sampleFlags, bufferCount);

            // Pass the video sample to the H.265 transform.
            //CHECK_HR(pDecoderTransform->ProcessInput(0, videoSample, 0), "The H265 decoder ProcessInput call failed.\n");

            // Extract and then re-construct the sample to simulate processing encoded H265 frames received outside of MF.
            IMFMediaBuffer *srcBuf = NULL;
            DWORD srcBufLength;
            byte *srcByteBuffer;
            DWORD srcBuffCurrLen = 0;
            DWORD srcBuffMaxLen = 0;
            CHECK_HR(videoSample->ConvertToContiguousBuffer(&srcBuf), "ConvertToContiguousBuffer failed.\n");
            CHECK_HR(srcBuf->GetCurrentLength(&srcBufLength), "Get buffer length failed.\n");
            CHECK_HR(srcBuf->Lock(&srcByteBuffer, &srcBuffMaxLen, &srcBuffCurrLen), "Error locking source buffer.\n");

            // Now re-construct.
            MFCreateSample(&reConstructedVideoSample);
            CHECK_HR(MFCreateMemoryBuffer(srcBufLength, &reConstructedBuffer), "Failed to create memory buffer.\n");
            CHECK_HR(reConstructedVideoSample->AddBuffer(reConstructedBuffer), "Failed to add buffer to re-constructed sample.\n");
            CHECK_HR(reConstructedVideoSample->SetSampleTime(llVideoTimeStamp), "Error setting the recon video sample time.\n");
            CHECK_HR(reConstructedVideoSample->SetSampleDuration(llSampleDuration), "Error setting recon video sample duration.\n");

            byte *reconByteBuffer;
            DWORD reconBuffCurrLen = 0;
            DWORD reconBuffMaxLen = 0;
            CHECK_HR(reConstructedBuffer->Lock(&reconByteBuffer, &reconBuffMaxLen, &reconBuffCurrLen), "Error locking recon buffer.\n");
            memcpy(reconByteBuffer, srcByteBuffer, srcBuffCurrLen);
            CHECK_HR(reConstructedBuffer->Unlock(), "Error unlocking recon buffer.\n");
            reConstructedBuffer->SetCurrentLength(srcBuffCurrLen);

            CHECK_HR(srcBuf->Unlock(), "Error unlocking source buffer.\n");

            CHECK_HR(pDecoderTransform->ProcessInput(0, reConstructedVideoSample, 0), "The H265 decoder ProcessInput call failed.\n");

            CHECK_HR(pDecoderTransform->GetOutputStatus(&mftOutFlags), "H265 MFT GetOutputStatus failed.\n");

            //if (mftOutFlags == MFT_OUTPUT_STATUS_SAMPLE_READY)
            //{
            CHECK_HR(pDecoderTransform->GetOutputStreamInfo(0, &StreamInfo), "Failed to get output stream info from H265 MFT.\n");
            CHECK_HR((StreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) == 1, "Output stream doesn't provide samples");

            bool process = true;
            while (process)
            {
                outputDataBuffer.dwStreamID = 0;
                outputDataBuffer.dwStatus = 0;
                outputDataBuffer.pEvents = NULL;
                outputDataBuffer.pSample = NULL;

                mftProcessOutput = pDecoderTransform->ProcessOutput(0, 1, &outputDataBuffer, &processOutputStatus);

                switch (mftProcessOutput)
                {
                case S_OK:
                {
                    IDirect3DSurface9 *pSurface = NULL;

                    // get media buffer
                    hr = outputDataBuffer.pSample->GetBufferByIndex(0, &pBuffer);
                    CHECK_HR(hr, "GetBufferByIndex failed");

                    // get the underlying DXVA2 surface
                    hr = MFGetService(pBuffer, MR_BUFFER_SERVICE, IID_PPV_ARGS(&pSurface));
                    CHECK_HR(hr, "MFGetService allocate surface failed");

                    dumpSurfaceToFile(pSurface, outputBuffer);
                    outputBuffer.flush();

                    outputDataBuffer.pSample->Release();
                    pBuffer->Release();

                    sampleCount++;
                    break;
                }
                case MF_E_TRANSFORM_STREAM_CHANGE:
                    // Set Output
                    setOutputType(pDecoderTransform);
                    break;
                case MF_E_TRANSFORM_NEED_MORE_INPUT:
                    process = false;
                    break;
                case E_FAIL:
                    break;
                default:
                    process = false;
                    break;
                }
            }
        }
    }

    outputBuffer.close();

done:

    printf("finished.\n");
    getchar();

    return 0;
}
Thanks for your help.
Have you tried the latest driver release to see if it is still an issue?
https://downloadcenter.intel.com/download/27680/Intel-Graphics-Driver-for-Windows-10