Hi,
I'm currently adding real-time h.264 encoding to software I'm working on, and I've run into a weird problem with the Intel QuickSync encoder MFT.
The code snippet below is a minified version of what I'm actually working with. It's a complete encoding test: just compile it as a console app in VS2015 and it should write a small file to C:\temp\encodetest.264 that you can watch with any player that supports raw h.264 streams. It has been tested on various Intel HD GPUs in Core i5/i7 machines as well as on NVIDIA and AMD GPUs under Windows 7 and 10, and its bigger brother also runs with Microsoft's software encoder, so we can probably assume there are no obvious mistakes in it.
One machine here acts up, though: a Dell Precision 7510, i7-6820HQ with Intel HD Graphics 530, Windows 7 Pro 64-bit, driver version 10.18.15.4287 (the Dell flavor; the official drivers won't install and there's nothing newer from Dell, so that may well be the problem already, but one can at least hope).
The issue is that the MFT happily eats up every sample I throw at ProcessInput (and never releases its reference to any of them) but never produces any output whatsoever. No METransformHaveOutput event is ever fired, and when I try to call ProcessOutput anyway it yields the very much expected E_UNEXPECTED (sorry :)).
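For reference, the drain sequence I'd expect to force out any pending output looks roughly like this (just a sketch assuming the standard async-MFT drain protocol; it's not part of the test below):

// Sketch (not in the test below): after submitting the last frame, ask the MFT to drain.
// A well-behaved async MFT should then fire METransformHaveOutput for everything it
// still has buffered and finish with METransformDrainComplete.
hr = encoder->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
// ...then keep pumping the event loop / ProcessOutput until METransformDrainComplete arrives.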
Any ideas?
Regards,
Tammo
// Media Foundation h.264 encode test, by Tammo "kb" Hinrichs, 2016-8-31
// This source code file is in the public domain.

#include <windows.h>   // CreateEvent, CreateFile, ...
#include <string.h>    // memset
#include <assert.h>
#include <stdio.h>
#include <mfidl.h>
#include <mfapi.h>
#include <mferror.h>

#pragma comment(lib, "mfplat.lib")
#pragma comment(lib, "mfuuid.lib")

#define WIDTH 1280
#define HEIGHT 720
#define FPS 30
#define FILENAME L"c:\\temp\\encodetest.264" // play with MPC-HC :)

// render something into an NV12 image buffer
void RenderImage(BYTE *mem, int w, int h)
{
    // clear Y with dark grey
    memset(mem, 0x40, w*h);
    // clear U,V
    memset(mem + w*h, 0x80, w*h / 2);

    // draw a bright square
    static int x = 0;
    BYTE *rptr = mem + 100 * w + x;
    for (int yy = 0; yy < 50; yy++)
        for (int xx = 0; xx < 50; xx++)
            rptr[w*yy + xx] = 0xf0;

    // .. and animate it
    x += 10;
    if (x + 50 > w) x = 0;
}

// IMFAsyncCallback implementation
struct EncoderCallbacks : IMFAsyncCallback
{
    EncoderCallbacks(IMFTransform *encoder)
    {
        TickEvent = CreateEvent(0, FALSE, FALSE, 0);
        encoder->QueryInterface(IID_PPV_ARGS(&Gen));
        assert(Gen);
        Gen->BeginGetEvent(this, 0);
    }

    ~EncoderCallbacks()
    {
        Gen->Release();
        CloseHandle(TickEvent);
    }

    // dummy IUnknown impl
    virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void **ppvObject) override { return E_NOTIMPL; }
    virtual ULONG STDMETHODCALLTYPE AddRef(void) override { return 1; }
    virtual ULONG STDMETHODCALLTYPE Release(void) override { return 1; }

    virtual HRESULT STDMETHODCALLTYPE GetParameters(DWORD *pdwFlags, DWORD *pdwQueue) override
    {
        // we return immediately and don't do anything except signaling another thread
        *pdwFlags = MFASYNC_SIGNAL_CALLBACK;
        *pdwQueue = MFASYNC_CALLBACK_QUEUE_IO;
        return S_OK;
    }

    virtual HRESULT STDMETHODCALLTYPE Invoke(IMFAsyncResult *pAsyncResult) override
    {
        IMFMediaEvent *event = 0;
        Gen->EndGetEvent(pAsyncResult, &event);
        if (event)
        {
            MediaEventType type;
            event->GetType(&type);
            switch (type)
            {
            case METransformNeedInput:  InterlockedIncrement(&NeedsInput); break;
            case METransformHaveOutput: InterlockedIncrement(&HasOutput);  break;
            }
            event->Release();
            SetEvent(TickEvent);
        }
        Gen->BeginGetEvent(this, 0);
        return S_OK;
    }

    IMFMediaEventGenerator *Gen = nullptr;
    HANDLE TickEvent;
    unsigned int NeedsInput = 0;
    unsigned int HasOutput = 0;
};

int main()
{
    HRESULT hr;

    // initialize MF
    hr = MFStartup(MF_VERSION);
    assert(SUCCEEDED(hr));

    // enumerate all hardware encoders
    IMFActivate **ppActivate = NULL;
    UINT32 count = 0;
    MFT_REGISTER_TYPE_INFO rtinfo = { MFMediaType_Video, MFVideoFormat_H264 };
    MFTEnumEx(MFT_CATEGORY_VIDEO_ENCODER, MFT_ENUM_FLAG_HARDWARE, NULL, &rtinfo, &ppActivate, &count);
    assert(ppActivate);

    // take first encoder that actually works
    IMFTransform *encoder = nullptr;
    for (UINT32 i = 0; i < count; i++)
    {
        if (!encoder)
            ppActivate[i]->ActivateObject(IID_PPV_ARGS(&encoder));
        ppActivate[i]->Release();
    }
    assert(encoder);
    CoTaskMemFree(ppActivate);

    // unlock async mode of encoder
    IMFAttributes *encattr = nullptr;
    UINT32 async = 0;
    encoder->GetAttributes(&encattr);
    assert(encattr);
    encattr->GetUINT32(MF_TRANSFORM_ASYNC, &async);
    assert(async);
    encattr->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, 1);

    // check stream counts just for safety
    DWORD inCount = 0, outCount = 0;
    encoder->GetStreamCount(&inCount, &outCount);
    assert(inCount == 1 && outCount == 1);

    // get stream ids
    DWORD inStrmId = 0, outStrmId = 0;
    hr = encoder->GetStreamIDs(1, &inStrmId, 1, &outStrmId);
    if (FAILED(hr)) // can happen, stream IDs are 0 then
        inStrmId = outStrmId = 0;

    // set media types (hardcoded, but first H264 out and then NV12 in works everywhere)
    IMFMediaType *outType = nullptr;
    MFCreateMediaType(&outType);
    outType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    outType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
    outType->SetUINT32(MF_MT_COMPRESSED, 1);
    outType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    MFSetAttributeSize(outType, MF_MT_FRAME_SIZE, WIDTH, HEIGHT);
    MFSetAttributeRatio(outType, MF_MT_FRAME_RATE, FPS, 1);
    MFSetAttributeRatio(outType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    hr = encoder->SetOutputType(outStrmId, outType, 0);
    assert(SUCCEEDED(hr));

    IMFMediaType *inType = nullptr;
    MFCreateMediaType(&inType);
    inType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    inType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
    inType->SetUINT32(MF_MT_COMPRESSED, 0);
    inType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    MFSetAttributeSize(inType, MF_MT_FRAME_SIZE, WIDTH, HEIGHT);
    MFSetAttributeRatio(inType, MF_MT_FRAME_RATE, FPS, 1);
    MFSetAttributeRatio(inType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    hr = encoder->SetInputType(inStrmId, inType, 0);
    assert(SUCCEEDED(hr));

    // just to be safe that the encoder handles samples the way we need
    MFT_OUTPUT_STREAM_INFO outInfo;
    encoder->GetOutputStreamInfo(outStrmId, &outInfo);
    assert(outInfo.dwFlags & (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES | MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES));

    // on your marks...
    encoder->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
    auto callbacks = new EncoderCallbacks(encoder);

    int todo = 500; // # of frames to encode
    long long duration = 10ll * 1000ll * 1000ll / ((long long)FPS); // frame duration in 100ns units
    long long timecode = 0;

#ifdef FILENAME
    HANDLE outFile = CreateFile(FILENAME, GENERIC_WRITE, 0, 0, CREATE_ALWAYS, 0, 0);
    assert(outFile != INVALID_HANDLE_VALUE);
#endif

    // get set...
    encoder->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);

    // go!
    while (todo)
    {
        // wait for any callback to come in
        WaitForSingleObject(callbacks->TickEvent, INFINITE);

        // as long as the encoder wants new input from us...
        while (callbacks->NeedsInput)
        {
            InterlockedDecrement(&callbacks->NeedsInput);
            printf("send frame @ %.3fs\n", timecode / 10000000.0);

            // let's render a frame of video into an MF buffer
            IMFMediaBuffer *buffer = nullptr;
            MFCreateMemoryBuffer(WIDTH * (HEIGHT + HEIGHT / 2), &buffer);
            BYTE *memory = nullptr;
            buffer->Lock(&memory, 0, 0);
            RenderImage(memory, WIDTH, HEIGHT);
            buffer->Unlock();

            // ... put it into a sample...
            IMFSample *sample;
            MFCreateSample(&sample);
            sample->AddBuffer(buffer);
            sample->SetSampleDuration(duration);
            sample->SetSampleTime(timecode);
            buffer->Release(); // the sample has the buffer now, we don't need it anymore
            timecode += duration;

            // ... and submit it to the encoder.
            hr = encoder->ProcessInput(inStrmId, sample, 0);
            assert(SUCCEEDED(hr));
            sample->Release(); // the encoder has the sample now, we don't need it anymore
        }

        // as long as the encoder has stuff for us to process...
        while (callbacks->HasOutput)
        {
            InterlockedDecrement(&callbacks->HasOutput);

            DWORD status = 0;
            MFT_OUTPUT_DATA_BUFFER outdata = { 0 };
            outdata.dwStreamID = outStrmId;

            // get sample from encoder
            hr = encoder->ProcessOutput(0, 1, &outdata, &status);
            if (SUCCEEDED(hr))
            {
                // for all buffers in the sample...
                DWORD bcount;
                outdata.pSample->GetBufferCount(&bcount);
                for (DWORD i = 0; i < bcount; i++)
                {
                    IMFMediaBuffer *buffer = 0;
                    outdata.pSample->GetBufferByIndex(i, &buffer);

                    // get data from buffer
                    BYTE *data;
                    DWORD length;
                    buffer->Lock(&data, 0, &length);
                    printf("got %lu bytes\n", length);
#ifdef FILENAME
                    DWORD written;
                    WriteFile(outFile, data, length, &written, 0);
                    assert(written == length); // seriously now
#endif
                    buffer->Unlock();
                    buffer->Release();
                }
            }
            else
            {
                // some encoders want to renegotiate the output format.
                if (hr == MF_E_TRANSFORM_STREAM_CHANGE && (outdata.dwStatus & MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE))
                {
                    // Let them know we don't do stuff like that.
                    hr = encoder->SetOutputType(outStrmId, outType, 0);
                    assert(SUCCEEDED(hr));
                }
                else // some other error
                    assert(0);
            }

            if (outdata.pSample) outdata.pSample->Release();
            if (outdata.pEvents) outdata.pEvents->Release();

            if (todo > 0) todo--;
        }
    }

    // we're done here...
    encoder->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
    encoder->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, 0);

#ifdef FILENAME
    CloseHandle(outFile);
#endif

    // shut down
    IMFShutdown *shutdown = 0;
    encoder->QueryInterface(&shutdown);
    if (shutdown)
    {
        shutdown->Shutdown();
        shutdown->Release();
    }
    delete callbacks;
    inType->Release();
    outType->Release();
    encoder->Release();
    encattr->Release();
    MFShutdown();
    return 0;
}
Hi Tammo,
Since the MFT works with the graphics driver directly, this issue should not be related to the Intel Media SDK. You can try to encode the same video content with the MSDK sample encode application on the same machine; that should make it easy to figure out whether this is an MSDK issue or an MFT issue.
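If building the full samples is inconvenient, a minimal check of whether a hardware Media SDK session even initializes on that machine could look roughly like this (just a sketch, assuming the Media SDK dispatcher headers and import library are installed; it is not a full encode test):

#include <mfxvideo++.h>
#include <stdio.h>

#pragma comment(lib, "libmfx.lib") // MSDK dispatcher; the exact .lib name may differ per SDK version

int main()
{
    // ask for any hardware implementation, API 1.0 or later
    mfxVersion ver = { { 0, 1 } };
    MFXVideoSession session;
    mfxStatus sts = session.Init(MFX_IMPL_HARDWARE_ANY, &ver);
    if (sts != MFX_ERR_NONE)
    {
        printf("hardware session failed to initialize: %d\n", sts);
        return 1;
    }

    mfxIMPL impl = 0;
    session.QueryIMPL(&impl);
    printf("hardware session OK, impl = 0x%x\n", impl);
    return 0;
}

If even this fails, that would point at the driver/platform rather than your Media Foundation code.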
Mark