Media (Intel® Video Processing Library, Intel Media SDK)

Intel® Quick Sync Video H.264 Encoder, program blocked and produces no data

will_w_1
Beginner

Hi

I'm having problems running the Intel® Quick Sync Video H.264 Encoder MFT. When the hardware MFT sends the METransformHaveOutput event (EventType = 602), the sample returned by ProcessOutput is NULL, and the program stays blocked in this state. Any idea what could be wrong? Or are there any examples of using a hardware MFT?

Here are the outputs

Hardware URL Attribute:AA243E5D-2F73-48c7-97F7-F6FA17651651.
Hardware Friendly Name:Intel® Quick Sync Video H.264 Encoder MFT.
Frame 1
EventType = 601
timeStamp = 0
duration = 49816800
EventType = 601
EventType = 602

Below are the relevant parts of my code. The program is blocked at WaitForSingleObject(mEventDrainComplete, INFINITE): the MFT has already signaled METransformHaveOutput, but the sample returned by ProcessOutput is NULL.

#include "hw_enc_common.h"
#include "MpegEncoder_i.h"

#pragma unmanaged

MpegEncoder::MpegEncoder()
{
    mEventHaveInput = CreateEvent(NULL, FALSE, FALSE, NULL);
    mEventNeedInput = CreateEvent(NULL, FALSE, FALSE, NULL);
    mEventDrainComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
}

MpegEncoder::~MpegEncoder()
{
    CloseHandle(mEventHaveInput);
    CloseHandle(mEventNeedInput);
    CloseHandle(mEventDrainComplete);
}

bool MpegEncoder::Initialize()
{
    bool res;

    //try
    {
        HRESULT hr;
        TESTHR(hr = MFStartup(MF_VERSION));
        res = true;
    }
    //catch(com_error ex)
    //{
    //    res = false;
    //}

    return res;
}

bool MpegEncoder::Create(int width, int height, int rate)
{
    bool res;
    HRESULT hr;

    error = false;

    mRate = rate;
    mWidth = width;
    mHeight = height;
    mpWriter = NULL;

    //CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
    //MFStartup(MF_VERSION);

    uint64_t frame_rate = uint64_t(rate) << 32 | 1;
    uint64_t frame_size = uint64_t(width) << 32 | height;

    //mTrace = new CObTrace(L"H264Encoder_#.log");
    //mTrace->SetLogByDate();

    //try
    {
        IMFMediaTypePtr pType1;
        MFCreateMediaType(&pType1);
        TESTHR(hr = pType1->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
        TESTHR(hr = pType1->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_I420));
        TESTHR(hr = pType1->SetUINT32(MF_MT_INTERLACE_MODE, 2/*MFVideoInterlaceMode::MFVideoInterlace_Progressive*/));
        TESTHR(hr = pType1->SetUINT64(MF_MT_FRAME_RATE, frame_rate));
        TESTHR(hr = pType1->SetUINT64(MF_MT_FRAME_SIZE, frame_size));

        IMFMediaTypePtr    pType2;
        MFCreateMediaType(&pType2);
        TESTHR(hr = pType2->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
        TESTHR(hr = pType2->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12));
        TESTHR(hr = pType2->SetUINT32(MF_MT_INTERLACE_MODE, 2/*MFVideoInterlaceMode::MFVideoInterlace_Progressive*/)); 
        TESTHR(hr = pType2->SetUINT64(MF_MT_FRAME_RATE, frame_rate));
        TESTHR(hr = pType2->SetUINT64(MF_MT_FRAME_SIZE, frame_size));

        IMFMediaTypePtr    pType3;
        MFCreateMediaType(&pType3);
        TESTHR(hr = pType3->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
        TESTHR(hr = pType3->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
        TESTHR(hr = pType3->SetUINT32(MF_MT_AVG_BITRATE, 100000000));
        TESTHR(hr = pType3->SetUINT64(MF_MT_FRAME_RATE, frame_rate));
        TESTHR(hr = pType3->SetUINT64(MF_MT_FRAME_SIZE, frame_size));
        TESTHR(hr = pType3->SetUINT32(MF_MT_INTERLACE_MODE, 2/*MFVideoInterlaceMode::MFVideoInterlace_Progressive*/));
        TESTHR(hr = pType3->SetUINT32(MF_MT_MPEG2_PROFILE, 66/*eAVEncH264VProfile::eAVEncH264VProfile_Base*/));
        TESTHR(hr = pType3->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));

        ///////////////////////////////////color  convert///////////////////////////////////
        IUnknown *spH264EncoderUnk = NULL, *spColorConvertUnk = NULL;
        TESTHR(MFTRegisterLocalByCLSID(
            __uuidof(CColorConvertDMO),
            MFT_CATEGORY_VIDEO_PROCESSOR,
            L"",
            MFT_ENUM_FLAG_SYNCMFT,
            0,
            NULL,
            0,
            NULL
            ));

        // Create Color Convert
        TESTHR(CoCreateInstance(CLSID_CColorConvertDMO, NULL, CLSCTX_INPROC_SERVER,
            IID_IUnknown, (void**)&spColorConvertUnk));

        TESTHR(spColorConvertUnk->QueryInterface(IID_PPV_ARGS(&mpColorConverter)));
        //TESTHR(hr = mpColorConverter.CreateInstance(CLSID_CColorConvertDMO, mpColorConverter));
        TESTHR(hr = mpColorConverter->SetOutputType(0, pType2, 0));
        TESTHR(hr = mpColorConverter->SetInputType(0, pType1, 0));


        ///////////////////////////////////////////h264 encoder//////////////////////////////////////
        uint32_t count = 0;
        IMFActivate **ppActivate = NULL;
        TESTHR(hr = MFTEnumEx(
            MFT_CATEGORY_VIDEO_ENCODER,
            MFT_ENUM_FLAG_ASYNCMFT | MFT_ENUM_FLAG_HARDWARE    | MFT_ENUM_FLAG_LOCALMFT,
            NULL,       // Input type
            NULL,       // Output type
            &ppActivate,
            &count
            ));
        if (SUCCEEDED(hr) && count == 0)
        {
            hr = MF_E_TOPO_CODEC_NOT_FOUND;
            printf("H264 Encoder MF_E_TOPO_CODEC_NOT_FOUND\n");
        }
        // Create the first encoder in the list.
        if (SUCCEEDED(hr))
        {
            LPWSTR hardwareName = NULL;            
            TESTHR(ppActivate[0]->GetAllocatedString(MFT_ENUM_HARDWARE_URL_Attribute, &hardwareName,  NULL));
            wprintf(L"Hardware URL Attribute:%s.\n", hardwareName);
            TESTHR(ppActivate[0]->GetAllocatedString(MFT_FRIENDLY_NAME_Attribute, &hardwareName, NULL));
            wprintf(L"Hardware Friendly Name:%s.\n", hardwareName);
            TESTHR(ppActivate[0]->ActivateObject(IID_PPV_ARGS(&mpH264Encoder)));        
        }
        for (UINT32 i = 0; i < count; i++)
        {
            ppActivate[i]->Release();
        }
        CoTaskMemFree(ppActivate);

        //TESTHR(hr = mpH264Encoder.CreateInstance(L"{4BE8D3C0-0515-4A37-AD55-E4BAE19AF471}", mpH264Encoder));

        IMFAttributesPtr pAttributes;
        TESTHR(hr = mpH264Encoder->GetAttributes(&pAttributes));
        TESTHR(hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE));
        TESTHR(hr = pAttributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE));
        TESTHR(hr = mpH264Encoder->SetOutputType(0, pType3, 0));
        TESTHR(hr = mpH264Encoder->SetInputType(0, pType2, 0));

        TESTHR(hr = mpH264Encoder->QueryInterface(IID_IMFMediaEventGenerator, (void**)&mpH264EncoderEventGenerator));
        TESTHR(hr = mpH264EncoderEventGenerator->BeginGetEvent(this, NULL));

        mpWriter = NULL;

        res = true;
    }
    //catch (com_error ex)
    //{
    //    mTrace->Trace(1, L"Exception in %s(%d): 0x%08X", ex.GetFilename(), ex.GetLinenum(), ex.Error());
    //    res = false;
    //}

    return res;
}

bool MpegEncoder::Open(const wchar_t *filename)
{
    bool res;
    HRESULT hr;

    //mTrace->Trace(1, L"New file: %s", filename);

    //try
    {
        mFilename = filename;
        mpWriter = NULL;

        ResetEvent(mEventHaveInput);
        ResetEvent(mEventNeedInput);
        ResetEvent(mEventDrainComplete);
        
        TESTHR(hr = mpH264Encoder->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0));

        res = true;
    }
    //catch (com_error ex)
    //{
    //    mTrace->Trace(1, L"Exception in %s(%d): 0x%08X", ex.GetFilename(), ex.GetLinenum(), ex.Error());
    //    res = false;
    //}

    return res;
}

bool MpegEncoder::Encode(uint8_t * image, uint32_t size, uint64_t timestamp, uint64_t duration)
{
    bool res;
    //try
    {
        this->image = image;
        this->size = size;
        this->timestamp = timestamp;
        this->duration = duration;

        //mTrace->Trace(1, L"New image");

        SetEvent(mEventHaveInput);

        WaitForSingleObject(mEventNeedInput, INFINITE);

        this->image= nullptr;

        res = !error;        
    }
    //catch (com_error ex)
    //{
    //    mTrace->Trace(1, L"Exception in %s(%d): 0x%08X", ex.GetFilename(), ex.GetLinenum(), ex.Error());
    //    res = false;
    //}

    return res;
}

bool MpegEncoder::Close()
{
    bool res;
    //try
    {
        HRESULT hr;

        //mTrace->Trace(1, L"End file");

        // Retrieve the last samples that might still be in the encoder
        TESTHR(hr = mpH264Encoder->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0));
        TESTHR(hr = mpH264Encoder->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0));

        SetEvent(mEventHaveInput);

        printf("gg\n");

        WaitForSingleObject(mEventDrainComplete, INFINITE);

        printf("hh\n");

        TESTHR(hr = mpWriter->Finalize());

        mpWriter = NULL;
        newFile = true;

        res = true;
    }
    //catch (com_error ex)
    //{
    //    mTrace->Trace(1, L"Exception in %s(%d): 0x%08X", ex.GetFilename(), ex.GetLinenum(), ex.Error());
    //    res = false;
    //}

    return res;
}

STDMETHODIMP MpegEncoder::Invoke(IMFAsyncResult* pAsyncResult)
{
    HRESULT hr;
    HRESULT hStatus;
    IMFMediaEventPtr pEvent;
    MediaEventType meType;

    //try
    {
        TESTHR(hr = mpH264EncoderEventGenerator->EndGetEvent(pAsyncResult, &pEvent));
        TESTHR(hr = pEvent->GetType(&meType));
        TESTHR(hr = pEvent->GetStatus(&hStatus));
        printf("EventType = %d\n", meType);
        if (hStatus == S_OK)
        {
            if (meType == METransformNeedInput)
            {
                HRESULT hr;
                BYTE *pbBuffer;
                DWORD status;
                IMFSamplePtr pYUVSample, pNV12Sample;
                IMFMediaBufferPtr pYUVBuffer, pNV12Buffer;
                MFT_OUTPUT_STREAM_INFO streaminfo;

                //mTrace->Trace(1, L"New METransformNeedInput event");

                WaitForSingleObject(mEventHaveInput, INFINITE);

                if (image != NULL)
                {
                    TESTHR(hr = MFCreateMemoryBuffer(size, &pYUVBuffer));
                    TESTHR(hr = pYUVBuffer->Lock(&pbBuffer, NULL, NULL));
                    TESTHR(hr = MFCopyImage(pbBuffer, mWidth , image, mWidth , mWidth, mHeight*3/2));
                    TESTHR(hr = pYUVBuffer->SetCurrentLength(size));
                    TESTHR(hr = pYUVBuffer->Unlock());
                    TESTHR(hr = MFCreateSample(&pYUVSample));
                    TESTHR(hr = pYUVSample->AddBuffer(pYUVBuffer));
                    TESTHR(hr = pYUVSample->SetSampleDuration(duration));
                    TESTHR(hr = pYUVSample->SetSampleTime(timestamp));
                    TESTHR(hr = mpColorConverter->ProcessInput(0, pYUVSample, 0));

                    MFT_OUTPUT_DATA_BUFFER nv12OutputDataBuffer;
                    ZeroMemory(&nv12OutputDataBuffer, sizeof(nv12OutputDataBuffer));
                    TESTHR(hr = mpColorConverter->GetOutputStreamInfo(0, &streaminfo));
                    TESTHR(hr = MFCreateSample(&pNV12Sample));
                    TESTHR(hr = MFCreateMemoryBuffer(streaminfo.cbSize, &pNV12Buffer));
                    TESTHR(hr = pNV12Sample->AddBuffer(pNV12Buffer));
                    nv12OutputDataBuffer.pSample = pNV12Sample;
                    TESTHR(hr = mpColorConverter->ProcessOutput(0, 1, &nv12OutputDataBuffer, &status));

                    if (newFile)
                    {
                        //mTrace->Trace(1, L"Set MFSampleExtension_Discontinuity");
                        TESTHR(hr = nv12OutputDataBuffer.pSample->SetUINT32(MFSampleExtension_Discontinuity, TRUE));
                        newFile = false;
                    }
                    TESTHR(hr = mpH264Encoder->ProcessInput(0, nv12OutputDataBuffer.pSample, 0));
                }
                SetEvent(mEventNeedInput);
            }
            else if (meType == METransformHaveOutput)
            {
                DWORD status;
                MFT_OUTPUT_DATA_BUFFER h264OutputDataBuffer;
                MFT_OUTPUT_STREAM_INFO streaminfo;
                TESTHR(hr = mpH264Encoder->GetOutputStreamInfo(0, &streaminfo));
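                // Note: streaminfo.dwFlags indicates whether the MFT allocates its own output
                // samples (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES); if that flag is not set, the
                // caller has to attach a sample to the MFT_OUTPUT_DATA_BUFFER before ProcessOutput.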

                //mTrace->Trace(1, L"New METransformHaveOutput event");

                ZeroMemory(&h264OutputDataBuffer, sizeof(h264OutputDataBuffer));
                hr = mpH264Encoder->ProcessOutput(0, 1, &h264OutputDataBuffer, &status);
                if (hr == MF_E_TRANSFORM_STREAM_CHANGE)
                {
                    //mTrace->Trace(1, L"New MF_E_TRANSFORM_STREAM_CHANGE event");
                    if (h264OutputDataBuffer.dwStatus & MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE)
                    {
                        //mTrace->Trace(1, L"New MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE event");
                        // The encoder reports that its output format has changed and must be reconfigured.
                        // Read the type it currently exposes and set it again as the output type.
                        IMFMediaTypePtr pType;
                        TESTHR(hr = mpH264Encoder->GetOutputAvailableType(0, 0, &pType));
                        TESTHR(hr = mpH264Encoder->SetOutputType(0, pType, 0));
                    }
                }
                else if (hr == S_OK)
                {
                    if (mpWriter == NULL)
                    {
                        IMFMediaTypePtr pType;
                        TESTHR(hr = mpH264Encoder->GetOutputAvailableType(0, 0, &pType));

                        IMFByteStreamPtr pByteStream;
                        IMFMediaSinkPtr pMediaSink;
                        TESTHR(hr = MFCreateFile(MF_ACCESSMODE_READWRITE, MF_OPENMODE_DELETE_IF_EXIST, MF_FILEFLAGS_NONE, mFilename.c_str(), &pByteStream));
                        TESTHR(hr = MFCreateMPEG4MediaSink(pByteStream, pType, NULL, &pMediaSink));
                        TESTHR(hr = MFCreateSinkWriterFromMediaSink(pMediaSink, NULL, &mpWriter));
                        TESTHR(hr = mpWriter->BeginWriting());
                    }
                    TESTHR(hr = mpWriter->WriteSample(0, h264OutputDataBuffer.pSample));
                    h264OutputDataBuffer.pSample->Release();
                    if (h264OutputDataBuffer.pEvents != NULL)
                        h264OutputDataBuffer.pEvents->Release();
                }
                else
                    TESTHR(hr);
            }
            else if (meType == METransformDrainComplete)
            {
                //mTrace->Trace(1, L"New METransformDrainComplete event");
                TESTHR(hr = mpH264Encoder->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0));
                SetEvent(mEventDrainComplete);
            }
            else if (meType == MEError)
            {
                PROPVARIANT pValue;
                TESTHR(hr = pEvent->GetValue(&pValue));
                //mTrace->Trace(1, L"MEError, value: %u", pValue.vt);
                error = true;
                SetEvent(mEventNeedInput);
            }
            else 
            {
                PROPVARIANT pValue;
                TESTHR(hr = pEvent->GetValue(&pValue));
                //mTrace->Trace(1, L"Unknown event type: %lu, Value: %u", meType, pValue.vt);
            }
            TESTHR(hr = mpH264EncoderEventGenerator->BeginGetEvent(this, NULL));
        }
    }
    //catch(com_error ex)
    //{
    //    printf("Exception in %s(%d): 0x%08X\n", ex.GetFilename(), ex.GetLinenum(), ex.Error());
    //}

    return S_OK;
}
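
For reference, below is a minimal sketch of the METransformHaveOutput handling pattern described in the Media Foundation documentation for the case where the MFT does not allocate its own output samples. It reuses TESTHR and the smart-pointer typedefs from the code above; pEncoder is a placeholder name for the encoder transform. This is only an illustration of the API contract, not a verified fix.

    // Check whether the MFT provides output samples; if it does not, the caller
    // must attach a sample to the output data buffer before calling ProcessOutput.
    HRESULT hr = S_OK;
    MFT_OUTPUT_STREAM_INFO info = {};
    TESTHR(hr = pEncoder->GetOutputStreamInfo(0, &info));

    MFT_OUTPUT_DATA_BUFFER outBuffer = {};
    IMFSamplePtr pOutSample;
    IMFMediaBufferPtr pOutBuffer;

    if (!(info.dwFlags & (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES |
                          MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES)))
    {
        // Caller-allocated output: size the buffer from cbSize reported by the MFT.
        TESTHR(hr = MFCreateSample(&pOutSample));
        TESTHR(hr = MFCreateMemoryBuffer(info.cbSize, &pOutBuffer));
        TESTHR(hr = pOutSample->AddBuffer(pOutBuffer));
        outBuffer.pSample = pOutSample;
    }

    DWORD status = 0;
    hr = pEncoder->ProcessOutput(0, 1, &outBuffer, &status);
    // If the MFT allocated the sample itself, release outBuffer.pSample (and
    // outBuffer.pEvents, if set) after the sample has been consumed.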

Here is the information about my computer:

CPU: Intel(R) Core(TM) i7-4790

GPU: Intel HD 4600, driver version 10.18.15.4279

GPU: AMD Radeon(TM) R5 240, driver version 20.19.0.32832

4 Replies

Jiandong_Z_Intel
Employee

Hi There,

Does the MSDK sample_encode work for you? And what is your system configuration?

Thanks

Zachary

will_w_1
Beginner

Hi, Zachary

My system configuration is:

OS: Windows 10 Pro

CPU: Intel(R) Core(TM) i7-4790

GPU: Intel HD 4600, driver version 10.18.15.4279

GPU: AMD Radeon(TM) R5 240, driver version 20.19.0.32832

And where can I find the MSDK sample_encode? My local MSDK folder doesn't contain such a sample.

Thanks.

Jiandong_Z_Intel
Employee

Hi Will,

MSDK samples can be downloaded from: https://software.intel.com/en-us/media-sdk-support/code-samples

 

Thanks,

Zachary

will_w_1
Beginner

Hi, Zachary

I've successfully set up the Intel Quick Sync H.264 Encoder with ICodecAPI, and I can dynamically change the video resolution, frame rate, and profile. But when it comes to changing the bit rate, the resulting .264 bitstream shows that the setting does not take effect. The same method works well with the Windows software H264 Encoder MFT.

Here are the ICodecAPI properties I used:

CODECAPI_AVEncCommonRateControlMode = eAVEncCommonRateControlMode_CBR

CODECAPI_AVEncCommonMeanBitRate = 2000000
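
Below is a minimal sketch of how those two properties are applied at run time through ICodecAPI::SetValue. pCodecAPI is assumed to have been obtained by QueryInterface on the encoder MFT; this only illustrates the calls, it is not a verified way to make the hardware encoder pick up the new rate.

    // Sketch: switch to CBR and push a new mean bit rate through ICodecAPI.
    HRESULT hr = S_OK;
    VARIANT var;

    VariantInit(&var);
    var.vt = VT_UI4;
    var.ulVal = eAVEncCommonRateControlMode_CBR;
    TESTHR(hr = pCodecAPI->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var));

    VariantInit(&var);
    var.vt = VT_UI4;
    var.ulVal = 2000000;   // mean bit rate in bits per second
    TESTHR(hr = pCodecAPI->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var));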

So how can I dynamically change the bit rate with the Intel Quick Sync H.264 Encoder?

Thanks a lot.
