Intel® Integrated Performance Primitives
Deliberate problems developing high-performance vision, signal, security, and storage applications.
6709 Discussions

How to get bitmap from VideoData or VideoRender

franknatoli
New Contributor I
315 Views
For decoding MPEG-4 or H.264 data, need to obtain RGB24 or RGB32 bitmap from VideoDecoder. If I understand correctly, the VideoData object in fact contains the decoded video frame, and the VideoRender object is used to push the decoded video frame into DirectX or a file writer. Have tried calling SetColorFormat to UMC::RGB32 but that appears to be overridden by the actual processing of the data. Calls to GetColorFormat return UMC::YV12. PlaneInfo indicates three planes, one that is 320x240 and two that are 160x120. Clearly that is not RGB32.

To simply extract the binary bitmap, can I completely delete all calls to render objects and operations?

And how can the VideoData object be coerced to produce an RGB24 or RGB32 bitmap?

Thanks.
0 Kudos
4 Replies
frankjnatoli
Beginner
315 Views
Tried "#if 0"-ing out all render-related code, but apparently the VideoDecoder::GetFrame that produces the VideoData output is dependent on the renderer to consume or post-process the data. Only one frame is produced when no renderer is called. Tried both GDIVideoRenderer (with m_hWnd specified) and NULLVideoRenderer, but both fail LockInputBuffer with -997 UMC_ERR_NOT_INITIALIZED. There are no examples in umc-manual.pdf for initializing and using GDIVideoRenderer or NULLVideoRenderer. Any suggestions? Or should I use pipeline and AVSync? Thanks.
0 Kudos
pushkar_p
Beginner
315 Views

In order to coerce the decoder to produce your desired output format initialize the VideoData object to the desired format:

umcRes = m_FrameOut.Init(352, 240, UMC::YUV420); // VideoData m_FrameOut

....

umcRes = m_Decoder.GetFrame(&m_FrameIn, &m_FrameOut);

This works for me and in my customized app we convert the data to RGB because the renderer expects YUV420.

0 Kudos
frankjnatoli
Beginner
315 Views
Perfect. Works great. Many thanks.

I see there is a NULLVideoRender that presumably does no work other than consume the VideoData frame and condition the VideoData object to accept the next frame from GetFrame. Working with NULLVideoRender, I find it must require some programming that FWVideoRender does not require, because LockInputBuffer fails with -998 UMC_ERR_NOT_INITIALIZED. Am trying to step through simple_player.cpp and see how AVSync generically handles the different video renderers, but haven't figured it out yet.
0 Kudos
franknatoli
New Contributor I
315 Views
Below is code that will read an MPEG-4 or MPEG-2 stream and extract an RGB32 bitmap from a VideoData object without the use of a renderer. I recompiled the UMC libraries with /ZI and /Od, built a VS2005 project using simple_player, and followed the actions of the NULLVideoRender to see what manipulation of the VideoData object was necessary to bypass the renderer entirely. I found the renderer calling SetAlignment, Init, SetBufferPointer and SetDataSize. Code below calls those functions when compiled for no renderer.

Note, when not bypassing the renderer, the below code only works correctly for FWRENDERER not GDIRENDERER nor NULLRENDERER. Although simple_player works well with GDIVideoRender and NULLVideoRender, I could never quite get either of them to work in my program. No matter, I only really care about extracting the frame bitmap from the VideoData object and that works fine below.

Thanks again for help from forum members.

// AtsPlaybackIPPProc.cpp
//

#include "stdafx.h"
#include "AtsPlaybackIPP.h"

#include "AtsPlaybackIPPDoc.h"
#include "AtsPlaybackIPPView.h"

#include "ipp.h"
#include "umc_file_reader.h"
#include "umc_fio_reader.h"
#include "umc_h264_dec.h"
#include "umc_h264_spl.h"
#include "umc_h264_timing.h"
#include "umc_mp4_spl.h"
#include "umc_mpeg2_dec.h"
#include "umc_mpeg4_video_decoder.h"
#include "umc_splitter.h"
#include "umc_structures.h"
#include "umc_threaded_demuxer.h"
#include "umc_video_data.h"
#include "umc_video_render.h"
#include "gdi_video_render.h"
#include "fw_video_render.h"
#include "null_video_renderer.h"
#include "vm_time.h"

#ifdef _DEBUG
#define new DEBUG_NEW
#endif

//#define RWRENDERER

void CAtsPlaybackIPPView::PlaybackThread(void)
{
CString str;
UMC::Status umcResult;

//-------------------------------------------------------------------------
// init file reader
//-------------------------------------------------------------------------
UMC::FIOReader reader;
UMC::FileReaderParams readerParams;
readerParams.m_portion_size = 0;
vm_string_strcpy(readerParams.m_file_name, (LPCTSTR)m_strMpegFile);
umcResult = reader.Init(&readerParams);
if (umcResult != UMC::UMC_OK)
{
str.Format("FIOReader.Init failure %d", umcResult);
AfxMessageBox(str);
return;
}

//-------------------------------------------------------------------------
// init splitter
//-------------------------------------------------------------------------
UMC::Splitter *splitter;
// identify input stream and instantiate appropriate splitter
switch (UMC::Splitter::GetStreamType(&reader))
{
case UMC::MP4_ATOM_STREAM :
splitter = (UMC::Splitter*)(new UMC::MP4Splitter());
break;
case UMC::MPEGx_SYSTEM_STREAM :
&n bsp; splitter = (UMC::Splitter*)(new UMC::ThreadedDemuxer());
break;
default :
AfxMessageBox("Splitter::GetStreamType unknown");
return;
}

// initialize splitter
UMC::SplitterParams splitterParams;
splitterParams.m_lFlags = UMC::VIDEO_SPLITTER;
splitterParams.m_pDataReader = &reader;
umcResult = splitter->Init(splitterParams);
if (umcResult != UMC::UMC_OK)
{
str.Format("Splitter::Init failure %d", umcResult);
AfxMessageBox(str);
return;
}

// run splitter
umcResult = splitter->Run();
if (umcResult != UMC::UMC_OK)
{
str.Format("Splitter::Run failure %d", umcResult);
AfxMessageBox(str);
return;
}

// get splitter info
UMC::SplitterInfo *streamInfo;
umcResult = splitter->GetInfo(&streamInfo);
if (umcResult != UMC::UMC_OK)
{
str.Format("Splitter::GetInfo failure %d", umcResult);
AfxMessageBox(str);
return;
}

// search tracks for supported video data
Ipp32u videoTrack;
for (videoTrack = 0; videoTrack < streamInfo->m_nOfTracks; videoTrack++)
{
TRACE("videoTrack %d m_type 0x%X ", videoTrack, streamInfo->m_ppTrackInfo[videoTrack]->m_Type);
if (streamInfo->m_ppTrackInfo[videoTrack]->m_Type == UMC::TRACK_MPEG4V ||
streamInfo->m_ppTrackInfo[videoTrack]->m_Type == UMC::TRACK_H264 ||
streamInfo->m_ppTrackInfo[videoTrack]->m_Type == UMC::TRACK_MPEG2V)
break;
}

// if no track with support video data found
if (videoTrack >= streamInfo->m_nOfTracks)
{
AfxMessageBox("File does not contain TRACK_MPEG4V or TRACK_H264 or TRACK_MPEG2V");
return;
}

// point to video data
UMC::VideoStreamInfo *videoTrackInfo = (UMC::VideoStreamInfo *)streamInfo->m_ppTrackInfo[videoTrack]->m_pStreamInfo;

// log video data
TRACE("m_nOfTracks %d videoTrack %d stream_type 0x%X framerate %.1f clip_info.width %d clip_info.height %d color_format %d ",
streamInfo->m_nOfTracks,
&nbs p; videoTrack,
videoTrackInfo->stream_type,
videoTrackInfo->framerate,
videoTrackInfo->clip_info.width,
videoTrackInfo->clip_info.height,
videoTrackInfo->color_format);

//-------------------------------------------------------------------------
// init video decoder
//-------------------------------------------------------------------------
UMC::VideoDecoder *decoder;
switch (videoTrackInfo->stream_type)
{
case UMC::MPEG4_VIDEO :
decoder = (UMC::VideoDecoder*)(new UMC::MPEG4VideoDecoder());
break;
case UMC::H264_VIDEO :
decoder = (UMC::VideoDecoder*)(new UMC::H264VideoDecoder());
break;
case UMC::MPEG2_VIDEO :
decoder = (UMC::VideoDecoder*)(new UMC::MPEG2VideoDecoder());
break;
default :
str.Format("stream_type %d neither MPEG4_VIDEO nor H264_VIDEO nor MPEG2_VIDEO", videoTrackInfo->stream_type);
AfxMessageBox(str);
return;
}

UMC::VideoDecoderParams decoderParams;
decoderParams.info = *videoTrackInfo;
decoderParams.numThreads = 1;
decoderParams.lFlags = UMC::FLAG_VDEC_REORDER;
decoderParams.m_pData = streamInfo->m_ppTrackInfo[videoTrack]->m_pDecSpecInfo;
umcResult = decoder->Init(&decoderParams);
if (umcResult != UMC::UMC_OK)
{
str.Format("MPEG4VideoDecoder.Init failure %d", umcResult);
AfxMessageBox(str);
return;
}

//-------------------------------------------------------------------------
// init video renderer
//-------------------------------------------------------------------------
#ifdef FWRENDERER
UMC::VideoRenderParams renderParams;
umcResult = renderParams.out_data_template.Init(
videoTrackInfo->clip_info.width,
videoTrackInfo->clip_info.height,
UMC::RGB32); // formerly videoTrackInfo->color_format
if (umcResult != UMC::UMC_OK)
{
str.Format("VideoRenderParams.Init failure %d", umcResult);
AfxMessageBox(str );
return;
}

UMC::FWVideoRender render;
umcResult = render.Init(&renderParams);
if (umcResult != UMC::UMC_OK)
{
str.Format("FWVideoRender.Init failure %d", umcResult);
AfxMessageBox(str);
return;
}
#endif
#ifdef GDIRENDERER
UMC::GDIVideoRenderParams renderParams;
umcResult = renderParams.out_data_template.Init(
videoTrackInfo->clip_info.width,
videoTrackInfo->clip_info.height,
videoTrackInfo->color_format);
if (umcResult != UMC::UMC_OK)
{
str.Format("GDIVideoRenderParams.Init failure %d", umcResult);
AfxMessageBox(str);
return;
}

renderParams.m_hWnd = this->m_hWnd;
RECT rect;
GetClientRect(&rect);
renderParams.info.width = rect.right - rect.left;
renderParams.info.height = rect.top - rect.bottom;
renderParams.disp.left = rect.left;
renderParams.disp.right = rect.right;
renderParams.disp.top = rect.top;
renderParams.disp.bottom = rect.bottom;
renderParams.range.left = rect.left;
renderParams.range.right = rect.right;
renderParams.range.top = rect.top;
renderParams.range.bottom = rect.bottom;
renderParams.info = videoTrackInfo->clip_info;

UMC::GDIVideoRender render;
umcResult = render.Init(&renderParams);
if (umcResult != UMC::UMC_OK)
{
str.Format("GDIVideoRender.Init failure %d", umcResult);
AfxMessageBox(str);
return;
}
#endif
#ifdef NULLRENDERER
UMC::VideoRenderParams renderParams;
umcResult = renderParams.out_data_template.Init(
videoTrackInfo->clip_info.width,
videoTrackInfo->clip_info.height,
UMC::RGB32); // formerly videoTrackInfo->color_format
if (umcResult != UMC::UMC_OK)
{
str.Format("VideoRenderParams.out_data_template.Init failure %d", umcResult);
AfxMessageBox(str);
  ; return;
}

RECT rect;
GetClientRect(&rect);
UMC::RECT disp;
disp.left = (Ipp16s)rect.left;
disp.right = (Ipp16s)rect.right;
disp.top = (Ipp16s)rect.top;
disp.bottom = (Ipp16s)rect.bottom;
UMC::RECT range;
range.left = (Ipp16s)rect.left;
range.right = (Ipp16s)rect.right;
range.top = (Ipp16s)rect.top;
range.bottom = (Ipp16s)rect.bottom;

renderParams.disp = disp;
renderParams.range = range;
renderParams.lFlags = 0;

UMC::NULLVideoRender render;
umcResult = render.Init(&renderParams);
if (umcResult != UMC::UMC_OK)
{
str.Format("NULLVideoRender.Init failure %d", umcResult);
AfxMessageBox(str);
return;
}

umcResult = renderParams.out_data_template.Close();
if (umcResult != UMC::UMC_OK)
{
str.Format("VideoRenderParams.out_data_template.Close failure %d", umcResult);
AfxMessageBox(str);
return;
}

render.ShowSurface();

umcResult = render.ResizeDisplay(disp, range);
if (umcResult != UMC::UMC_OK)
{
str.Format("NULLVideoRender.ResizeDisplay failure %d", umcResult);
AfxMessageBox(str);
return;
}

render.ShowSurface();
#endif
//-------------------------------------------------------------------------
// process frames
//-------------------------------------------------------------------------
UMC::MediaData dataIn;
UMC::VideoData dataOut;
#if !defined(FWRENDERER) && !defined(GDIRENDERER) && !defined(NULLRENDERER)
umcResult = dataOut.SetAlignment(1);
if (umcResult != UMC::UMC_OK)
{
str.Format("VideoData.SetAlignment failure %d", umcResult);
AfxMessageBox(str);
return;
}

umcResult = dataOut.Init(
videoTrackInfo->clip_info.width,
videoTrackInfo->clip_info.height,
UMC::RGB32); // formerly videoTrackInfo->color_format
if (umcResult != UMC::UMC_OK)
{
str.Format("VideoData.Init failure %d", umcResult);
AfxMessageBox(str);
&nbs p; return;
}

size_t frameSize = videoTrackInfo->clip_info.width * videoTrackInfo->clip_info.height * 4;
Ipp8u *lpFrame = (Ipp8u*)new BYTE[frameSize];
if (!lpFrame)
{
AfxMessageBox("Memory allocation failure");
return;
}
umcResult = dataOut.SetBufferPointer(lpFrame, frameSize);
if (umcResult != UMC::UMC_OK)
{
str.Format("VideoData.SetBufferPointer failure %d", umcResult);
AfxMessageBox(str);
return;
}

dataOut.SetDataSize(0);
#endif
vm_tick total = 0;
vm_tick freq = vm_time_get_frequency();
int nframes = 0;
int decoderWait = 0;
while (1)
{
while (1)
{
umcResult = splitter->GetNextData(&dataIn, videoTrack);
if (umcResult != UMC::UMC_ERR_NOT_ENOUGH_DATA)
break;
vm_time_sleep(5);
}

if (umcResult != UMC::UMC_OK &&
umcResult != UMC::UMC_ERR_END_OF_STREAM)
{
str.Format("splitter->GetNextData failure %d", umcResult);
AfxMessageBox(str);
break;
}
#if defined(FWRENDERER) || defined(GDIRENDERER) || defined(NULLRENDERER)
UMC::Status umcResult2 = render.LockInputBuffer(&dataOut);
if (umcResult2 != UMC::UMC_OK)
{
str.Format("render.LockInputBuffer failure %d", umcResult2);
AfxMessageBox(str);
break;
}
#endif
vm_tick t0 = vm_time_get_tick();

// if call to GetNextVideoData was entirely successful then pass MediaData input to GetFrame
if (umcResult == UMC::UMC_OK)
umcResult = decoder->GetFrame(&dataIn, &dataOut);
// if call to GetNextVideoData was not entirely successful then pass NULL input to GetFrame
else
umcResult = decod er->GetFrame(NULL, &dataOut);

vm_tick t1 = vm_time_get_tick();
total += t1 - t0;

// if call to GetFrame resulted in end of stream
if (umcResult == UMC::UMC_ERR_END_OF_STREAM)
{
AfxMessageBox("decoder.GetFrame returned UMC_ERR_END_OF_STREAM");
break;
}

// if call to GetFrame resulted in fatal error
if (umcResult != UMC::UMC_OK &&
umcResult != UMC::UMC_ERR_NOT_ENOUGH_DATA)
{
str.Format("decoder.GetFrame failure %d", umcResult);
AfxMessageBox(str);
break;
}
#if defined(FWRENDERER) || defined(GDIRENDERER) || defined(NULLRENDERER)
umcResult2 = render.UnLockInputBuffer(&dataOut);
if (umcResult2 != UMC::UMC_OK)
{
str.Format("render.UnlockInputBuffer failure %d", umcResult2);
AfxMessageBox(str);
break;
}
#endif
// if call to GetFrame was entirely successful then render output frame
if (umcResult == UMC::UMC_OK)
{
TRACE("dataOut width %d height %d planes %d color_format %d ",
dataOut.GetWidth(), dataOut.GetHeight(), dataOut.GetNumPlanes(), dataOut.GetColorFormat());

struct UMC::VideoData::PlaneInfo planeInfo;
for (int plane = 0; plane < dataOut.GetNumPlanes(); plane++)
{
dataOut.GetPlaneInfo(&planeInfo, plane);
TRACE("plane %d m_pPlane 0x%08X width %d height %d iSampleSize %d iSamples %d iBitDepth %d nPitch %d nOffset 0x%X nMemSize %u ",
plane,
planeInfo.m_pPlane,
planeInfo.m_ippSize.width,
planeInfo.m_ippSize.height,
&nbs p; planeInfo.m_iSampleSize,
planeInfo.m_iSamples,
planeInfo.m_iBitDepth,
planeInfo.m_nPitch,
planeInfo.m_nOffset,
planeInfo.m_nMemSize);

// if first plane then pass planar data to view
if (plane == 0)
{
// if size of video frame has changed
if (m_lpFrameData &&
(planeInfo.m_ippSize.width != m_nFrameWidth || planeInfo.m_ippSize.height != m_nFrameHeight))
{
delete m_lpFrameData;
m_lpFrameData = NULL;
}

// if video frame not allocated
if (!m_lpFrameData)
{
if (planeInfo.m_nMemSize != planeInfo.m_ippSize.width * planeInfo.m_ippSize.height * 4)
{
str.Format("Actual m_nMemsize %u expected %u",
planeInfo.m_nMemSize, planeInfo.m_ippSize.width * planeInfo.m_ippSize.height);
AfxMessageBox(str);
return;
}
  ; m_nFrameWidth = planeInfo.m_ippSize.width;
m_nFrameHeight = planeInfo.m_ippSize.height;
m_lpFrameData = new BYTE[planeInfo.m_ippSize.width * planeInfo.m_ippSize.height * 4];
if (!m_lpFrameData)
{
AfxMessageBox("Memory allocation failure");
return;
}
}

// copy video frame data
memcpy(m_lpFrameData, planeInfo.m_pPlane, planeInfo.m_nMemSize);

// repaint view
Invalidate(FALSE);
}
}
#if defined(FWRENDERER) || defined(GDIRENDERER) || defined(NULLRENDERER)
TRACE("frame %d delta %d FPS %.1f ",
nframes, t1 - t0, 1.0 / ((t1 - t0) / (double)freq));

Ipp64f time = -1;
while (render.GetRenderFrame(&time) == UMC::UMC_ERR_TIMEOUT)
AfxMessageBox("render.GetRenderFrame UMC_ERR_TIMEOUT");

umcResult = render.RenderFrame();
if (umcResult != UMC::UMC_OK)
{
str.Format("render.RenderFrame failure %d", umcResult);
AfxMessageBox(str);
break;
}
#else
TRACE("frame %d not rendered ", nframes);
#endif
nframes++;
&n bsp; decoderWait = 0;
}
// check for stuck at UMC_ERR_NOT_ENOUGH_DATA
else if (++decoderWait >= 100)
{
AfxMessageBox("decoder stuck at UMC_ERR_NOT_ENOUGH_DATA");
return;
}
}
TRACE("exiting ");
}

DWORD WINAPI CAtsPlaybackIPPView::StartPlaybackThread(LPVOID arg)
{
CAtsPlaybackIPPView *lpView = (CAtsPlaybackIPPView*)arg;
lpView->PlaybackThread();
return(0);
}

0 Kudos
Reply