UMC::Status CVideoItem::InitScaling(int iTargetWndRight, int iTargetWndBottom, long lPitch, long lSize, long lSurfaceHeight, BYTE* pDecodeBuffer)
{
    UMC::VideoData outputVideoDataUnscaled;
    UMC::VideoData outputVideoDataScaled;
    IppStatus stateUYVYtoYUY2, stateScalingYUY2, stateYUY12toUYVY;
    size_t videoframe_size_in;
    size_t videoframe_size_out;
    IppiSize sizeSrc, sizeDst;
    IppiRect srcRoi;
    IppiRect dstRoi;
    BYTE *p_frame_YUY2_in;
    BYTE *p_frame_YUY2_4conv;
    BYTE *p_frame_YUY2_out;
    BYTE *p_frame_UYVY_out;
    int iInterpolation;
    UMC::Status umcRes;
    IppiSize roiCopy;
    long lOutputPitch;
    long lScaledPitch;

    umcRes = UMC::UMC_ERR_FAILED;

    sizeSrc.width  = m_videostreaminfo.clip_info.width;
    sizeSrc.height = m_videostreaminfo.clip_info.height;

    srcRoi.x = 0;
    srcRoi.y = 0;
    srcRoi.width  = sizeSrc.width;
    srcRoi.height = sizeSrc.height;

    sizeDst.width  = iTargetWndRight;
    sizeDst.height = iTargetWndBottom;

    videoframe_size_in  = m_videostreaminfo.clip_info.width * 2 * m_videostreaminfo.clip_info.height;
    videoframe_size_out = sizeDst.width * 2 * sizeDst.height;

    dstRoi.x = 0;   // 352
    dstRoi.y = 0;   // 288
    dstRoi.width  = sizeDst.width;
    dstRoi.height = sizeDst.height;

    p_frame_YUY2_in    = new BYTE[videoframe_size_in];
    p_frame_YUY2_4conv = new BYTE[videoframe_size_out];
    p_frame_YUY2_out   = new BYTE[videoframe_size_out];
    p_frame_UYVY_out   = new BYTE[videoframe_size_out];

    // IPPI_INTER_NN = 1,
    // IPPI_INTER_LINEAR = 2,
    // IPPI_INTER_CUBIC = 4,
    // IPPI_INTER_CUBIC2P_BSPLINE,    /* two-parameter cubic filter (B=1, C=0) */
    // IPPI_INTER_CUBIC2P_CATMULLROM, /* two-parameter cubic filter (B=0, C=1/2) */
    // IPPI_INTER_CUBIC2P_B05C03,     /* two-parameter cubic filter (B=1/2, C=3/10) */
    // IPPI_INTER_SUPER = 8,
    // IPPI_INTER_LANCZOS = 16,

    // iInterpolation = IPPI_INTER_NN;              /* works */
    // iInterpolation = IPPI_INTER_LINEAR;          /* works */
    iInterpolation = IPPI_INTER_CUBIC;              /* works */
    // iInterpolation = IPPI_INTER_CUBIC2P_BSPLINE; /* does not work */
    // iInterpolation = IPPI_INTER_SUPER;           /* does not work */
    // iInterpolation = IPPI_INTER_LANCZOS;         /* does not work */

    // disp_clip_info seems to describe the display area, not the source
    // resolution of the video itself.
    m_videostreaminfo.disp_clip_info.width  = sizeDst.width;
    m_videostreaminfo.disp_clip_info.height = sizeDst.height;

    outputVideoDataUnscaled.Init(m_videostreaminfo.clip_info.width, m_videostreaminfo.clip_info.height, m_VideoFormat);
    outputVideoDataScaled.Init(sizeDst.width, sizeDst.height, m_VideoFormat);

    // The debugger shows that Init() sets the pitch correctly; in our case the
    // pitch seems to be width * 2.
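    // Note: the IPP documentation describes IPPI_INTER_SUPER (super-sampling)
    // as suitable for downscaling only, which may be why it fails here when
    // the target window is larger than the source frame.
    // Also note that the four frame buffers allocated above with new[] are not
    // released in the code above; unless they are freed before the function
    // returns or ownership is handed off elsewhere, they leak on every call.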
    lScaledPitch = outputVideoDataScaled.GetPlanePitch(0);

    if( m_VideoFormat == UMC::YV12 )
    {
        outputVideoDataUnscaled.SetPlanePointer(pDecodeBuffer, 0);
        outputVideoDataUnscaled.SetPlanePointer(pDecodeBuffer + lPitch * lSurfaceHeight, 1);
        outputVideoDataUnscaled.SetPlanePointer(pDecodeBuffer + lPitch * lSurfaceHeight + lPitch * (lSurfaceHeight / 4), 2);
        outputVideoDataUnscaled.SetPlanePitch(lPitch,     0);
        outputVideoDataUnscaled.SetPlanePitch(lPitch / 2, 1);
        outputVideoDataUnscaled.SetPlanePitch(lPitch / 2, 2);
        outputVideoDataUnscaled.SetDataSize(lSize);
    }
    else
    {
        // This seems to be the branch that is usually taken.

        // Unscaled buffer: point it at the graphics device buffer.
        // Active if the unscaled frame is to be shown:
        // outputVideoDataUnscaled.SetBufferPointer(pDecodeBuffer, lSize);

        // Active if the scaled frame is to be shown:
        outputVideoDataUnscaled.SetBufferPointer(p_frame_YUY2_in, videoframe_size_in);
        outputVideoDataUnscaled.SetDataSize(videoframe_size_in);
        outputVideoDataUnscaled.SetPlanePitch(lPitch, 0);

        // outputVideoDataUnscaled.SetBufferPointer(p_frame_YUY2_in, videoframe_size_out);
        // outputVideoDataUnscaled.SetDataSize(videoframe_size_out);

        // Scaled buffer: takes its pitch from the unscaled one;
        // for now the scaled frame goes into a dummy buffer.
        // Active if the unscaled frame is to be shown:
        // outputVideoDataScaled.SetBufferPointer(p_frame_UYVY_out, videoframe_size_out);

        // Active if the scaled frame is to be shown:
        outputVideoDataScaled.SetBufferPointer(pDecodeBuffer, videoframe_size_out);
        outputVideoDataScaled.SetDataSize(videoframe_size_out);

        // Setting the pitch here seems unnecessary because Init() above already does it:
        // outputVideoDataScaled.SetPlanePitch(lPitch, 0);

        // Another way to put the scaled frame into the graphics device buffer,
        // but at the moment accessing pDecodeBuffer this way does not work:
        // outputVideoDataUnscaled.SetBufferPointer(p_frame_YUY2_in, videoframe_size_in);
        // outputVideoDataScaled.SetBufferPointer(pDecodeBuffer, videoframe_size_out);
    }

    if( m_pVideoDecoder == NULL )
    {
        if( m_pVideoInputData != NULL )
        {
            // Raw data: copy or flip.
            roiCopy.width  = GetPitch();
            roiCopy.height = abs(m_videostreaminfo.clip_info.height);
            lOutputPitch   = GetPitch();
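            // Note that ippiCopy_8u_C1R treats the surface as a single 8-bit
            // channel, so roiCopy.width is a size in bytes rather than pixels;
            // GetPitch() presumably already returns the row size in bytes.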
            // What is this for? It seems to deal with blend effects, reusing
            // the last or the first video frame, or filling a gap when the
            // next frame is not available in time.
            ippiCopy_8u_C1R( (Ipp8u*)(m_pVideoInputData->GetDataPointer()),
                             lPitch,
                             pDecodeBuffer,
                             lOutputPitch,
                             roiCopy );
            m_pVideoInputData = NULL;
            umcRes = UMC::UMC_OK;
        }
        else
        {
            umcRes = UMC::UMC_ERR_FAILED;
        }
    }
    else
    {
        // Move the decoded frame into our own unscaled buffer;
        // after GetFrame() the frame is part of the buffer pDecodeBuffer.
        umcRes = m_pVideoDecoder->GetFrame(m_pVideoInputData, &outputVideoDataUnscaled);

        if( (p_frame_YUY2_in    != NULL) &&
            (p_frame_YUY2_out   != NULL) &&
            (p_frame_UYVY_out   != NULL) &&
            (p_frame_YUY2_4conv != NULL)
            // && (p_frame_test != NULL)
          )
        {
            // A flag here could enable scaling.
            // Memory was allocated successfully.

            // Convert UYVY to YUY2: read from pDecodeBuffer (or the local
            // input buffer) and write into the newly allocated YUY2 buffer.
            stateUYVYtoYUY2 = ippiCbYCr422ToYCbCr422_8u_C2R(
                // active if the unscaled frame is to be shown:
                // (const Ipp8u*)pDecodeBuffer,
                // active if the scaled frame is to be shown:
                (const Ipp8u*)p_frame_YUY2_in,
                m_videostreaminfo.clip_info.width * 2,
                (Ipp8u*)p_frame_YUY2_4conv,
                m_videostreaminfo.clip_info.width * 2,
                sizeSrc );

            // Some test code follows: an attempt to place the small 352x288
            // frame into a bigger one with 800x600 resolution.
            /*
            int i_step = sizeDst.width * 2;
            Ipp16u* pDestSmallFrameInBiggerOne16u;
            Ipp8u*  pDestSmallFrameInBiggerOne;
            IppStatus stateCopyInBiggerFrame;

            pDestSmallFrameInBiggerOne16u = ippiMalloc_16u_C1(sizeDst.width, sizeDst.height, &i_step);
            pDestSmallFrameInBiggerOne    = (Ipp8u*)pDestSmallFrameInBiggerOne16u;
            if( pDestSmallFrameInBiggerOne != NULL )
            {
            */
            /*
            // Hint from Sergey (Intel): results in a green frame with a
            // corrupted 352x288 frame in the upper-left corner of the
            // 800x600 window.
            roiCopy.height = sizeSrc.height;
            roiCopy.width  = sizeSrc.width;
            stateCopyInBiggerFrame = ippiCopy_8u_C1R( (Ipp8u*)(p_frame_YUY2_4conv),
                                                      sizeSrc.width * 2,
                                                      pDestSmallFrameInBiggerOne,
                                                      sizeDst.width * 2,
                                                      roiCopy );
            */
            /*
            // My own attempt to put the 352x288 frame into an 800x600 one for
            // resizing, but it does not work well.
            // Note: p_frame_test is not defined and its memory is no longer allocated.
            BYTE* p_src;
            BYTE* p_dest;
            p_src  = p_frame_YUY2_4conv;
            p_dest = p_frame_test;
            memset(p_dest, 0, videoframe_size_out);
            for(int i_row = 0; i_row