
Decoding H.264 with ffmpeg

2015-10-26 16:38
Initialize ffmpeg

typedef struct
{
    struct AVCodec *codec;          // codec
    struct AVCodecContext *c;       // codec context
    int frame_count;
    struct AVFrame *picture;        // decoded frame
    AVPacket avpkt;

    int iWidth;
    int iHeight;
    int comsumedSize;
    int got_picture;
} Decoder_Handle;

Decoder_Handle *pHandle = (Decoder_Handle *)malloc(sizeof(Decoder_Handle));
if (pHandle == NULL)
{
    OutputDebugString(_T("-1\n"));
    return -1;
}

avcodec_register_all();

av_init_packet(&(pHandle->avpkt));

//pHandle->codec = avcodec_find_decoder(AV_CODEC_ID_MPEG4);
pHandle->codec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!pHandle->codec)
{
    OutputDebugString(_T("-2\n"));
    return -2;
}

pHandle->c = avcodec_alloc_context3(pHandle->codec);
if (!pHandle->c)
{
    OutputDebugString(_T("-3\n"));
    return -3;
}
if (pHandle->codec->capabilities & CODEC_CAP_TRUNCATED)
    pHandle->c->flags |= CODEC_FLAG_TRUNCATED;

pHandle->c->extradata = new uint8_t[30];
memset(pHandle->c->extradata, 0, 30);
pHandle->c->codec_type = AVMEDIA_TYPE_VIDEO;
pHandle->c->pix_fmt = PIX_FMT_YUV420P;
pHandle->c->time_base.num = 1;
pHandle->c->frame_number = 1;   // one video frame per packet
pHandle->c->bit_rate = 0;
pHandle->c->time_base.den = 30; // frame rate
pHandle->c->width = 320;        // video width
pHandle->c->height = 240;       // video height

//if (avcodec_open2(pHandle->c, pHandle->codec, NULL) < 0)
//{
//	OutputDebugString(_T("-4\n"));
//	return -4;
//}

pHandle->picture = av_frame_alloc();
if (!pHandle->picture)
{
    OutputDebugString(_T("-5\n"));
    return -5;
}


Receive the SPS and PPS, then append them to the AVCodecContext's extradata. Perform the following for both the SPS and the PPS:

unsigned char startcode[] = { 0, 0, 0, 1 };
memcpy(m_DecHandle->c->extradata + m_DecHandle->c->extradata_size, startcode, sizeof(startcode));
m_DecHandle->c->extradata_size += sizeof(startcode);
memcpy(m_DecHandle->c->extradata + m_DecHandle->c->extradata_size, pWSABuf->buf + HeadOffset, nRet - HeadOffset);
m_DecHandle->c->extradata_size += nRet - HeadOffset;
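
The article does not show how the SPS and PPS are recognized in the incoming stream. A common approach, sketched below under the assumption that pWSABuf->buf + HeadOffset points at the first byte of a NAL unit, is to read the NAL unit type from the low five bits of the NAL header:

// Hypothetical check before appending to extradata: NAL unit type 7 is an
// SPS and type 8 is a PPS.
unsigned char nalHeader = (unsigned char)pWSABuf->buf[HeadOffset];
unsigned char nalType = nalHeader & 0x1f;   // NAL unit type
if (nalType == 7 || nalType == 8)           // 7 = SPS, 8 = PPS
{
    // append start code + NAL unit to extradata as shown above,
    // then call avcodec_open2 once both have been received
}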


Then open the decoder:

if (avcodec_open2(m_DecHandle->c, m_DecHandle->codec, NULL) < 0)
{
    OutputDebugString(_T("-4\n"));
    return;
}
OutputDebugString(_T("decoder opened successfully"));


De-packetize the RTP payload (FU-A fragments and single NAL unit packets):

bool StreamDecode::UnpackRTPH264(void *bufIn, int len, void **pBufOut, int *pOutLen)
{
    *pOutLen = 0;
    if (len < RTP_HEADLEN)
    {
        return false;
    }

    unsigned char *src = (unsigned char *)bufIn + RTP_HEADLEN;
    unsigned char head1 = *src;             // first payload byte
    unsigned char head2 = *(src + 1);       // second payload byte
    unsigned char nal = head1 & 0x1f;       // type field of the FU indicator
    unsigned char flag = head2 & 0xe0;      // top three bits of the FU header: start, middle or end of the fragment
    unsigned char nal_fua = (head1 & 0xe0) | (head2 & 0x1f); // reconstructed NAL header for the FU-A payload
    bool bFinishFrame = false;
    if (nal == 0x1c)    // NAL type 0x1c = 28: FU-A fragmentation unit
    {
        if (flag == 0x80)       // start of a fragmented NAL unit
        {
            *pBufOut = src - 3;
            *((int *)(*pBufOut)) = 0x01000000;  // zyf: breaks on big-endian machines
            *((char *)(*pBufOut) + 4) = nal_fua;
            *pOutLen = len - RTP_HEADLEN + 3;
        }
        else if (flag == 0x40)  // end of a fragmented NAL unit
        {
            *pBufOut = src + 2;
            *pOutLen = len - RTP_HEADLEN - 2;
        }
        else                    // middle fragment
        {
            *pBufOut = src + 2;
            *pOutLen = len - RTP_HEADLEN - 2;
        }
    }
    else    // single NAL unit packet
    {
        *pBufOut = src - 4;
        *((int *)(*pBufOut)) = 0x01000000;  // zyf: breaks on big-endian machines
        *pOutLen = len - RTP_HEADLEN + 4;
    }

    return bFinishFrame;
}
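
The comments above note that writing the four-byte start code through an int cast only works on little-endian machines. Copying the bytes explicitly avoids the problem; a minimal drop-in sketch:

// endianness-independent Annex-B start code, usable instead of
// *((int *)(*pBufOut)) = 0x01000000;
static const unsigned char startCode[4] = { 0x00, 0x00, 0x00, 0x01 };
memcpy(*pBufOut, startCode, sizeof(startCode));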


After de-packetizing, the payloads are concatenated into one complete frame, which is then decoded and displayed. A minimal sketch of the reassembly step follows; Decoder_Decode after it performs the decode and display.
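
The reassembly itself is not shown in the article. A hedged sketch, assuming a caller-managed frame buffer (m_FrameBuf, m_FrameLen, bMarker, pRtpPacket and nPacketLen are hypothetical names) and using the RTP marker bit to detect the last packet of a frame:

// append each de-packetized NAL unit to a frame buffer and decode once
// the RTP marker bit signals the end of the access unit
void *pNal = NULL;
int nNalLen = 0;
UnpackRTPH264(pRtpPacket, nPacketLen, &pNal, &nNalLen);
if (nNalLen > 0)
{
    memcpy(m_FrameBuf + m_FrameLen, pNal, nNalLen);
    m_FrameLen += nNalLen;
}
if (bMarker)    // RTP marker bit set on the last packet of the frame
{
    Decoder_Decode((DECODER_H)pHandle, m_FrameBuf, m_FrameLen);
    m_FrameLen = 0;
}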

int Decoder_Decode(DECODER_H dwHandle, uint8_t *pDataIn, int nInSize)
{
    if (dwHandle <= 0)
    {
        return -1;
    }
    DDSURFACEDESC2 ddsd;
    memset(&ddsd, 0, sizeof(DDSURFACEDESC2));
    ddsd.dwSize = sizeof(ddsd);
    Decoder_Handle *pHandle = (Decoder_Handle *)dwHandle;
    HRESULT ddRval;

    av_init_packet(&(pHandle->avpkt));
    pHandle->avpkt.size = nInSize;
    pHandle->avpkt.data = pDataIn;

    pHandle->avpkt.dts = 0;
    pHandle->avpkt.flags = 1;
    pHandle->avpkt.duration = 1;
    pHandle->avpkt.priv = 0;
    unsigned char *lpSurf;

    pHandle->comsumedSize = avcodec_decode_video2(pHandle->c, pHandle->picture, &pHandle->got_picture, &(pHandle->avpkt));
    if (pHandle->comsumedSize < 0)
    {
        OutputDebugString(_T("decode failed\n"));
        return -2;
    }

    if (pHandle->got_picture)
    {
        //OutputDebugString(_T("decode and display\n"));
        CWnd *pWnd = m_dlg->GetDlgItem(IDC_PLAYWINDOW);
        CRect rc;
        if (pWnd)
        {
            pWnd->GetWindowRect(rc);
            int nWidth = pHandle->c->width;   //rc.Width();
            int nHeight = pHandle->c->height; //rc.Height();
            AVFrame *pFrameYUV = avcodec_alloc_frame();
            uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, nWidth, nHeight));
            avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, nWidth, nHeight);
            img_convert_ctx = sws_getContext(pHandle->c->width, pHandle->c->height, pHandle->c->pix_fmt, nWidth, nHeight, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
            sws_scale(img_convert_ctx, (const uint8_t *const *)pHandle->picture->data, pHandle->picture->linesize, 0, pHandle->c->height, pFrameYUV->data, pFrameYUV->linesize);
            sws_freeContext(img_convert_ctx);

            ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT;
            ddsd.ddsCaps.dwCaps = DDSCAPS_OFFSCREENPLAIN | DDSCAPS_VIDEOMEMORY; // | DDSCAPS_SYSTEMMEMORY
            ddsd.dwHeight = nHeight;
            ddsd.dwWidth = nWidth;
            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC | DDPF_YUV;
            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('Y','V','1','2'); // MAKEFOURCC('I','4','2','0')
            ddsd.ddpfPixelFormat.dwYUVBitCount = 8;

            if (DD_OK != (ddRval = lpDD->CreateSurface(&ddsd, &lpDDSOffScreen, NULL)))
            {
                OutputDebugString(_T("failed to create off-screen surface\n"));
                AfxMessageBox(_T("failed to create off-screen surface!\n"));
                return 0;
            }
            do
            {
                ddRval = lpDDSOffScreen->Lock(NULL, &ddsd, DDLOCK_WAIT | DDLOCK_WRITEONLY, NULL);
            } while (ddRval == DDERR_WASSTILLDRAWING);
            if (ddRval != DD_OK)
            {
                OutputDebugString(_T("failed to lock off-screen surface\n"));
                AfxMessageBox(_T("failed to lock off-screen surface!\n"));
            }
            lpSurf = (unsigned char *)(ddsd.lpSurface);
            LPBYTE PtrY = pFrameYUV->data[0];
            LPBYTE PtrU = pFrameYUV->data[1];
            LPBYTE PtrV = pFrameYUV->data[2];
            if (lpSurf)
            {
                int i = 0;
                // copy the Y plane row by row, honouring the surface pitch
                for (i = 0; i < ddsd.dwHeight; i++)
                {
                    memcpy(lpSurf, PtrY, ddsd.dwWidth);
                    PtrY += ddsd.dwWidth;
                    lpSurf += ddsd.lPitch;
                }
                // YV12 stores the V plane before the U plane, so copy V first
                for (i = 0; i < ddsd.dwHeight / 2; i++)
                {
                    memcpy(lpSurf, PtrV, ddsd.dwWidth / 2);
                    PtrV += ddsd.dwWidth / 2;
                    lpSurf += ddsd.lPitch / 2;
                }
                for (i = 0; i < ddsd.dwHeight / 2; i++)
                {
                    memcpy(lpSurf, PtrU, ddsd.dwWidth / 2);
                    PtrU += ddsd.dwWidth / 2;
                    lpSurf += ddsd.lPitch / 2;
                }
            }

            lpDDSOffScreen->Unlock(NULL);
            CRect LocalRect;

            GetWindowRect(pWnd->m_hWnd, &LocalRect);
            //LocalRect.bottom += (LocalRect.bottom - LocalRect.top) * 72 / 278;

            HRESULT ddres = lpDDSPrimary->Blt(&LocalRect, lpDDSOffScreen, NULL, DDBLT_WAIT, NULL);

            av_free(out_buffer);
            av_free(pFrameYUV);
            if (lpDDSOffScreen)
            {
                lpDDSOffScreen->Release();
                lpDDSOffScreen = NULL;
            }
        }
    }
    return 0;
}
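
The article ends with the display path; releasing the decoder is left out. A hedged teardown sketch for the Decoder_Handle created during initialization (the name Decoder_Destroy is an assumption), using the same legacy FFmpeg API:

// hypothetical cleanup matching the initialization shown earlier
void Decoder_Destroy(Decoder_Handle *pHandle)
{
    if (pHandle == NULL)
        return;
    if (pHandle->c)
    {
        avcodec_close(pHandle->c);
        delete[] pHandle->c->extradata;   // extradata was allocated with new[]
        pHandle->c->extradata = NULL;
        av_free(pHandle->c);              // free the codec context itself
    }
    if (pHandle->picture)
        av_frame_free(&pHandle->picture); // matches av_frame_alloc
    free(pHandle);
}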