
Decoding and displaying an H.264-encoded RTSP stream with FFmpeg on Windows

2015-04-22 08:53
Reference:

100行代码实现最简单的基于FFMPEG+SDL的视频播放器 (the simplest FFmpeg + SDL based video player in 100 lines of code)

The reference article displays frames with SDL; here, on Windows, GDI+ is used to draw the decoded pictures instead. The code is below; a usage sketch for starting the decode thread follows the listing.

// Headers: MFC for CString / CRect (atlstr.h / atltypes.h would also work),
// GDI+ for drawing, and the FFmpeg C headers wrapped in extern "C".
#include <afxwin.h>
#include <stdio.h>
#include <gdiplus.h>
#pragma comment(lib, "gdiplus.lib")
// FFmpeg import libraries from a Windows dev build
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swscale.lib")
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
}
using namespace Gdiplus;

typedef struct tagVideoData
{
    tagVideoData()
    {
        bRunning = TRUE;
        hDC = NULL;
        hWnd = NULL;
        hProc = NULL;
        rectPre.SetRectEmpty();
    }
    CString strRTSP;   // RTSP URL of the H.264 stream
    HWND    hWnd;      // window that shows the video
    BOOL    bRunning;  // cleared by the owner to stop the decode thread
    HANDLE  hProc;     // decode thread handle
    HDC     hDC;       // cached memory DC used as a back buffer
    CRect   rectPre;   // client rect at the time the back buffer was created
} VIDEODATA, *LPVIDEODATA;

HDC ShowVideoPic(AVFrame *pFrame, AVFrame *pFrameRGB, AVCodecContext *pContext,
                 HWND hWnd, HDC &hDC, CRect &rectPre);

DWORD WINAPI ShowVideoProc(LPVOID lpParameter)
{
    VIDEODATA *pData = (VIDEODATA *)lpParameter;
    if (NULL == pData)
        return 1;
    while (TRUE)
    {
        av_register_all();
        avformat_network_init();
        AVFormatContext *pFormatCtx = avformat_alloc_context();
        if (avformat_open_input(&pFormatCtx, pData->strRTSP.GetBuffer(0), NULL, NULL) != 0)
        {
            printf("Couldn't open input stream.\n");
            break;
        }
        // Closing the player while blocked here tends to crash; see the
        // interrupt-callback sketch after this function.
        if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
        {
            printf("Couldn't find stream information.\n");
            break;
        }
        if (!IsWindow(pData->hWnd))
            break;
        int i, videoindex = -1;
        AVCodecContext *pCodecCtx;
        AVCodec *pCodec;
        for (i = 0; i < pFormatCtx->nb_streams; i++)
        {
            if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
            {
                videoindex = i;
                break;
            }
        }
        if (videoindex == -1)
        {
            OutputDebugString("Didn't find a video stream.\n");
            break;
        }
        pCodecCtx = pFormatCtx->streams[videoindex]->codec;
        pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
        if (pCodec == NULL)
        {
            OutputDebugString("Codec not found.\n");
            break;
        }
        if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
        {
            OutputDebugString("Could not open codec.\n");
            break;
        }
        AVFrame *pFrame = av_frame_alloc();
        AVFrame *pFrameYUV = av_frame_alloc();
        AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
        av_dump_format(pFormatCtx, 0, pData->strRTSP.GetBuffer(0), 0);
        // Note: this context is never used below -- the conversion to BGR24 is
        // done with a cached context inside ShowVideoPic().
        struct SwsContext *img_convert_ctx;
        img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                         pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
                                         SWS_BICUBIC, NULL, NULL, NULL);
        int ret = 0;
        int got_picture = 0;
        while (av_read_frame(pFormatCtx, packet) >= 0
            && pData->bRunning
            && IsWindow(pData->hWnd))
        {
            if (packet->stream_index == videoindex)
            {
                // Decode
                ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
                if (ret < 0)
                {
                    OutputDebugString("Decode Error.\n");
                    break;
                }
                if (got_picture)
                {
                    pData->hDC = ShowVideoPic(pFrame, pFrameYUV, pCodecCtx, pData->hWnd, pData->hDC, pData->rectPre);
                    Sleep(10);
                }
            }
            av_free_packet(packet);
        }
        // FIX: flush the frames remaining in the codec by feeding it empty packets
        packet->data = NULL;
        packet->size = 0;
        while (pData->bRunning && IsWindow(pData->hWnd))
        {
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
            if (ret < 0)
                break;
            if (!got_picture)
                break;
            pData->hDC = ShowVideoPic(pFrame, pFrameYUV, pCodecCtx, pData->hWnd, pData->hDC, pData->rectPre);
        }
        sws_freeContext(img_convert_ctx);
        av_free(packet);
        av_frame_free(&pFrame);
        av_frame_free(&pFrameYUV);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatCtx);
        break;
    }
    CloseHandle(pData->hProc);
    // if (IsWindow(pData->hWnd))
    //     SendMessage(pData->hWnd, USER_END_PLAY, (WPARAM)pData, pData->bRunning);
    if (NULL != pData->hDC)
    {
        // the back-buffer bitmap is still selected into the cached DC;
        // delete the DC first, then the bitmap it held
        HBITMAP hBmp = (HBITMAP)GetCurrentObject(pData->hDC, OBJ_BITMAP);
        DeleteDC(pData->hDC);
        if (NULL != hBmp)
            DeleteObject(hBmp);
    }
    delete pData;
    return 0;
}
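The note in the original code about crashes when the player is closed while avformat_find_stream_info() is blocked points at FFmpeg's blocking network I/O. Below is a minimal sketch of one common way to make those calls abortable; DecodeInterruptCB and the wiring comments are my assumptions, not part of the original code.

// Sketch only: let the owner abort FFmpeg's blocking network calls by
// clearing bRunning, instead of killing or abandoning the thread.
static int DecodeInterruptCB(void *opaque)
{
    VIDEODATA *pData = (VIDEODATA *)opaque;
    // A non-zero return tells FFmpeg to abort the current blocking operation.
    return (NULL != pData && !pData->bRunning) ? 1 : 0;
}

// Wire it up right after avformat_alloc_context(), before avformat_open_input():
//     pFormatCtx->interrupt_callback.callback = DecodeInterruptCB;
//     pFormatCtx->interrupt_callback.opaque   = pData;

The ShowVideoPic helper below converts a decoded frame to BGR24 with libswscale and draws it into the window with GDI+ through a cached memory DC.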

HDC ShowVideoPic(AVFrame *pFrame, AVFrame *pFrameRGB, AVCodecContext *pContext,
                 HWND hWnd, HDC &hDC, CRect &rectPre)
{
    if (NULL == pFrame || NULL == pFrameRGB || NULL == pContext)
        return hDC;
    if (NULL == pFrame->data[0])
        return hDC;
    if (!IsWindow(hWnd))
        return hDC;
    if (pFrame->width != pContext->width || pFrame->height != pContext->height)
        return hDC;
    if (0 == pFrame->width || 0 == pFrame->height || !IsWindowVisible(hWnd))
        return hDC;

    CRect rect;
    GetClientRect(hWnd, &rect);
    if (rect.Width() <= 0 || rect.Height() <= 0)
        return hDC;

    int nWidthRect  = rect.Width();
    int nHeightRect = rect.Height();
    int nWidth  = pContext->width;
    int nHeight = pContext->height;

    // Scale the decoded frame down (never up) to roughly the window width while
    // keeping the aspect ratio; the width is rounded down to a multiple of 4 so
    // the BGR24 stride satisfies GDI+'s 4-byte alignment requirement.
    double dScale = (double)nWidthRect / (double)nWidth;
    dScale = max(0.1, dScale);
    dScale = min(1.0, dScale);
    nWidthRect  = (int)(nWidth * dScale);
    nWidthRect -= nWidthRect % 4;
    dScale      = (double)nWidthRect / (double)nWidth;
    nHeightRect = (int)(nHeight * dScale);

    SwsContext *img_convert_ctx;
    int nCountT = avpicture_get_size(AV_PIX_FMT_BGR24, nWidthRect, nHeightRect);
    uint8_t *pOutBuffer = new uint8_t[nCountT];
    if (avpicture_fill((AVPicture *)pFrameRGB, pOutBuffer, AV_PIX_FMT_BGR24, nWidthRect, nHeightRect) > 0)
    {
        img_convert_ctx = sws_getCachedContext(NULL, nWidth, nHeight, pContext->pix_fmt,
                                               nWidthRect, nHeightRect, AV_PIX_FMT_BGR24,
                                               SWS_BILINEAR, NULL, NULL, NULL);
        if (NULL != img_convert_ctx)
        {
            // Convert the YUV frame to packed BGR24 at the target size
            int nHeightOut = sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize,
                                       0, nHeight, pFrameRGB->data, pFrameRGB->linesize);
            if (nHeightOut > 0)
            {
                Bitmap bitmap(nWidthRect, nHeightRect, nWidthRect * 3, PixelFormat24bppRGB, pFrameRGB->data[0]);
                if (bitmap.GetLastStatus() == Ok && IsWindow(hWnd) && IsWindowVisible(hWnd))
                {
                    HDC hDCTemp = GetDC(hWnd);
                    // (Re)create the back-buffer memory DC when the client rect changes
                    if (!rectPre.EqualRect(&rect))
                    {
                        if (NULL != hDC)
                        {
                            // release the previous back-buffer bitmap together with its DC
                            HBITMAP hOld = (HBITMAP)GetCurrentObject(hDC, OBJ_BITMAP);
                            DeleteDC(hDC);
                            if (NULL != hOld)
                                DeleteObject(hOld);
                        }
                        hDC = CreateCompatibleDC(hDCTemp);
                        HBITMAP hBmp = CreateCompatibleBitmap(hDCTemp, rect.Width(), rect.Height());
                        // the bitmap stays selected into hDC for later calls,
                        // so it must not be deleted here
                        SelectObject(hDC, hBmp);
                        rectPre = rect;
                    }
                    // Draw into the back buffer with GDI+, then blit it to the window
                    Graphics graphics(hDC);
                    graphics.DrawImage(&bitmap, Rect(rect.left, rect.top, rect.Width(), rect.Height()),
                                       0, 0, nWidthRect, nHeightRect, UnitPixel);
                    StretchBlt(hDCTemp, 0, 0, rect.Width(), rect.Height(),
                               hDC, 0, 0, rect.Width(), rect.Height(), SRCCOPY);
                    ReleaseDC(hWnd, hDCTemp);
                }
            }
        }
        sws_freeContext(img_convert_ctx);
    }
    delete[] pOutBuffer;
    return hDC;
}
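For completeness, here is a minimal usage sketch. StartPlay, the GDI+ token variable, and the way the URL is passed in are placeholders of mine; the rest follows the structures above. GDI+ must be started once per process before ShowVideoPic creates Graphics/Bitmap objects, and the VIDEODATA object is deleted by ShowVideoProc itself when the thread exits.

ULONG_PTR g_gdiplusToken = 0;   // hypothetical global holding the GDI+ token

LPVIDEODATA StartPlay(HWND hWnd, const CString &strRTSP)
{
    // One-time GDI+ startup (normally done in InitInstance / WM_CREATE).
    if (0 == g_gdiplusToken)
    {
        Gdiplus::GdiplusStartupInput input;
        Gdiplus::GdiplusStartup(&g_gdiplusToken, &input, NULL);
    }

    VIDEODATA *pData = new VIDEODATA;   // freed by ShowVideoProc when the thread ends
    pData->strRTSP = strRTSP;           // e.g. an rtsp:// URL from your camera
    pData->hWnd    = hWnd;

    // Start suspended so hProc is stored before the thread can reach CloseHandle().
    pData->hProc = CreateThread(NULL, 0, ShowVideoProc, pData, CREATE_SUSPENDED, NULL);
    ResumeThread(pData->hProc);
    return pData;   // keep only to set bRunning = FALSE later; the thread deletes it
}

To stop playback, set bRunning to FALSE (or destroy the window) and let the thread clean up on its own.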