
Recording the Windows screen to YUV420P with FFmpeg and encoding it to H.264

2018-03-28 12:05
Full project download:
https://download.csdn.net/download/iot_shun/10313142
Our earlier code already implemented screen capture with gdigrab, but the saved YUV420P file is far too large: 100 frames take about 150 MB (each YUV420P frame is width × height × 1.5 bytes, so raw desktop frames add up quickly). For real-time audio/video transmission we need software or hardware encoding to compress the data. There are many video codecs (H.263, MPEG-2, H.264, and so on); here we use H.264.

Result: the 150 MB YUV file compresses down to an H.264 file of only about 491 KB. To check the recording, open the .yuv file with a YUV player, then play the .h264 file; the two look identical.

Compared with the earlier YUV420P screen recording, the only new part is configuring the H.264 encoder:

//find the H.264 encoder
pH264Codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if(!pH264Codec)
{
    fprintf(stderr, "---------h264 codec not found----\n");
    exit(1);
}
pH264CodecCtx = avcodec_alloc_context3(pH264Codec);
pH264CodecCtx->codec_id = AV_CODEC_ID_H264;
pH264CodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
pH264CodecCtx->pix_fmt = PIX_FMT_YUV420P;
pH264CodecCtx->width = pCodecCtx->width;
pH264CodecCtx->height = pCodecCtx->height;
pH264CodecCtx->time_base.num = 1;
pH264CodecCtx->time_base.den = 15;    //frame rate (frames per second)
pH264CodecCtx->bit_rate = 400000;     //bit rate (raise or lower this to trade quality for file size)
pH264CodecCtx->gop_size = 12;
//H.264 has many more options; explore them as needed
pH264CodecCtx->qmin = 10;
pH264CodecCtx->qmax = 51;
// some formats want stream headers to be separate
// (this only matters when muxing into a container; a raw .h264 file does not need it)
if (pH264CodecCtx->flags & AVFMT_GLOBALHEADER)
    pH264CodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
// Set Option
AVDictionary *param = 0;
//H.264
av_dict_set(&param, "preset", "superfast", 0);
av_dict_set(&param, "tune", "zerolatency", 0);  //low-latency, real-time encoding
if (avcodec_open2(pH264CodecCtx, pH264Codec, &param) < 0){
    printf("Failed to open video encoder!\n");
    return -1;
}

------------------------------------------------ divider ------------------------------------------------------

Then, after grabbing and decoding a frame of the screen stream, convert it to YUV420P and encode it with H.264:

sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
int y_size=pCodecCtx->width*pCodecCtx->height;
fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y
fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
//encode the YUV frame to H.264 (packetH264 receives the compressed data)
av_init_packet(packetH264);
packetH264->data = NULL;   //let the encoder allocate the output buffer
packetH264->size = 0;
int got_output = 0;
int ret1 = avcodec_encode_video2(pH264CodecCtx, packetH264, pFrameYUV, &got_output);
if(ret1 < 0){
printf("Failed to encode! \n");
return -1;
}
if(got_output)
{
ret1 = fwrite(packetH264->data, 1, packetH264->size, fp_h264);
if (ret1 < packetH264->size)
{
printf("write into output.h264 fail\n");
}
av_free_packet(packetH264);
}
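One thing the sample stops short of is draining the encoder. With tune=zerolatency x264 normally emits one packet per input frame, but an encoder is allowed to buffer frames, and any delayed packets would be lost when the capture loop ends. A minimal flush loop, sketched here with the same deprecated avcodec_encode_video2 API and the pH264CodecCtx / packetH264 / fp_h264 variables from the code above, can be run after the loop and before closing the file:

//drain any frames still buffered inside the encoder (sketch)
int got_delayed = 1;
while (got_delayed)
{
    av_init_packet(packetH264);
    packetH264->data = NULL;   //let the encoder allocate the output buffer
    packetH264->size = 0;
    //passing NULL instead of a frame asks the encoder for its delayed packets
    if (avcodec_encode_video2(pH264CodecCtx, packetH264, NULL, &got_delayed) < 0)
        break;
    if (got_delayed)
    {
        fwrite(packetH264->data, 1, packetH264->size, fp_h264);
        av_free_packet(packetH264);
    }
}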
-------------------------------------------- Full code below ---------------------------------------
#include <stdio.h>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
}
//'1' Use Dshow
//'0' Use VFW
#define USE_DSHOW 0
//Show Dshow Device
void show_dshow_device()
{
AVFormatContext *pFormatCtx = avformat_alloc_context();
AVDictionary* options = NULL;
av_dict_set(&options,"list_devices","true",0);
AVInputFormat *iformat = av_find_input_format("dshow");
printf("========Device Info=============\n");
avformat_open_input(&pFormatCtx,"video=dummy",iformat,&options);
printf("================================\n");
}
//Show Dshow Device Option
void show_dshow_device_option()
{
AVFormatContext *pFormatCtx = avformat_alloc_context();
AVDictionary* options = NULL;
av_dict_set(&options,"list_options","true",0);
AVInputFormat *iformat = av_find_input_format("dshow");
printf("========Device Option Info======\n");
avformat_open_input(&pFormatCtx,"video=Integrated Camera",iformat,&options);
printf("================================\n");
}
//Show VFW Device
void show_vfw_device()
{
AVFormatContext *pFormatCtx = avformat_alloc_context();
AVInputFormat *iformat = av_find_input_format("vfwcap");
printf("========VFW Device Info======\n");
avformat_open_input(&pFormatCtx,"list",iformat,NULL);
printf("=============================\n");
}
//Show AVFoundation Device
void show_avfoundation_device()
{
AVFormatContext *pFormatCtx = avformat_alloc_context();
AVDictionary* options = NULL;
av_dict_set(&options,"list_devices","true",0);
AVInputFormat *iformat = av_find_input_format("avfoundation");
printf("==AVFoundation Device Info===\n");
avformat_open_input(&pFormatCtx,"",iformat,&options);
printf("=============================\n");
}
int main(int argc, char* argv[])
{
AVFormatContext	*pFormatCtx;
int				i, videoindex;
AVCodecContext	*pCodecCtx;
AVCodec			*pCodec;
AVCodecContext	*pH264CodecCtx;
AVCodec			*pH264Codec;
av_register_all();
avformat_network_init();
avdevice_register_all();//Register Device
pFormatCtx = avformat_alloc_context();
//capture the screen with gdigrab
AVInputFormat *ifmt=av_find_input_format("gdigrab");
if(avformat_open_input(&pFormatCtx,"desktop",ifmt,NULL)!=0){
printf("Couldn't open input stream. ");
return -1;
}
if(avformat_find_stream_info(pFormatCtx,NULL)<0)
{
printf("Couldn't find stream information.\n");
return -1;
}
videoindex=-1;
for(i=0; i<pFormatCtx->nb_streams; i++)
    {
            if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
{
videoindex=i;
}
}
if(videoindex==-1)
    {
printf("Couldn't find a video stream.\n");
return -1;
}
//find a decoder for the video stream
pCodecCtx=pFormatCtx->streams[videoindex]->codec;
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL)
    {
printf("Codec not found.\n");
return -1;
}
//open the decoder
if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
    {
printf("Could not open codec.\n");
return -1;
}
AVFrame	*pFrame,*pFrameYUV;
pFrame=av_frame_alloc();
pFrameYUV=av_frame_alloc();
uint8_t *out_buffer=(uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
int ret, got_picture;
AVPacket *packet=(AVPacket *)av_malloc(sizeof(AVPacket));
AVPacket *packetH264=(AVPacket *)av_malloc(sizeof(AVPacket));
FILE *fp_yuv=fopen("output.yuv","wb");
FILE *fp_h264=fopen("output.h264","wb");
struct SwsContext *img_convert_ctx;
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
//print the captured video's width and height
fprintf(stderr,"w= %d h= %d\n",pCodecCtx->width, pCodecCtx->height);
//we only read 100 frames in the capture loop below
//find the H.264 encoder
pH264Codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if(!pH264Codec)
    {
fprintf(stderr, "---------h264 codec not found----\n");
exit(1);
}
pH264CodecCtx = avcodec_alloc_context3(pH264Codec);
pH264CodecCtx->codec_id = AV_CODEC_ID_H264;
pH264CodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
pH264CodecCtx->pix_fmt = PIX_FMT_YUV420P;
pH264CodecCtx->width = pCodecCtx->width;
pH264CodecCtx->height = pCodecCtx->height;
pH264CodecCtx->time_base.num = 1;
pH264CodecCtx->time_base.den = 15;//frame rate (frames per second)
pH264CodecCtx->bit_rate = 400000; //bit rate (raise or lower this to trade quality for file size)
pH264CodecCtx->gop_size=12;
//H.264 has many more options; explore them as needed
pH264CodecCtx->qmin = 10;
pH264CodecCtx->qmax = 51;
// some formats want stream headers to be separate
// (this only matters when muxing into a container; a raw .h264 file does not need it)
if (pH264CodecCtx->flags & AVFMT_GLOBALHEADER)
pH264CodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
// Set Option
AVDictionary *param = 0;
//H.264
av_dict_set(&param, "preset", "superfast", 0);
av_dict_set(&param, "tune", "zerolatency", 0);  //low-latency, real-time encoding
if (avcodec_open2(pH264CodecCtx, pH264Codec, &param) < 0){
printf("Failed to open video encoder!\n");
return -1;
}
for(int i=0;i<100;i++)
    {
//read one packet of captured screen data ---> packet
if(av_read_frame(pFormatCtx, packet) < 0)
{
break;
}
if(packet->stream_index==videoindex)
{
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if(ret < 0){
printf("Decode Error.\n");
return -1;
    }
if(got_picture)
    {
sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
int y_size=pCodecCtx->width*pCodecCtx->height;
fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y
fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
//encode the YUV frame to H.264 (packetH264 receives the compressed data)
av_init_packet(packetH264);
packetH264->data = NULL;   //let the encoder allocate the output buffer
packetH264->size = 0;
int got_output = 0;
int ret1 = avcodec_encode_video2(pH264CodecCtx, packetH264, pFrameYUV, &got_output);
if(ret1 < 0){
printf("Failed to encode! \n");
return -1;
}
if(got_output)
{
ret1 = fwrite(packetH264->data, 1, packetH264->size, fp_h264);
if (ret1 < packetH264->size)
{
printf("write into output.h264 fail\n");
}
av_free_packet(packetH264);
}
    }
}
av_free_packet(packet);
}
sws_freeContext(img_convert_ctx);
fclose(fp_yuv);
fclose(fp_h264);
av_free(out_buffer);
av_free(pFrameYUV);
avcodec_close(pH264CodecCtx);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}
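To check both output files without a dedicated YUV player, ffplay (bundled with FFmpeg) also works. The raw YUV file has no header, so pass the pixel format and the resolution that the program printed; the 1920x1080 below is only a placeholder, substitute your own desktop size:

ffplay -f rawvideo -pixel_format yuv420p -video_size 1920x1080 output.yuv
ffplay output.h264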

                                            