
Using FFmpeg on Android to Parse MP4/AVI/H.264 into BMP: RGB to BMP

2017-09-05 19:23


Recently, to build up technical groundwork for a company project, I have been studying FFmpeg audio/video encoding and decoding. The project runs on Android, and although the requirements are not yet final, we need to turn MP4/AVI/H.264 video into a sequence of BMP images: first decode MP4/AVI/H.264 into RGB frames, then convert each RGB frame into a BMP. This post covers the RGB-to-BMP step.

For the MP4/AVI/H.264-to-RGB step, see the previous post: Using FFmpeg on Android to Parse MP4/AVI/H.264 into BMP: MP4/AVI/H.264 to RGB.
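Before the code, a quick note on what a 24-bit BMP looks like on disk: a 14-byte file header that starts with the two bytes 'B','M', a 40-byte info header, and then the pixel rows. The SaveAsBMP function below reproduces this layout with its own BmpHead/InfoHead structs; for reference, the standard Windows BITMAPFILEHEADER/BITMAPINFOHEADER definitions look roughly like this (a sketch using fixed-width types, not part of the original code):

#include <cstdint>

#pragma pack(push, 1)               // the headers are byte-packed on disk
struct BitmapFileHeader {           // 14 bytes in total
    uint16_t bfType;                // the ASCII bytes 'B','M'
    uint32_t bfSize;                // size of the whole file in bytes
    uint16_t bfReserved1;
    uint16_t bfReserved2;
    uint32_t bfOffBits;             // offset from the file start to the pixel data (54 here)
};
struct BitmapInfoHeader {           // 40 bytes
    uint32_t biSize;                // 40
    int32_t  biWidth;
    int32_t  biHeight;              // positive = rows stored bottom-up, negative = top-down
    uint16_t biPlanes;              // always 1
    uint16_t biBitCount;            // 24 for BGR24
    uint32_t biCompression;         // 0 = BI_RGB, i.e. uncompressed
    uint32_t biSizeImage;           // size of the pixel data in bytes
    int32_t  biXPelsPerMeter;
    int32_t  biYPelsPerMeter;
    uint32_t biClrUsed;
    uint32_t biClrImportant;
};
#pragma pack(pop)

Two practical consequences for the code below: the header fields must be 4 bytes wide (hence fixed-width integer types rather than long, which is 8 bytes on 64-bit Android), and each pixel row is supposed to be padded to a multiple of 4 bytes. The code skips row padding, which is only correct when width * 3 is already a multiple of 4 (true for common video widths such as 640, 1280 or 1920).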

Core code in native-lib.cpp

/**
 * RGB frame -> BMP
 * Saves one frame as a BMP file
 * @param pFrameRGB frame holding packed 24-bit pixel data in data[0]
 * @param width
 * @param height
 * @param index     frame number, used in the output file name
 * @return 0 on success, -1 on failure
 */

int SaveAsBMP(AVFrame *pFrameRGB, int width, int height, int index) {
    //Use fixed-width types: the on-disk header fields are 4 bytes each,
    //while long is 8 bytes on 64-bit Android and would corrupt the header.
    typedef struct {
        int32_t imageSize;
        int32_t blank;
        int32_t startPosition;
    } BmpHead;

    typedef struct {
        int32_t Length;
        int32_t width;
        int32_t height;
        uint16_t colorPlane;
        uint16_t bitColor;
        int32_t zipFormat;
        int32_t realSize;
        int32_t xPels;
        int32_t yPels;
        int32_t colorUse;
        int32_t colorImportant;
    } InfoHead;

    char filename[255];  //output path; change it to suit your own device
    sprintf(filename, "%s_%d.bmp", "/storage/emulated/0/Download/avtest/img/", index);

    BmpHead m_BMPHeader = {0};
    InfoHead m_BMPInfoHeader = {0};
    char bfType[2] = {'B', 'M'};
    int header_size = sizeof(bfType) + sizeof(BmpHead) + sizeof(InfoHead);  //2 + 12 + 40 = 54
    //rgb24_buffer is only needed for the R/B swap variant shown at the end of this post;
    //the BGR24 path below never touches it.
    unsigned char *rgb24_buffer = NULL;
    FILE *fp_bmp = NULL;
    if ((fp_bmp = fopen(filename, "wb")) == NULL) {
        LOGE("Error: Cannot open output BMP file.\n");
        return -1;
    }
    m_BMPHeader.imageSize = 3 * width * height + header_size;
    m_BMPHeader.startPosition = header_size;

    m_BMPInfoHeader.Length = sizeof(InfoHead);
    m_BMPInfoHeader.width = width;
    //BMP stores pixel rows bottom-up; a negative height tells viewers the rows are top-down,
    //which matches the order sws_scale writes them in.
    m_BMPInfoHeader.height = -height;
    m_BMPInfoHeader.colorPlane = 1;
    m_BMPInfoHeader.bitColor = 24;
    m_BMPInfoHeader.realSize = 3 * width * height;

    fwrite(bfType, 1, sizeof(bfType), fp_bmp);
    fwrite(&m_BMPHeader, 1, sizeof(m_BMPHeader), fp_bmp);
    fwrite(&m_BMPInfoHeader, 1, sizeof(m_BMPInfoHeader), fp_bmp);

    rgb24_buffer = (unsigned char *) malloc(width * height * 3);

    //Write the packed BGR24 pixels; this assumes linesize[0] == width * 3 (no row padding).
    fwrite(pFrameRGB->data[0], width * height * 24 / 8, 1, fp_bmp);
    fclose(fp_bmp);
    free(rgb24_buffer);
    return 0;
}
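If the writer ever needs to be checked on its own, a minimal sketch like the following fills a synthetic BGR24 frame and saves it. The helper name testSaveAsBMP is made up for illustration, the dimensions and gradient are arbitrary, and it assumes SaveAsBMP is visible in the same translation unit:

extern "C" {
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
}

int testSaveAsBMP() {
    const int width = 640, height = 360;
    AVFrame *frame = av_frame_alloc();
    if (frame == NULL)
        return -1;
    frame->format = AV_PIX_FMT_BGR24;
    frame->width = width;
    frame->height = height;
    //align = 1 so that linesize[0] == width * 3, matching SaveAsBMP's assumption
    if (av_image_alloc(frame->data, frame->linesize, width, height, AV_PIX_FMT_BGR24, 1) < 0) {
        av_frame_free(&frame);
        return -1;
    }
    for (int y = 0; y < height; y++) {
        uint8_t *row = frame->data[0] + y * frame->linesize[0];
        for (int x = 0; x < width; x++) {
            row[3 * x + 0] = (uint8_t) (x * 255 / width);   //B
            row[3 * x + 1] = (uint8_t) (y * 255 / height);  //G
            row[3 * x + 2] = 128;                           //R
        }
    }
    int ret = SaveAsBMP(frame, width, height, 0);  //writes ..._0.bmp to the hard-coded path
    av_freep(&frame->data[0]);
    av_frame_free(&frame);
    return ret;
}

The expected result is a blue gradient running left to right blended with a green gradient running top to bottom; if the image comes out skewed or with swapped colors, the header layout or the row-stride assumption is off.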

/***
 * Convert avi/mp4/h264 to bitmaps
 * avi/mp4/h264 -> RGB -> BMP
 */
extern "C"
JNIEXPORT jint JNICALL
Java_com_yodosmart_ffmpegdemo_MainActivity_avToBitmap(JNIEnv *env, jobject instance,
jstring input_jstr, jstring output_jstr) {
//h264ToYue
AVFormatContext *pFormatCtx;
int i, videoindex;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameRGB;
uint8_t *out_buffer;
AVPacket *packet;
int y_size;
int ret, got_picture;
struct SwsContext *img_convert_ctx;
struct SwsContext *img_convert_ctx_rgb;
FILE *fp_yuv;
int frame_cnt;
clock_t time_start, time_finish;
double time_duration = 0.0;

char input_str[500] = {0};
char output_str[500] = {0};
char info[1000] = {0};
sprintf(input_str, "%s", env->GetStringUTFChars(input_jstr, NULL));
sprintf(output_str, "%s", env->GetStringUTFChars(output_jstr, NULL));
FILE *output = fopen(output_str, "wb+");
//FFmpeg av_log() callback
av_log_set_callback(custom_log);

av_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();

if (avformat_open_input(&pFormatCtx, input_str, NULL, NULL) != 0) {
LOGE("Couldn't open input stream.\n");
return -1;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
LOGE("Couldn't find stream information.\n");
return -1;
}
videoindex = -1;
for (i = 0; i < pFormatCtx->nb_streams; i++)
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
videoindex = i;
break;
}
if (videoindex == -1) {
LOGE("Couldn't find a video stream.\n");
return -1;
}
pCodecCtx = pFormatCtx->streams[videoindex]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
LOGE("Couldn't find Codec.\n");
return -1;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
LOGE("Couldn't open codec.\n");
return -1;
}

pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
//  out_buffer = (unsigned char *) av_malloc(
//          av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1));
//  av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, out_buffer,
//       AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
out_buffer = (unsigned char *) av_malloc(
av_image_get_buffer_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1));
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, out_buffer,
AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);

packet = (AVPacket *) av_malloc(sizeof(AVPacket));

//  img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
//   pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
//   SWS_BICUBIC, NULL, NULL, NULL);
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_BGR24,
SWS_BICUBIC, NULL, NULL, NULL);

sprintf(info, "[Input     ]%s\n", input_str);
sprintf(info, "%s[Output    ]%s\n", info, output_str);
sprintf(info, "%s[Format    ]%s\n", info, pFormatCtx->iformat->name);
sprintf(info, "%s[Codec     ]%s\n", info, pCodecCtx->codec->name);
sprintf(info, "%s[Resolution]%dx%d\n", info, pCodecCtx->width, pCodecCtx->height);

//Leftover from the YUV version of this demo: this opens the same file that is already
//open as `output` above, and nothing is ever written through fp_yuv here.
fp_yuv = fopen(output_str, "wb+");
if (fp_yuv == NULL) {
printf("Cannot open output file.\n");
return -1;
}

frame_cnt = 0;
time_start = clock();

while (av_read_frame(pFormatCtx, packet) >= 0) {
if (packet->stream_index == videoindex) {
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if (ret < 0) {
LOGE("Decode Error.\n");
return -1;
}
if (got_picture) {
sws_scale(img_convert_ctx, (const uint8_t *const *) pFrame->data, pFrame->linesize,
0, pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
y_size = pCodecCtx->width * pCodecCtx->height;

//RGB conversion: dump the raw BGR24 frame and save it as a BMP
fwrite(pFrameRGB->data[0], (pCodecCtx->width) * (pCodecCtx->height) * 3, 1, output);
SaveAsBMP(pFrameRGB, pCodecCtx->width, pCodecCtx->height, frame_cnt);
//fwrite(pFrameRGB->data[0], 1, y_size, fp_yuv);    //Y
//fwrite(pFrameRGB->data[1], 1, y_size / 4, fp_yuv);  //U
//fwrite(pFrameRGB->data[2], 1, y_size / 4, fp_yuv);  //V
//Output info
char pictype_str[10] = {0};
switch (pFrame->pict_type) {
case AV_PICTURE_TYPE_I:
sprintf(pictype_str, "I");
break;
case AV_PICTURE_TYPE_P:
sprintf(pictype_str, "P");
break;
case AV_PICTURE_TYPE_B:
sprintf(pictype_str, "B");
break;
default:
sprintf(pictype_str, "Other");
break;
}
LOGI("Frame Index: %5d. Type:%s", frame_cnt, pictype_str);
frame_cnt++;
}
}
av_free_packet(packet);
}

//flush decoder
//FIX: Flush Frames remained in Codec
while (1) {
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if (ret < 0)
break;
if (!got_picture)
break;
//  sws_scale(img_convert_ctx, (const uint8_t *const *) pFrame->data, pFrame->linesize, 0,
//  pCodecCtx->height,
//  pFrameRGB->data, pFrameRGB->linesize);
//  int y_size = pCodecCtx->width * pCodecCtx->height;

sws_scale(img_convert_ctx, (const uint8_t *const *) pFrame->data, pFrame->linesize, 0,
pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
int y_size = pCodecCtx->width * pCodecCtx->height;
//RGB conversion: dump the raw BGR24 frame and save it as a BMP
fwrite(pFrameRGB->data[0], (pCodecCtx->width) * (pCodecCtx->height) * 3, 1, output);

SaveAsBMP(pFrameRGB, pCodecCtx->width, pCodecCtx->height, frame_cnt);
//      fwrite(pFrameRGB->data[0], 1, y_size, fp_yuv);    //Y
//      fwrite(pFrameRGB->data[1], 1, y_size / 4, fp_yuv);  //U
//      fwrite(pFrameRGB->data[2], 1, y_size / 4, fp_yuv);  //V
//Output info
char pictype_str[10] = {0};
switch (pFrame->pict_type) {
case AV_PICTURE_TYPE_I:
sprintf(pictype_str, "I");
break;
case AV_PICTURE_TYPE_P:
sprintf(pictype_str, "P");
break;
case AV_PICTURE_TYPE_B:
sprintf(pictype_str, "B");
break;
default:
sprintf(pictype_str, "Other");
break;
}
LOGI("Frame Index: %5d. Type:%s", frame_cnt, pictype_str);
frame_cnt++;
}
time_finish = clock();
time_duration = (double) (time_finish - time_start) / CLOCKS_PER_SEC * 1000;  //clock ticks -> ms

sprintf(info, "%s[Time      ]%fms\n", info, time_duration);
sprintf(info, "%s[Count     ]%d\n", info, frame_cnt);

sws_freeContext(img_convert_ctx);

fclose(fp_yuv);
fclose(output);   //also close the raw BGR24 dump opened at the top

av_free(out_buffer);
av_free(packet);
av_frame_free(&pFrameRGB);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return frame_cnt;
}
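One more note on the decode loop itself: avcodec_decode_video2 has since been deprecated. On newer FFmpeg releases (3.1 and later) the same per-packet step can be written with the send/receive API. The following is only a rough sketch of that shape (the helper name decode_packet is made up here), not drop-in code for the FFmpeg build used in this article:

//One packet in, zero or more frames out; each received frame would then go through
//the same sws_scale() to BGR24 and SaveAsBMP() calls as in the loop above.
int decode_packet(AVCodecContext *pCodecCtx, AVPacket *packet, AVFrame *pFrame) {
    int ret = avcodec_send_packet(pCodecCtx, packet);  //pass NULL at end of stream to flush
    if (ret < 0)
        return ret;
    for (;;) {
        ret = avcodec_receive_frame(pCodecCtx, pFrame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;        //decoder needs more input, or is fully drained
        if (ret < 0)
            return ret;      //a real decoding error
        //... convert pFrame with sws_scale() and call SaveAsBMP() here ...
    }
}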


Note: what actually gets written is BGR -> BMP. A 24-bit BMP stores each pixel as B, G, R bytes, which is why the code above asks sws_scale for AV_PIX_FMT_BGR24 and can write pFrameRGB->data[0] straight into the file.

If your pixel buffer is in RGB order instead, swap the R and B channels first with something like the code below (here rgb24_buffer stands for the 24-bit pixel buffer, e.g. a copy of pFrameRGB->data[0]):

//BMP stores R1|G1|B1,R2|G2|B2 as B1|G1|R1,B2|G2|R2,
//so exchange the 'R' and 'B' byte of every pixel.
for (int j = 0; j < height; j++) {
    for (int i = 0; i < width; i++) {
        char temp = rgb24_buffer[(j * width + i) * 3 + 2];
        rgb24_buffer[(j * width + i) * 3 + 2] = rgb24_buffer[(j * width + i) * 3 + 0];
        rgb24_buffer[(j * width + i) * 3 + 0] = temp;
    }
}


Core code in MainActivity.java

static {
System.loadLibrary("native-lib");
}

public native int avToBitmap(String input_jstr, String output_jstr);

/**
 * Confirm button: start the conversion
 */
private void conversion(final TextView tvRoute) {
dialog.showDialog();
String[] videoInfo = tvRoute.getText().toString().split("/");
String fileName = videoInfo[videoInfo.length - 1];
String filePath = tvRoute.getText().toString().replace(fileName, "");
final String[] fileNames = fileName.split("\\.");
new Thread(new Runnable() {
public void run() {
picNum = avToBitmap(tvRoute.getText().toString(), "/storage/emulated/0/Download/avtest/" + fileNames[0] + ".rgb");
//conversion succeeded
if (picNum >= 0) {
handler.sendEmptyMessage(0);
} else {
handler.sendEmptyMessage(1);
}
}
}).start();
}

Handler handler = new Handler(new Handler.Callback() {
@Override
public boolean handleMessage(Message message) {
if (message.what == 0) {
dealResult();
} else if (message.what == 1) {
Toast.makeText(MainActivity.this, "Error", Toast.LENGTH_SHORT).show();
dialog.dismiss();
}
return false;
}
});

private void dealResult() {
for (int i = 0; i < picNum; i++) {
dataImage.add("/storage/emulated/0/Download/avtest/img/_" + i + ".bmp");
}
adapterImage.notifyDataSetChanged();
dialog.dismiss();
}


The conversion takes a fairly long time, so it runs on a worker thread and returns the number of BMP images produced; the Handler then updates the UI, mainly refreshing the RecyclerView that displays them.

Code download

References

http://blog.csdn.net/leixiaohua1020/article/details/50534150
Tags: android ffmpeg bmp rgb