Android 的视频编码 H263 MP4V H264
2013-12-31 14:40
253 查看
编码器使用的是x264的开源库,
很容易看懂的
简单的封装了一个JNI库
编码库在BBS里 CSDN的资源太难用了
http://www.eoeandroid.com/forum.php?mod=viewthread&tid=52739&extra=
x264的编译方法
export ARM_ROOT=$ANDROID_NDK_ROOT
export ARM_INC=$ARM_ROOT/build/platforms/android-5/arch-arm/usr/include/
export ARM_LIB=$ARM_ROOT/build/platforms/android-5/arch-arm/usr/lib/
export ARM_TOOL=$ARM_ROOT/build/prebuilt/windows/arm-eabi-4.4.0
export ARM_LIBO=$ARM_TOOL/lib/gcc/arm-eabi/4.4.0
export PATH=$ARM_TOOL/bin:$PATH
export ARM_PRE=arm-eabi
./configure --prefix=/home/egmkang/libx264 --enable-shared \
--disable-asm --host=arm-linux --cross-prefix=arm-eabi- \
--extra-cflags=" -I$ARM_INC -fPIC -DANDROID -fpic -mthumb-interwork -ffunction-sections -funwind-tables -fstack-protector -fno-short-enums -D__ARM_ARCH_5__ -D__ARM_ARCH_5T__ -D__ARM_ARCH_5E__ -D__ARM_ARCH_5TE__ -Wno-psabi -march=armv5te -mtune=xscale -msoft-float
-mthumb -Os -fomit-frame-pointer -fno-strict-aliasing -finline-limit=64 -DANDROID -Wa,--noexecstack -MMD -MP " \
--extra-ldflags="-nostdlib -Bdynamic -Wl,--no-undefined -Wl,-z,noexecstack -Wl,-z,nocopyreloc -Wl,-soname,/system/lib/libz.so -Wl,-rpath-link=$ARM_LIB,-dynamic-linker=/system/bin/linker -L$ARM_LIB -nostdlib $ARM_LIB/crtbegin_dynamic.o $ARM_LIB/crtend_android.o
-lc -lm -ldl -lgcc"
这里生成的是x264的静态库
整个工程唯一有点麻烦的是 生成 JNI 动态库的时候 报错 。。
后来发现是少链接了一个库,
于是根据x264的编译方法 在Android.mk添加一些配置就可以了。当然这就是难点,在网上查了很多都没有结果。
有些目录的参数自己调整哈
我把前面生成的libx264.a 和 x264.h 文件放到jni的libx264目录下了 有问题自己调整
# Builds the H264Android JNI wrapper as a shared library, statically
# linking the prebuilt libx264.a placed under jni/libx264/.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Headers for x264 (x264.h) live in jni/libx264/include.
LOCAL_C_INCLUDES +=$(LOCAL_PATH)/libx264/include
LOCAL_MODULE := H264Android
LOCAL_SRC_FILES := H264Android.c
# Link the static x264 library directly into the shared library.
LOCAL_LDFLAGS += $(LOCAL_PATH)/libx264/lib/libx264.a
# libgcc resolves compiler-runtime symbols pulled in by x264 (the missing
# library mentioned in the article text above).
LOCAL_LDLIBS := -L$(SYSROOT)/usr/lib -lgcc
include $(BUILD_SHARED_LIBRARY)
估计很多人都会发现很熟悉 嘻嘻 这个就是根据
http://www.cnblogs.com/mcodec/articles/1780598.html
改的 连文件名字都没有换!!比较懒
另: 编码的效率很低下啊
AndroidVideo.java
[java] view
plaincopy
import java.io.File;
import java.io.RandomAccessFile;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.content.res.Configuration;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;
import android.view.SurfaceHolder.Callback;
import android.graphics.PixelFormat;
import android.hardware.Camera;
/**
 * Fullscreen camera preview Activity.
 *
 * Opens the camera when the preview surface is created and pushes 352x288
 * (CIF) preview frames into an {@link H264Encoder}, which compresses them
 * to H.264 through JNI and appends the result to /sdcard/camera.h264.
 */
public class AndroidVideo extends Activity implements Callback,
        Camera.PictureCallback {
    private static final String TAG = "AndroidVideo";
    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Camera mCamera = null;
    // True once startPreview() has been called, so surfaceChanged() can stop
    // a running preview before reconfiguring the camera.
    private boolean mPreviewRunning = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Fullscreen, titleless window with a translucent surface format.
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.camera);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        // Required for camera preview surfaces on pre-Honeycomb devices.
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        // Still capture is unused; only preview frames are encoded.
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        if (mPreviewRunning) {
            mCamera.stopPreview();
        }
        Camera.Parameters p = mCamera.getParameters();
        // The preview size must match the dimensions the encoder was
        // initialized with, otherwise the native side reads out of bounds.
        p.setPreviewSize(352, 288);
        mCamera.setPreviewCallback(new H264Encoder(352, 288));
        mCamera.setParameters(p);
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (Exception ex) {
            // BUG FIX: previously swallowed silently; log so failures show up.
            Log.e(TAG, "setPreviewDisplay failed", ex);
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (mCamera != null) {
            // Detach the callback first so no frame lands on a released camera.
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mPreviewRunning = false;
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        try {
            super.onConfigurationChanged(newConfig);
            if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
                // No-op: the layout is identical in both orientations.
            } else if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
                // No-op: the layout is identical in both orientations.
            }
        } catch (Exception ex) {
            // BUG FIX: previously swallowed silently.
            Log.e(TAG, "onConfigurationChanged failed", ex);
        }
    }
}
/**
 * Camera preview callback that feeds each preview frame to the native x264
 * wrapper (libH264Android.so) and appends the resulting H.264 NAL units to
 * /sdcard/camera.h264.
 */
class H264Encoder implements Camera.PreviewCallback {
    // Opaque pointer to the native Encoder state; 0 means init failed.
    long encoder = 0;
    // Output file; null if opening /sdcard/camera.h264 failed.
    RandomAccessFile raf = null;
    // Scratch buffer for one compressed frame (generous upper bound).
    byte[] h264Buff = null;

    static {
        System.loadLibrary("H264Android");
    }

    private H264Encoder() {
    }

    /**
     * @param width  frame width in pixels; must match the camera preview size
     * @param height frame height in pixels; must match the camera preview size
     */
    public H264Encoder(int width, int height) {
        encoder = CompressBegin(width, height);
        h264Buff = new byte[width * height * 8];
        try {
            File file = new File("/sdcard/camera.h264");
            raf = new RandomAccessFile(file, "rw");
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        }
    }

    /**
     * Best-effort cleanup of the native encoder and the output file.
     * NOTE(review): finalize() is unreliable on Android; an explicit close
     * method would be preferable. Kept for compatibility with callers.
     */
    protected void finalize() {
        // BUG FIX: do not hand a 0 handle (failed init) to native code.
        if (encoder != 0) {
            CompressEnd(encoder);
            encoder = 0;
        }
        if (null != raf) {
            try {
                raf.close();
            } catch (Exception ex) {
                Log.v("System.out", ex.toString());
            }
        }
        try {
            super.finalize();
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    private native long CompressBegin(int width, int height);

    private native int CompressBuffer(long encoder, int type, byte[] in,
            int insize, byte[] out);

    private native int CompressEnd(long encoder);

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // BUG FIX: guard against failed native init (encoder == 0 crashed
        // natively) and a failed file open (raf == null threw an NPE).
        if (encoder == 0 || raf == null) {
            return;
        }
        // type -1 lets x264 pick the frame type (X264_TYPE_AUTO).
        int result = CompressBuffer(encoder, -1, data, data.length, h264Buff);
        try {
            if (result > 0)
                raf.write(h264Buff, 0, result);
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        }
    }
}
H264Android.c
[cpp] view
plaincopy
#include <string.h>
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <arpa/inet.h>
#include <x264.h>
#define DATA_MAX 3000000
#define H264_MTU 1024
/*
 * Per-session encoder state created by CompressBegin and destroyed by
 * CompressEnd; passed across JNI as an opaque jlong handle.
 */
typedef struct
{
x264_param_t * param; /* encoding parameters (heap-allocated) */
x264_t *handle; /* opened x264 encoder instance */
x264_picture_t * picture; /* reusable input picture (I420 planes) */
x264_nal_t *nal; /* output NAL array filled by x264_encoder_encode */
} Encoder;
/*
 * Create an x264 encoder for width x height I420 frames.
 * Returns the heap-allocated Encoder state as an opaque jlong,
 * or 0 if any allocation or the encoder open fails.
 */
jlong Java_h264_com_H264Encoder_CompressBegin(JNIEnv* env, jobject thiz,
		jint width, jint height) {
	Encoder * en = (Encoder *) malloc(sizeof(Encoder));
	if (en == 0) {
		return 0;
	}
	en->handle = 0;
	en->nal = 0;
	en->param = (x264_param_t *) malloc(sizeof(x264_param_t));
	/* BUG FIX: the original cast this allocation to x264_param_t*. */
	en->picture = (x264_picture_t *) malloc(sizeof(x264_picture_t));
	if (en->param == 0 || en->picture == 0) {
		free(en->param);
		free(en->picture);
		free(en);
		return 0;
	}
	x264_param_default(en->param); /* set default param */
	//en->param->rc.i_rc_method = X264_RC_CQP;
	en->param->i_log_level = X264_LOG_NONE;
	en->param->i_width = width; /* set frame width */
	en->param->i_height = height; /* set frame height */
	/* Low-latency settings: no lookahead, no B-frames, 5 fps. */
	en->param->rc.i_lookahead = 0;
	en->param->i_bframe = 0;
	en->param->i_fps_num = 5;
	en->param->i_fps_den = 1;
	if ((en->handle = x264_encoder_open(en->param)) == 0) {
		/* BUG FIX: the original leaked en, en->param and en->picture here. */
		free(en->param);
		free(en->picture);
		free(en);
		return 0;
	}
	/* Create a new pic */
	x264_picture_alloc(en->picture, X264_CSP_I420, en->param->i_width,
			en->param->i_height);
	return (jlong) en;
}
/*
 * Release everything allocated by CompressBegin. Always returns 0.
 * BUG FIX: safe to call with a 0 handle (a failed CompressBegin returns 0,
 * and the original dereferenced it unconditionally).
 */
jint Java_h264_com_H264Encoder_CompressEnd(JNIEnv* env, jobject thiz,jlong handle)
{
	Encoder * en = (Encoder *) handle;
	if (en == 0) {
		return 0;
	}
	if (en->picture)
	{
		x264_picture_clean(en->picture);
		free(en->picture);
		en->picture = 0;
	}
	if (en->param)
	{
		free(en->param);
		en->param = 0;
	}
	if (en->handle)
	{
		x264_encoder_close(en->handle);
	}
	free(en);
	return 0;
}
/*
 * Encode one camera preview frame (in) to H.264 (out).
 * type selects the frame type: 0=P, 1=IDR, 2=I, anything else = auto.
 * Returns the number of bytes written to out, or -1 on error.
 */
jint Java_h264_com_H264Encoder_CompressBuffer(JNIEnv* env, jobject thiz,jlong handle,jint type,jbyteArray in, jint insize,jbyteArray out)
{
	Encoder * en = (Encoder *) handle;
	x264_picture_t pic_out;
	int nNal = -1;
	int result = 0;
	int i = 0;
	jbyte * Buf;
	jbyte * h264Buf;
	unsigned char * pTmpOut;
	int nPicSize;
	jbyte * u;
	jbyte * v;
	if (en == 0) {
		/* BUG FIX: guard against a 0 handle from a failed CompressBegin. */
		return -1;
	}
	Buf = (jbyte*) (*env)->GetByteArrayElements(env, in, 0);
	h264Buf = (jbyte*) (*env)->GetByteArrayElements(env, out, 0);
	pTmpOut = (unsigned char *) h264Buf;
	nPicSize = en->param->i_width * en->param->i_height;
	/*
	 * Input layout: the full Y plane is followed by interleaved chroma
	 * (the Android preview default). Copy Y straight through and
	 * de-interleave the chroma bytes into the encoder's separate planes,
	 * preserving the original plane mapping.
	 */
	v = en->picture->img.plane[1];
	u = en->picture->img.plane[2];
	memcpy(en->picture->img.plane[0], Buf, nPicSize);
	for (i = 0; i < nPicSize / 4; i++)
	{
		*(u + i) = *(Buf + nPicSize + i * 2);
		*(v + i) = *(Buf + nPicSize + i * 2 + 1);
	}
	switch (type)
	{
	case 0:
		en->picture->i_type = X264_TYPE_P;
		break;
	case 1:
		en->picture->i_type = X264_TYPE_IDR;
		break;
	case 2:
		en->picture->i_type = X264_TYPE_I;
		break;
	default:
		en->picture->i_type = X264_TYPE_AUTO;
		break;
	}
	if (x264_encoder_encode(en->handle, &(en->nal), &nNal, en->picture, &pic_out) < 0)
	{
		result = -1;
	}
	else
	{
		/* Concatenate all returned NAL units into the output buffer. */
		for (i = 0; i < nNal; i++) {
			memcpy(pTmpOut, en->nal[i].p_payload, en->nal[i].i_payload);
			pTmpOut += en->nal[i].i_payload;
			result += en->nal[i].i_payload;
		}
	}
	/*
	 * BUG FIX: the original never released the array elements, leaking a
	 * pin/copy on every frame (and, if the VM returned copies, never
	 * committing the encoded bytes to the Java output array). JNI_ABORT
	 * discards the read-only input; mode 0 copies the output back.
	 */
	(*env)->ReleaseByteArrayElements(env, in, Buf, JNI_ABORT);
	(*env)->ReleaseByteArrayElements(env, out, h264Buf, 0);
	return result;
}
上一篇我说了如何使用JNI进行h264编码,但是由于效率的问题 感觉并不太实用。
经过几天的折腾,并参照http://www.javaeye.com/problems/27244 大体实现的Android 的实时编码问题,但是只是思路,还没有进入代码实现的阶段。
比较重要的2个类 MediaRecorder ParcelFileDescriptor
MediaRecorder 是Android提供的进行采集编码的类,而 ParcelFileDescriptor是个用Socket实现 setOutputFile的一个类
在测试liuzongan 的程序的过程中,发现其实这位仁兄已经实现了我们需要的大部分的功能,唯一要处理的就是在服务器端的程序。
在这里我先猜测一下MediaRecorder写文件的过程
1 , MediaRecorder写入一个固定的头,估计是占位用的 都是00H( 占位的长度好像和使用的编码器有关系)
再插入 00 00 00 08 6D 64 61 74 (mdat box)
2,开始编码,并把编码后的每一帧都添加到文件中,看到00 00 80了没, 了解h263的同志一定很熟悉,根本就是263视频的标志嘛!
(如果有音频数据的话,肯定会和视频数据交错在一起,但是我们的目标只是实时编码视频流,不需要处理音频,所以在初始化 MediaRecorder的时候我不需要 MediaRecorder.setAudioSource,这样就没有音频数据了。)
3 结束捕获。 MediaRecorder应该会返回到文件的头部填充文件头。但由于不是真正文件的关系,不能返回到头部,于是就把文件头附加到文件的后面了,感觉也就是 liuzongan 这仁兄提出的问题的答案了。
这个是,文件开头mdat box的长度,用于替换 00 00 00 08.
这个是3gp文件的头, 用于替换开头的占位。
4最后, MediaRecorder补充3gp文件的moov box ,这一部分已经属于3gp的媒体索引部分了,具体的内容对于我们来说已经不太关心了。大家可以参照http://www.cnitblog.com/zouzheng/archive/2007/04/04/25155.html 研究。
下面开始说我们要实现的内容了
其实有了上面的描述,大家应该能想到我们要实现的其实就是视频的分帧就可以了。。
server 端处理h263的方法
第一步 找到 mdat这个串, 后面的数据就是我们要的视频数据了,
第二步 在流中查找 00 00 80 这个标志,每一个都是一帧数据的开始
对于 h264和mpeg4的,要预采集一次数据,这是因为mp4v的相关解码参数,在esds box中的, 而esds box 是在采集结束后才写入的,所以对于我们的程序来说需要先编码一次 获取 mp4v的相关解码参数,然后在用相同的参数进行采集, 呵呵 这样我们就能获得原始的mp4v流了
h264一样 不过是存在了 avcC box 里
最近很多同学问我SPS和PPS在那里设置,其实这篇文章只是我 上篇文章的一个简单实现
具体情况情看看上一篇
http://blog.csdn.net/zblue78/archive/2010/12/15/6078040.aspx
这里只用HTC的G7做了H264的程序,谅解!
csdn的资源慢了 粘代码算了
资源 http://download.csdn.net/source/2918751
欢迎大家经常访问我的blog http://blog.csdn.net/zblue78/
共同探讨,啥也不说的 直接上码
AndroidManifest.xml
[xhtml] view
plaincopy
<?xml version="1.0" encoding="utf-8"?>
<!-- Manifest for the MediaRecorder/LocalSocket H.264 streaming demo:
     a single landscape launcher Activity (VideoCameraActivity). -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.zjzhang"
android:versionCode="1"
android:versionName="1.0">
<!-- debuggable is enabled for development; remove for release builds. -->
<application android:icon="@drawable/icon" android:label="@string/app_name" android:debuggable="true">
<activity android:name=".VideoCameraActivity"
android:screenOrientation="landscape"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-sdk android:minSdkVersion="3" />
<!-- CAMERA/RECORD_* for capture; WRITE_EXTERNAL_STORAGE for the /sdcard
     output file; INTERNET for eventual network streaming of the frames. -->
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.RECORD_VIDEO"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
</manifest>
main.xml
[xhtml] view
plaincopy
<?xml version="1.0" encoding="utf-8"?>
<!-- Root layout: a single 176x144 (QCIF) SurfaceView used as the camera
     preview surface that MediaRecorder requires. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
<!-- NOTE(review): layout_alignParentRight/Top are RelativeLayout
     attributes and have no effect inside a LinearLayout. -->
<SurfaceView
android:id="@+id/surface_camera"
android:layout_width="176px"
android:layout_height="144px"
android:layout_alignParentRight="true"
android:layout_alignParentTop="true"
/>
</LinearLayout>
[java] view
plaincopy
package com.zjzhang;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.graphics.PixelFormat;
import android.media.MediaRecorder;
import android.net.LocalServerSocket;
import android.net.LocalSocket;
import android.net.LocalSocketAddress;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
/**
 * Records camera video with MediaRecorder into an in-process LocalSocket
 * pair and re-frames the stream into an Annex-B H.264 file.
 *
 * Flow (see the article text above): surfaceChanged() starts a throw-away
 * recording whose output is drained and discarded, then recording is
 * restarted and a reader thread skips the 32-byte placeholder header,
 * writes hard-coded SPS/PPS (captured from an HTC G7 at 352x288), and
 * converts the 4-byte length-prefixed NAL payload into start-code-prefixed
 * NAL units in /sdcard/stream.h264.
 */
public class VideoCameraActivity extends Activity implements
SurfaceHolder.Callback, MediaRecorder.OnErrorListener,
MediaRecorder.OnInfoListener {
// Codec used by MediaRecorder; the hard-coded SPS/PPS below are only
// valid for H264 at 352x288 on the tested device.
private static final int mVideoEncoder =MediaRecorder.VideoEncoder.H264;
private static final String TAG = "VideoCamera";
// MediaRecorder writes to sender's file descriptor; the reader thread
// consumes the identical byte stream from receiver.
LocalSocket receiver, sender;
LocalServerSocket lss;
private MediaRecorder mMediaRecorder = null;
boolean mMediaRecorderRecording = false;
private SurfaceView mSurfaceView = null;
private SurfaceHolder mSurfaceHolder = null;
// Stream-parsing thread created by startVideoRecording().
Thread t;
Context mContext = this;
// Destination for the re-framed raw H.264 stream.
RandomAccessFile raf = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Fullscreen, titleless window for the preview surface.
getWindow().setFormat(PixelFormat.TRANSLUCENT);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.main);
mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
SurfaceHolder holder = mSurfaceView.getHolder();
holder.addCallback(this);
// Required for camera preview surfaces on pre-Honeycomb devices.
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mSurfaceView.setVisibility(View.VISIBLE);
// Build the connected in-process socket pair named "VideoCamera".
receiver = new LocalSocket();
try {
lss = new LocalServerSocket("VideoCamera");
receiver.connect(new LocalSocketAddress("VideoCamera"));
receiver.setReceiveBufferSize(500000);
receiver.setSendBufferSize(500000);
sender = lss.accept();
sender.setReceiveBufferSize(500000);
sender.setSendBufferSize(500000);
} catch (IOException e) {
// Without the socket pair the recorder has nowhere to write; give up.
finish();
return;
}
}
@Override
public void onStart() {
super.onStart();
}
@Override
public void onResume() {
super.onResume();
}
@Override
public void onPause() {
super.onPause();
// Tear down recording and the socket pair; this Activity is one-shot.
if (mMediaRecorderRecording) {
stopVideoRecording();
try {
lss.close();
receiver.close();
sender.close();
} catch (IOException e) {
e.printStackTrace();
}
}
finish();
}
// Interrupts the reader thread, closes the output file and releases the
// recorder. NOTE(review): raf may still be null here if the reader thread
// never reached the file-open step - TODO confirm before hardening.
private void stopVideoRecording() {
Log.d(TAG, "stopVideoRecording");
if (mMediaRecorderRecording || mMediaRecorder != null) {
if (t != null)
t.interrupt();
try {
raf.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
releaseMediaRecorder();
}
}
// Spawns the thread that drains the first (throw-away) recording, restarts
// capture, then re-frames the live stream into /sdcard/stream.h264.
private void startVideoRecording() {
Log.d(TAG, "startVideoRecording");
(t = new Thread() {
public void run() {
int frame_size = 1024;
byte[] buffer = new byte[1024 * 64];
int num, number = 0;
InputStream fis = null;
try {
fis = receiver.getInputStream();
} catch (IOException e1) {
return;
}
// Let the recorder produce some output before draining it.
try {
Thread.currentThread().sleep(500);
} catch (InterruptedException e1) {
e1.printStackTrace();
}
number = 0;
releaseMediaRecorder();
// For H264 or MPEG_4_SP this first pass is where the codec-config
// stream would be located:
//   avcC box - H264 decoder parameters
//   esds box - MPEG_4_SP decoder parameters
// As long as the resolution and other settings stay fixed these
// parameters never change, so determining them once on the first
// run is sufficient.
while (true) {
try {
num = fis.read(buffer, number, frame_size);
number += num;
if (num < frame_size) {
break;
}
} catch (IOException e) {
break;
}
}
initializeVideo();
number = 0;
// Restart capture to obtain the live video stream.
DataInputStream dis=new DataInputStream(fis);
// Skip the 32-byte empty placeholder header at the head of the stream.
try {
dis.read(buffer,0,32);
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
try {
File file = new File("/sdcard/stream.h264");
if (file.exists())
file.delete();
raf = new RandomAccessFile(file, "rw");
} catch (Exception ex) {
Log.v("System.out", ex.toString());
}
// These parameters correspond to the current video settings; if the
// settings change they must be re-captured. Verified only on one
// HTC G7 - other devices may differ.
byte[] h264sps={0x67,0x42,0x00,0x0C,(byte) 0x96,0x54,0x0B,0x04,(byte) 0xA2};
byte[] h264pps={0x68,(byte) 0xCE,0x38,(byte) 0x80};
byte[] h264head={0,0,0,1};
// Emit SPS and PPS once, each with an Annex-B start code.
try {
raf.write(h264head);
raf.write(h264sps);
raf.write(h264head);
raf.write(h264pps);
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
while (true)
{
try {
// Read the 4-byte big-endian length of the next NAL unit.
int h264length=dis.readInt();
number =0;
raf.write(h264head);
// Copy exactly h264length payload bytes after the start code.
while(number<h264length)
{
int lost=h264length-number;
num = fis.read(buffer,0,frame_size<lost?frame_size:lost);
Log.d(TAG,String.format("H264 %d,%d,%d", h264length,number,num));
number+=num;
raf.write(buffer, 0, num);
}
} catch (IOException e) {
break;
}
}
}
}).start();
}
// Configures and starts MediaRecorder, writing into the sender socket's
// file descriptor. Returns false on failure (and finishes the Activity
// if prepare/start threw).
private boolean initializeVideo() {
if (mSurfaceHolder==null)
return false;
mMediaRecorderRecording = true;
if (mMediaRecorder == null)
mMediaRecorder = new MediaRecorder();
else
mMediaRecorder.reset();
// Video only - no audio source is set, so no audio track is interleaved.
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mMediaRecorder.setVideoFrameRate(20);
mMediaRecorder.setVideoSize(352, 288);
mMediaRecorder.setVideoEncoder(mVideoEncoder);
mMediaRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
// 0 disables the duration and file-size limits.
mMediaRecorder.setMaxDuration(0);
mMediaRecorder.setMaxFileSize(0);
mMediaRecorder.setOutputFile(sender.getFileDescriptor());
try {
mMediaRecorder.setOnInfoListener(this);
mMediaRecorder.setOnErrorListener(this);
mMediaRecorder.prepare();
mMediaRecorder.start();
} catch (IOException exception) {
releaseMediaRecorder();
finish();
return false;
}
return true;
}
// Stops (if recording) and releases the MediaRecorder instance.
private void releaseMediaRecorder() {
Log.v(TAG, "Releasing media recorder.");
if (mMediaRecorder != null) {
if (mMediaRecorderRecording) {
try {
mMediaRecorder.setOnErrorListener(null);
mMediaRecorder.setOnInfoListener(null);
mMediaRecorder.stop();
} catch (RuntimeException e) {
Log.e(TAG, "stop fail: " + e.getMessage());
}
mMediaRecorderRecording = false;
}
mMediaRecorder.reset();
mMediaRecorder.release();
mMediaRecorder = null;
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
Log.d(TAG, "surfaceChanged");
mSurfaceHolder = holder;
// The throw-away capture starts here; startVideoRecording() drains it,
// restarts capture and then parses the live stream.
if (!mMediaRecorderRecording) {
initializeVideo();
startVideoRecording();
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.d(TAG, "surfaceCreated");
mSurfaceHolder = holder;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "surfaceDestroyed");
mSurfaceHolder = null;
}
@Override
public void onInfo(MediaRecorder mr, int what, int extra) {
switch (what) {
case MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN:
Log.d(TAG, "MEDIA_RECORDER_INFO_UNKNOWN");
break;
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED:
Log.d(TAG, "MEDIA_RECORDER_INFO_MAX_DURATION_REACHED");
break;
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED:
Log.d(TAG, "MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED");
break;
}
}
@Override
public void onError(MediaRecorder mr, int what, int extra) {
// Unknown recorder errors are unrecoverable here; shut down.
if (what == MediaRecorder.MEDIA_RECORDER_ERROR_UNKNOWN) {
Log.d(TAG, "MEDIA_RECORDER_ERROR_UNKNOWN");
finish();
}
}
}
很容易看懂的
简单的封装了一个JNI库
编码库在BBS里 CSDN的资源太难用了
http://www.eoeandroid.com/forum.php?mod=viewthread&tid=52739&extra=
x264的编译方法
export ARM_ROOT=$ANDROID_NDK_ROOT
export ARM_INC=$ARM_ROOT/build/platforms/android-5/arch-arm/usr/include/
export ARM_LIB=$ARM_ROOT/build/platforms/android-5/arch-arm/usr/lib/
export ARM_TOOL=$ARM_ROOT/build/prebuilt/windows/arm-eabi-4.4.0
export ARM_LIBO=$ARM_TOOL/lib/gcc/arm-eabi/4.4.0
export PATH=$ARM_TOOL/bin:$PATH
export ARM_PRE=arm-eabi
./configure --prefix=/home/egmkang/libx264 --enable-shared \
--disable-asm --host=arm-linux --cross-prefix=arm-eabi- \
--extra-cflags=" -I$ARM_INC -fPIC -DANDROID -fpic -mthumb-interwork -ffunction-sections -funwind-tables -fstack-protector -fno-short-enums -D__ARM_ARCH_5__ -D__ARM_ARCH_5T__ -D__ARM_ARCH_5E__ -D__ARM_ARCH_5TE__ -Wno-psabi -march=armv5te -mtune=xscale -msoft-float
-mthumb -Os -fomit-frame-pointer -fno-strict-aliasing -finline-limit=64 -DANDROID -Wa,--noexecstack -MMD -MP " \
--extra-ldflags="-nostdlib -Bdynamic -Wl,--no-undefined -Wl,-z,noexecstack -Wl,-z,nocopyreloc -Wl,-soname,/system/lib/libz.so -Wl,-rpath-link=$ARM_LIB,-dynamic-linker=/system/bin/linker -L$ARM_LIB -nostdlib $ARM_LIB/crtbegin_dynamic.o $ARM_LIB/crtend_android.o
-lc -lm -ldl -lgcc"
这里生成的是x264的静态库
整个工程唯一有点麻烦的是 生成 JNI 动态库的时候 报错 。。
后来发现是少链接了一个库,
于是根据x264的编译方法 在Android.mk添加一些配置就可以了。当然这就是难点,在网上查了很多都没有结果。
有些目录的参数自己调整哈
我把前面生成的libx264.a 和 x264.h 文件放到jni的libx264目录下了 有问题自己调整
# Builds the H264Android JNI wrapper as a shared library, statically
# linking the prebuilt libx264.a placed under jni/libx264/.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Headers for x264 (x264.h) live in jni/libx264/include.
LOCAL_C_INCLUDES +=$(LOCAL_PATH)/libx264/include
LOCAL_MODULE := H264Android
LOCAL_SRC_FILES := H264Android.c
# Link the static x264 library directly into the shared library.
LOCAL_LDFLAGS += $(LOCAL_PATH)/libx264/lib/libx264.a
# libgcc resolves compiler-runtime symbols pulled in by x264 (the missing
# library mentioned in the article text above).
LOCAL_LDLIBS := -L$(SYSROOT)/usr/lib -lgcc
include $(BUILD_SHARED_LIBRARY)
估计很多人都会发现很熟悉 嘻嘻 这个就是根据
http://www.cnblogs.com/mcodec/articles/1780598.html
改的 连文件名字都没有换!!比较懒
另: 编码的效率很低下啊
AndroidVideo.java
[java] view
plaincopy
import java.io.File;
import java.io.RandomAccessFile;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.content.res.Configuration;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;
import android.view.SurfaceHolder.Callback;
import android.graphics.PixelFormat;
import android.hardware.Camera;
/**
 * Fullscreen camera preview Activity.
 *
 * Opens the camera when the preview surface is created and pushes 352x288
 * (CIF) preview frames into an {@link H264Encoder}, which compresses them
 * to H.264 through JNI and appends the result to /sdcard/camera.h264.
 */
public class AndroidVideo extends Activity implements Callback,
        Camera.PictureCallback {
    private static final String TAG = "AndroidVideo";
    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Camera mCamera = null;
    // True once startPreview() has been called, so surfaceChanged() can stop
    // a running preview before reconfiguring the camera.
    private boolean mPreviewRunning = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Fullscreen, titleless window with a translucent surface format.
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.camera);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        // Required for camera preview surfaces on pre-Honeycomb devices.
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        // Still capture is unused; only preview frames are encoded.
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        if (mPreviewRunning) {
            mCamera.stopPreview();
        }
        Camera.Parameters p = mCamera.getParameters();
        // The preview size must match the dimensions the encoder was
        // initialized with, otherwise the native side reads out of bounds.
        p.setPreviewSize(352, 288);
        mCamera.setPreviewCallback(new H264Encoder(352, 288));
        mCamera.setParameters(p);
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (Exception ex) {
            // BUG FIX: previously swallowed silently; log so failures show up.
            Log.e(TAG, "setPreviewDisplay failed", ex);
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (mCamera != null) {
            // Detach the callback first so no frame lands on a released camera.
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mPreviewRunning = false;
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        try {
            super.onConfigurationChanged(newConfig);
            if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
                // No-op: the layout is identical in both orientations.
            } else if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
                // No-op: the layout is identical in both orientations.
            }
        } catch (Exception ex) {
            // BUG FIX: previously swallowed silently.
            Log.e(TAG, "onConfigurationChanged failed", ex);
        }
    }
}
/**
 * Camera preview callback that feeds each preview frame to the native x264
 * wrapper (libH264Android.so) and appends the resulting H.264 NAL units to
 * /sdcard/camera.h264.
 */
class H264Encoder implements Camera.PreviewCallback {
    // Opaque pointer to the native Encoder state; 0 means init failed.
    long encoder = 0;
    // Output file; null if opening /sdcard/camera.h264 failed.
    RandomAccessFile raf = null;
    // Scratch buffer for one compressed frame (generous upper bound).
    byte[] h264Buff = null;

    static {
        System.loadLibrary("H264Android");
    }

    private H264Encoder() {
    }

    /**
     * @param width  frame width in pixels; must match the camera preview size
     * @param height frame height in pixels; must match the camera preview size
     */
    public H264Encoder(int width, int height) {
        encoder = CompressBegin(width, height);
        h264Buff = new byte[width * height * 8];
        try {
            File file = new File("/sdcard/camera.h264");
            raf = new RandomAccessFile(file, "rw");
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        }
    }

    /**
     * Best-effort cleanup of the native encoder and the output file.
     * NOTE(review): finalize() is unreliable on Android; an explicit close
     * method would be preferable. Kept for compatibility with callers.
     */
    protected void finalize() {
        // BUG FIX: do not hand a 0 handle (failed init) to native code.
        if (encoder != 0) {
            CompressEnd(encoder);
            encoder = 0;
        }
        if (null != raf) {
            try {
                raf.close();
            } catch (Exception ex) {
                Log.v("System.out", ex.toString());
            }
        }
        try {
            super.finalize();
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    private native long CompressBegin(int width, int height);

    private native int CompressBuffer(long encoder, int type, byte[] in,
            int insize, byte[] out);

    private native int CompressEnd(long encoder);

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // BUG FIX: guard against failed native init (encoder == 0 crashed
        // natively) and a failed file open (raf == null threw an NPE).
        if (encoder == 0 || raf == null) {
            return;
        }
        // type -1 lets x264 pick the frame type (X264_TYPE_AUTO).
        int result = CompressBuffer(encoder, -1, data, data.length, h264Buff);
        try {
            if (result > 0)
                raf.write(h264Buff, 0, result);
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        }
    }
}
H264Android.c
[cpp] view
plaincopy
#include <string.h>
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <arpa/inet.h>
#include <x264.h>
#define DATA_MAX 3000000
#define H264_MTU 1024
/*
 * Per-session encoder state created by CompressBegin and destroyed by
 * CompressEnd; passed across JNI as an opaque jlong handle.
 */
typedef struct
{
x264_param_t * param; /* encoding parameters (heap-allocated) */
x264_t *handle; /* opened x264 encoder instance */
x264_picture_t * picture; /* reusable input picture (I420 planes) */
x264_nal_t *nal; /* output NAL array filled by x264_encoder_encode */
} Encoder;
/*
 * Create an x264 encoder for width x height I420 frames.
 * Returns the heap-allocated Encoder state as an opaque jlong,
 * or 0 if any allocation or the encoder open fails.
 */
jlong Java_h264_com_H264Encoder_CompressBegin(JNIEnv* env, jobject thiz,
		jint width, jint height) {
	Encoder * en = (Encoder *) malloc(sizeof(Encoder));
	if (en == 0) {
		return 0;
	}
	en->handle = 0;
	en->nal = 0;
	en->param = (x264_param_t *) malloc(sizeof(x264_param_t));
	/* BUG FIX: the original cast this allocation to x264_param_t*. */
	en->picture = (x264_picture_t *) malloc(sizeof(x264_picture_t));
	if (en->param == 0 || en->picture == 0) {
		free(en->param);
		free(en->picture);
		free(en);
		return 0;
	}
	x264_param_default(en->param); /* set default param */
	//en->param->rc.i_rc_method = X264_RC_CQP;
	en->param->i_log_level = X264_LOG_NONE;
	en->param->i_width = width; /* set frame width */
	en->param->i_height = height; /* set frame height */
	/* Low-latency settings: no lookahead, no B-frames, 5 fps. */
	en->param->rc.i_lookahead = 0;
	en->param->i_bframe = 0;
	en->param->i_fps_num = 5;
	en->param->i_fps_den = 1;
	if ((en->handle = x264_encoder_open(en->param)) == 0) {
		/* BUG FIX: the original leaked en, en->param and en->picture here. */
		free(en->param);
		free(en->picture);
		free(en);
		return 0;
	}
	/* Create a new pic */
	x264_picture_alloc(en->picture, X264_CSP_I420, en->param->i_width,
			en->param->i_height);
	return (jlong) en;
}
/*
 * Release everything allocated by CompressBegin. Always returns 0.
 * BUG FIX: safe to call with a 0 handle (a failed CompressBegin returns 0,
 * and the original dereferenced it unconditionally).
 */
jint Java_h264_com_H264Encoder_CompressEnd(JNIEnv* env, jobject thiz,jlong handle)
{
	Encoder * en = (Encoder *) handle;
	if (en == 0) {
		return 0;
	}
	if (en->picture)
	{
		x264_picture_clean(en->picture);
		free(en->picture);
		en->picture = 0;
	}
	if (en->param)
	{
		free(en->param);
		en->param = 0;
	}
	if (en->handle)
	{
		x264_encoder_close(en->handle);
	}
	free(en);
	return 0;
}
/*
 * Encode one camera preview frame (in) to H.264 (out).
 * type selects the frame type: 0=P, 1=IDR, 2=I, anything else = auto.
 * Returns the number of bytes written to out, or -1 on error.
 */
jint Java_h264_com_H264Encoder_CompressBuffer(JNIEnv* env, jobject thiz,jlong handle,jint type,jbyteArray in, jint insize,jbyteArray out)
{
	Encoder * en = (Encoder *) handle;
	x264_picture_t pic_out;
	int nNal = -1;
	int result = 0;
	int i = 0;
	jbyte * Buf;
	jbyte * h264Buf;
	unsigned char * pTmpOut;
	int nPicSize;
	jbyte * u;
	jbyte * v;
	if (en == 0) {
		/* BUG FIX: guard against a 0 handle from a failed CompressBegin. */
		return -1;
	}
	Buf = (jbyte*) (*env)->GetByteArrayElements(env, in, 0);
	h264Buf = (jbyte*) (*env)->GetByteArrayElements(env, out, 0);
	pTmpOut = (unsigned char *) h264Buf;
	nPicSize = en->param->i_width * en->param->i_height;
	/*
	 * Input layout: the full Y plane is followed by interleaved chroma
	 * (the Android preview default). Copy Y straight through and
	 * de-interleave the chroma bytes into the encoder's separate planes,
	 * preserving the original plane mapping.
	 */
	v = en->picture->img.plane[1];
	u = en->picture->img.plane[2];
	memcpy(en->picture->img.plane[0], Buf, nPicSize);
	for (i = 0; i < nPicSize / 4; i++)
	{
		*(u + i) = *(Buf + nPicSize + i * 2);
		*(v + i) = *(Buf + nPicSize + i * 2 + 1);
	}
	switch (type)
	{
	case 0:
		en->picture->i_type = X264_TYPE_P;
		break;
	case 1:
		en->picture->i_type = X264_TYPE_IDR;
		break;
	case 2:
		en->picture->i_type = X264_TYPE_I;
		break;
	default:
		en->picture->i_type = X264_TYPE_AUTO;
		break;
	}
	if (x264_encoder_encode(en->handle, &(en->nal), &nNal, en->picture, &pic_out) < 0)
	{
		result = -1;
	}
	else
	{
		/* Concatenate all returned NAL units into the output buffer. */
		for (i = 0; i < nNal; i++) {
			memcpy(pTmpOut, en->nal[i].p_payload, en->nal[i].i_payload);
			pTmpOut += en->nal[i].i_payload;
			result += en->nal[i].i_payload;
		}
	}
	/*
	 * BUG FIX: the original never released the array elements, leaking a
	 * pin/copy on every frame (and, if the VM returned copies, never
	 * committing the encoded bytes to the Java output array). JNI_ABORT
	 * discards the read-only input; mode 0 copies the output back.
	 */
	(*env)->ReleaseByteArrayElements(env, in, Buf, JNI_ABORT);
	(*env)->ReleaseByteArrayElements(env, out, h264Buf, 0);
	return result;
}
上一篇我说了如何使用JNI进行h264编码,但是由于效率的问题 感觉并不太实用。
经过几天的折腾,并参照http://www.javaeye.com/problems/27244 大体实现的Android 的实时编码问题,但是只是思路,还没有进入代码实现的阶段。
比较重要的2个类 MediaRecorder ParcelFileDescriptor
MediaRecorder 是Android提供的进行采集编码的类,而 ParcelFileDescriptor是个用Socket实现 setOutputFile的一个类
在测试liuzongan 的程序的过程中,发现其他这位仁兄已经实现了我们需要的大部分的功能,唯一要处理的就是在服务器端的程序。
在这里我先猜测一下MediaRecorder写文件的过程
1 , MediaRecorder写入一个固定的头,估计是占位用的 都是00H( 占位的长度好像和使用的编码器有关系)
再插入 00 00 00 08 6D 64 61 74 (mdat box)
2,开始编码,并把编码后的每一帧都添加到文件中,看到00 00 80了没, 了解h263的同志一定很熟悉,根本就是263视频的标志嘛!
(如果有音频数据的话,肯定会和视频数据交错在一起,但是我们的目标只是实时编码视频流,不需要处理音频,所以在初始化 MediaRecorder的时候我不需要 MediaRecorder.setAudioSource,这样就没有音频数据了。)
3 结束捕获。 MediaRecorder应该会返回到文件的头部填充文件头。但由于不是真正文件的关系,不能返回到头部,于是就把文件头附加到文件的后面了,感觉也就是 liuzongan 这仁兄提出的问题的答案了。
这个是,文件开头mdat box的长度,用于替换 00 00 00 08.
这个是3gp文件的头, 用于替换开头的占位。
4最后, MediaRecorder补充3gp文件的moov box ,这一部分已经属于3gp的媒体索引部分了,具体的内容对于我们来说已经不太关心了。大家可以参照http://www.cnitblog.com/zouzheng/archive/2007/04/04/25155.html 研究。
下开始说我们要是实现的内容了
其实有了上面的描述,大家应该能想到我们要实现的其实就是视频的分帧就可以了。。
server 端处理h263的方法
第一步 找到 mdat这个串, 后面的数据就是我们要的视频数据了,
第二步 在流中查找 00 00 80 这个标志,每一个都是一帧数据的开始
对于 h264和mpeg4的,要预采集一次数据,这是因为mp4v的相关解码参数,在esds box中的, 而esds box 是在采集结束后才写入的,所以对于我们的程序来说需要先编码一次 获取 mp4v的相关解码参数,然后在用相同的参数进行采集, 呵呵 这样我们就能获得原始的mp4v流了
h264一样 不过是存在了 avcC box 里
最近很多同学问我SPS和PPS在那里设置,其实这篇文章只是我 上篇文章的一个简单实现
具体情况情看看上一篇
http://blog.csdn.net/zblue78/archive/2010/12/15/6078040.aspx
这里只用HTC的G7做了H264的程序,谅解!
csdn的资源慢了 粘代码算了
资源 http://download.csdn.net/source/2918751
欢迎大家经常访问我的blog http://blog.csdn.net/zblue78/
共同探讨,啥也不说的 直接上码
AndroidManifest.xml
[xhtml] view
plaincopy
<?xml version="1.0" encoding="utf-8"?>
<!-- Manifest for the MediaRecorder/LocalSocket H.264 streaming demo:
     a single landscape launcher Activity (VideoCameraActivity). -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.zjzhang"
android:versionCode="1"
android:versionName="1.0">
<!-- debuggable is enabled for development; remove for release builds. -->
<application android:icon="@drawable/icon" android:label="@string/app_name" android:debuggable="true">
<activity android:name=".VideoCameraActivity"
android:screenOrientation="landscape"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-sdk android:minSdkVersion="3" />
<!-- CAMERA/RECORD_* for capture; WRITE_EXTERNAL_STORAGE for the /sdcard
     output file; INTERNET for eventual network streaming of the frames. -->
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.RECORD_VIDEO"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
</manifest>
main.xml
[xhtml] view
plaincopy
<?xml version="1.0" encoding="utf-8"?>
<!-- Root layout: a single 176x144 (QCIF) SurfaceView used as the camera
     preview surface that MediaRecorder requires. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
<!-- NOTE(review): layout_alignParentRight/Top are RelativeLayout
     attributes and have no effect inside a LinearLayout. -->
<SurfaceView
android:id="@+id/surface_camera"
android:layout_width="176px"
android:layout_height="144px"
android:layout_alignParentRight="true"
android:layout_alignParentTop="true"
/>
</LinearLayout>
[java] view
plaincopy
package com.zjzhang;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.graphics.PixelFormat;
import android.media.MediaRecorder;
import android.net.LocalServerSocket;
import android.net.LocalSocket;
import android.net.LocalSocketAddress;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
/**
 * Captures camera video with {@link MediaRecorder} and recovers a raw Annex-B
 * H.264 elementary stream from it in real time.
 *
 * Pipeline: a {@link LocalSocket} pair is created; MediaRecorder writes its
 * 3GP output to {@code sender}'s file descriptor while a background thread
 * reads the same bytes back from {@code receiver}. The first (throw-away)
 * recording is drained so the stream restarts at a known position; the second
 * recording is then parsed as [32-byte empty header][int32 frame length][frame
 * payload]... and rewritten as 00 00 00 01 start-code delimited NAL units to
 * /sdcard/stream.h264, with a hard-coded SPS/PPS prepended.
 *
 * NOTE(review): the SPS/PPS bytes below match 352x288@20fps on the author's
 * HTC G7 only — they must be re-derived if resolution/device changes.
 */
public class VideoCameraActivity extends Activity implements
        SurfaceHolder.Callback, MediaRecorder.OnErrorListener,
        MediaRecorder.OnInfoListener {

    /** Encoder used for the recording; the stream parser assumes H.264. */
    private static final int mVideoEncoder = MediaRecorder.VideoEncoder.H264;
    private static final String TAG = "VideoCamera";

    // Loopback pair: MediaRecorder writes to sender's fd, we read from receiver.
    LocalSocket receiver, sender;
    LocalServerSocket lss;

    private MediaRecorder mMediaRecorder = null;
    boolean mMediaRecorderRecording = false;

    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;

    /** Background thread that parses the recorder stream; interrupted to stop. */
    Thread t;
    Context mContext = this;
    /** Output file handle; null until the writer thread has opened it. */
    RandomAccessFile raf = null;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.main);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        SurfaceHolder holder = mSurfaceView.getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        mSurfaceView.setVisibility(View.VISIBLE);

        // Wire up the local socket loopback before any recording starts.
        receiver = new LocalSocket();
        try {
            lss = new LocalServerSocket("VideoCamera");
            receiver.connect(new LocalSocketAddress("VideoCamera"));
            receiver.setReceiveBufferSize(500000);
            receiver.setSendBufferSize(500000);
            sender = lss.accept();
            sender.setReceiveBufferSize(500000);
            sender.setSendBufferSize(500000);
        } catch (IOException e) {
            // Without the socket pair the whole pipeline is useless — bail out.
            finish();
            return;
        }
    }

    @Override
    public void onStart() {
        super.onStart();
    }

    @Override
    public void onResume() {
        super.onResume();
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mMediaRecorderRecording) {
            stopVideoRecording();
            try {
                lss.close();
                receiver.close();
                sender.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        // This sample activity does not support resuming a recording session.
        finish();
    }

    /** Stops the writer thread, closes the output file and tears down the recorder. */
    private void stopVideoRecording() {
        Log.d(TAG, "stopVideoRecording");
        if (mMediaRecorderRecording || mMediaRecorder != null) {
            if (t != null)
                t.interrupt();
            // raf stays null until the writer thread has opened the output file;
            // guard against the race where we stop before that happens.
            if (raf != null) {
                try {
                    raf.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            releaseMediaRecorder();
        }
    }

    /**
     * Spawns the parser thread: drains the first recording, restarts capture,
     * then converts the length-prefixed frames to an Annex-B file on the sdcard.
     */
    private void startVideoRecording() {
        Log.d(TAG, "startVideoRecording");
        (t = new Thread() {
            public void run() {
                int frame_size = 1024;
                byte[] buffer = new byte[1024 * 64];
                int num, number = 0;
                InputStream fis = null;
                try {
                    fis = receiver.getInputStream();
                } catch (IOException e1) {
                    return;
                }
                try {
                    // Give MediaRecorder time to emit its first chunk.
                    Thread.sleep(500);
                } catch (InterruptedException e1) {
                    // Interruption is our stop signal (see stopVideoRecording).
                    Thread.currentThread().interrupt();
                    return;
                }
                number = 0;
                releaseMediaRecorder();
                // For H264 / MPEG_4_SP the codec configuration would be located
                // here: the avcC box carries H.264 SPS/PPS, the esds box carries
                // the MPEG-4 config. As long as resolution etc. are fixed these
                // parameters never change, so they only need determining once.
                //
                // Drain the first (throw-away) recording completely. Read at
                // offset 0 every time: the bytes are discarded, and reading at a
                // growing offset would overflow the 64 KiB buffer on recordings
                // larger than the buffer.
                while (true) {
                    try {
                        num = fis.read(buffer, 0, frame_size);
                        if (num == -1) {
                            // EOF: the recorder side has closed the stream.
                            break;
                        }
                        number += num;
                        if (num < frame_size) {
                            break;
                        }
                    } catch (IOException e) {
                        break;
                    }
                }

                // Restart capture to obtain the live stream.
                initializeVideo();
                number = 0;
                DataInputStream dis = new DataInputStream(fis);
                // Skip the 32-byte empty header in front of the stream.
                // readFully: a plain read() may legally return fewer bytes.
                try {
                    dis.readFully(buffer, 0, 32);
                } catch (IOException e1) {
                    e1.printStackTrace();
                }

                try {
                    File file = new File("/sdcard/stream.h264");
                    if (file.exists())
                        file.delete();
                    raf = new RandomAccessFile(file, "rw");
                } catch (Exception ex) {
                    Log.v("System.out", ex.toString());
                }

                // These parameters correspond to the current video settings
                // (352x288@20fps); re-derive them if the settings change. They
                // may also differ per device — only tested on an HTC G7.
                byte[] h264sps = { 0x67, 0x42, 0x00, 0x0C, (byte) 0x96, 0x54,
                        0x0B, 0x04, (byte) 0xA2 };
                byte[] h264pps = { 0x68, (byte) 0xCE, 0x38, (byte) 0x80 };
                byte[] h264head = { 0, 0, 0, 1 };
                try {
                    raf.write(h264head);
                    raf.write(h264sps);
                    raf.write(h264head);
                    raf.write(h264pps);
                } catch (IOException e1) {
                    e1.printStackTrace();
                }

                while (true) {
                    try {
                        // Each frame is preceded by its length (big-endian int).
                        int h264length = dis.readInt();
                        number = 0;
                        raf.write(h264head);
                        while (number < h264length) {
                            int lost = h264length - number;
                            num = fis.read(buffer, 0,
                                    frame_size < lost ? frame_size : lost);
                            if (num == -1) {
                                // EOF mid-frame: stop cleanly instead of
                                // writing a negative byte count.
                                return;
                            }
                            Log.d(TAG, String.format("H264 %d,%d,%d",
                                    h264length, number, num));
                            number += num;
                            raf.write(buffer, 0, num);
                        }
                    } catch (IOException e) {
                        break;
                    }
                }
            }
        }).start();
    }

    /**
     * (Re)configures and starts the MediaRecorder, directing its output into
     * the sender socket's file descriptor.
     *
     * @return true if recording started, false if the surface is missing or
     *         the recorder failed to prepare.
     */
    private boolean initializeVideo() {
        if (mSurfaceHolder == null)
            return false;
        mMediaRecorderRecording = true;
        if (mMediaRecorder == null)
            mMediaRecorder = new MediaRecorder();
        else
            mMediaRecorder.reset();
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mMediaRecorder.setVideoFrameRate(20);
        mMediaRecorder.setVideoSize(352, 288);
        mMediaRecorder.setVideoEncoder(mVideoEncoder);
        mMediaRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
        // 0 = no duration / size limit.
        mMediaRecorder.setMaxDuration(0);
        mMediaRecorder.setMaxFileSize(0);
        mMediaRecorder.setOutputFile(sender.getFileDescriptor());
        try {
            mMediaRecorder.setOnInfoListener(this);
            mMediaRecorder.setOnErrorListener(this);
            mMediaRecorder.prepare();
            mMediaRecorder.start();
        } catch (IOException exception) {
            releaseMediaRecorder();
            finish();
            return false;
        }
        return true;
    }

    /** Stops (if running) and fully releases the MediaRecorder instance. */
    private void releaseMediaRecorder() {
        Log.v(TAG, "Releasing media recorder.");
        if (mMediaRecorder != null) {
            if (mMediaRecorderRecording) {
                try {
                    mMediaRecorder.setOnErrorListener(null);
                    mMediaRecorder.setOnInfoListener(null);
                    mMediaRecorder.stop();
                } catch (RuntimeException e) {
                    // stop() throws if called before any data was recorded.
                    Log.e(TAG, "stop fail: " + e.getMessage());
                }
                mMediaRecorderRecording = false;
            }
            mMediaRecorder.reset();
            mMediaRecorder.release();
            mMediaRecorder = null;
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        Log.d(TAG, "surfaceChanged");
        mSurfaceHolder = holder;
        if (!mMediaRecorderRecording) {
            initializeVideo();
            startVideoRecording();
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.d(TAG, "surfaceCreated");
        mSurfaceHolder = holder;
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.d(TAG, "surfaceDestroyed");
        mSurfaceHolder = null;
    }

    @Override
    public void onInfo(MediaRecorder mr, int what, int extra) {
        switch (what) {
        case MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN:
            Log.d(TAG, "MEDIA_RECORDER_INFO_UNKNOWN");
            break;
        case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED:
            Log.d(TAG, "MEDIA_RECORDER_INFO_MAX_DURATION_REACHED");
            break;
        case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED:
            Log.d(TAG, "MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED");
            break;
        }
    }

    @Override
    public void onError(MediaRecorder mr, int what, int extra) {
        if (what == MediaRecorder.MEDIA_RECORDER_ERROR_UNKNOWN) {
            Log.d(TAG, "MEDIA_RECORDER_ERROR_UNKNOWN");
            finish();
        }
    }
}
相关文章推荐
- Android 的视频编码 H263 MP4V H264
- 文章【Android 的视频编码 H263 MP4V H264】的代码实现
- 【Android 的视频编码 H263 MP4V H264】的代码实现
- 文章【Android 的视频编码 H263 MP4V H264】的代码实现
- 【Android 的视频编码 H263 MP4V H264】的代码实现 .
- Android 的视频编码 H263 MP4V H264
- 文章【Android 的视频编码 H263 MP4V H264】的代码实现
- 文章【Android 的视频编码 H263 MP4V H264】的代码实现
- 文章【Android 的视频编码 H263 MP4V H264】的代码实现
- Android 的视频编码 H263 MP4V H264的代码实现
- Android 的视频编码 H263 MP4V H264
- Android 的视频编码 H263 MP4V H264
- android实时录制视频h263编码通过rtp打包udp发送到服务器
- Android 实时视频编码—H.264硬编码
- Android利用mediacodec进行视频H264编码解码播放
- Android MediaRecorder H264 编码实时视频流不能播放(readInt()值太大)以及如何将流实时上传到服务器的方法
- Android视频采集编码颜色格式选择
- Android使用系统API进行音视频编码
- 【流媒體】Android 实时视频采集/编码/传输/解码/播放—方案调研(初)
- Android录屏功能的实现,MediaCodec编码为H264,WebSocket实时传输视频流