car-eye 车载流媒体开发中数据采集和编码传输
2018-01-30 16:29
381 查看
1
车载流媒体作为一个热点越来越引起大家的重视,car-eye 行车记录仪,是基于RTSP协议开发的开源android程序,支持最多四路视频录像,支持最多四路视频上传。传输视频采用RTSP协议。首先我们介绍一下,视频数据从摄像头获取到编码,到传输给流媒体服务器的过程:
1. 摄像头数据通过previewCallback回调获得YV12格式的数据
设置好基本的预览的参数:
parameters.setPreviewSize(Constants.RECORD_VIDEO_WIDTH, Constants.RECORD_VIDEO_HEIGHT);
parameters.setPreviewFpsRange(20,20);
camera[index].startPreview();
在需要数据的时候打开预览callback:
camera[index].setPreviewCallback(preview[index]);
// Preview callback for camera channel 0: forwards each raw preview frame
// to MediaCodecManager for the encode/upload pipeline.
preview[0] = new PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera1) {
// Delegate the frame to the upload path for channel index 0.
// NOTE(review): uses the outer camera[0], not the camera1 argument — presumably
// intentional for the multi-camera array; verify against the other channels.
MediaCodecManager.getInstance().onPreviewFrameUpload(data,0,camera[0]);
}
};
/**
 * Upload-path preview callback: validates one raw preview frame, hands it to
 * the per-channel video consumer, and returns the buffer to the camera so the
 * preview buffer queue keeps flowing.
 *
 * @param data   raw preview frame (expected YV12, width * height * 3/2 bytes)
 * @param index  camera channel index
 * @param camera the camera that produced the frame
 */
public void onPreviewFrameUpload(byte[] data, int index, Camera camera) {
    if (data == null) {
        // No buffer to recycle or process (recycling null is pointless).
        return;
    }
    // Drop frames whose size does not match the configured upload resolution,
    // e.g. frames delivered before a resolution switch has taken effect.
    if (data.length != Constants.UPLOAD_VIDEO_HEIGHT * Constants.UPLOAD_VIDEO_WIDTH * 3 / 2) {
        camera.addCallbackBuffer(data);
        Log.d("CMD", " onPreviewFrameUpload return" + data.length);
        return;
    }
    MainService.getInstance().SetPreviewValid(index);
    if (mVC[index] != null) {
        // Forward the raw frame to the encoder front-end for this channel.
        mVC[index].onVideo(data, previewFormat);
    } else {
        // No consumer registered: stop receiving further preview callbacks.
        camera.setPreviewCallback(null);
    }
    // Always hand the buffer back so the camera can reuse it.
    camera.addCallbackBuffer(data);
}
}
2. 将YV12数据流传输到编码器,编码为H264后传输:
/**
 * Feeds one raw preview frame into the hardware encoder's input queue.
 * Non-blocking: if no input buffer is free the frame is silently dropped,
 * which is acceptable for a live stream.
 *
 * @param data   raw frame bytes (YV12 at the configured upload resolution)
 * @param format preview pixel format (currently unused by the encoder path)
 * @return always 0
 */
public int onVideo(byte[] data, int format) {
    if (!mVideoStarted) return 0;
    inputBuffers = mMediaCodec.getInputBuffers();
    outputBuffers = mMediaCodec.getOutputBuffers();
    // Timeout 0 => do not wait; drop the frame when the encoder is busy.
    int bufferIndex = mMediaCodec.dequeueInputBuffer(0);
    if (bufferIndex >= 0) {
        ByteBuffer buffer = inputBuffers[bufferIndex];
        buffer.clear();
        buffer.put(data);
        // queueInputBuffer takes an explicit offset/length, so no flip() is
        // needed; presentation time is the current wall clock in microseconds.
        mMediaCodec.queueInputBuffer(bufferIndex, 0, data.length, System.nanoTime() / 1000, 0);
    }
    return 0;
}
/**
 * Encoder drain loop: pulls encoded H.264 buffers from MediaCodec, caches the
 * codec-config (SPS/PPS) bytes, prepends them to every key frame, and pushes
 * each access unit to the RTSP pusher until {@code mVideoStarted} is cleared.
 */
@Override
public void run() {
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int outputBufferIndex;
    byte[] mPpsSps = new byte[0];                      // cached SPS/PPS bytes
    byte[] h264 = new byte[mWidth * mHeight * 3 / 2];  // scratch buffer, grown on demand
    do {
        // Wait up to 30 ms for encoded output.
        outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 30000);
        if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // No output available yet; keep polling.
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // With the deprecated buffer-array API the cached array is stale
            // after this event; refresh it or later indexing reads retired buffers.
            outputBuffers = mMediaCodec.getOutputBuffers();
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // Happens once before the first real buffer; nothing to do here.
        } else if (outputBufferIndex < 0) {
            // Unknown negative status: ignore.
        } else {
            ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
            EasyMuxer muxer = mMuxer;
            if (muxer != null) {
                // Also record the encoded stream to the local muxer when present.
                muxer.pumpStream(outputBuffer, bufferInfo, true);
            }
            boolean sync = false;
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // Codec-config buffer carries SPS/PPS; cache it so it can be
                // prepended to key frames for decoder (re)initialisation.
                sync = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
                if (!sync) {
                    byte[] temp = new byte[bufferInfo.size];
                    outputBuffer.get(temp);
                    mPpsSps = temp;
                    mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                    continue;
                } else {
                    mPpsSps = new byte[0];
                }
            }
            sync |= (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
            int len = mPpsSps.length + bufferInfo.size;
            if (len > h264.length) {
                h264 = new byte[len];
            }
            if (sync) {
                // Key frame: send cached SPS/PPS followed by the frame payload.
                System.arraycopy(mPpsSps, 0, h264, 0, mPpsSps.length);
                outputBuffer.get(h264, mPpsSps.length, bufferInfo.size);
                mPusher.SendBuffer(0, h264, mPpsSps.length + bufferInfo.size, 0, m_index);
            } else {
                // Non-key frame: send the payload alone.
                outputBuffer.get(h264, 0, bufferInfo.size);
                mPusher.SendBuffer(0, h264, bufferInfo.size, 0, m_index);
            }
            mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
        }
    } while (mVideoStarted);
}
其中 mPusher.SendBuffer(0, h264, mPpsSps.length + bufferInfo.size, 0, m_index); 将获得的H264流传输到流媒体平台。
相关代码下载car-eye开源平台网址:https://github.com/Car-eye-admin/
有关技术咨询可以加群590411159。
转载http://blog.csdn.net/car_eye/article/details/79206146
加入CSDN,享受更精准的内容推荐,与500万程序员共同成长!
登录
注册
车载流媒体作为一个热点越来越引起大家的重视,car-eye 行车记录仪,是基于RTSP协议开发的开源android程序,支持最多四路视频录像,支持最多四路视频上传。传输视频采用RTSP协议。首先我们介绍一下,视频数据从摄像头获取到编码,到传输给流媒体服务器的过程:
1. 摄像头数据通过previewCallback回调获得YV12格式的数据
设置好基本的预览的参数:
parameters.setPreviewSize(Constants.RECORD_VIDEO_WIDTH, Constants.RECORD_VIDEO_HEIGHT);
parameters.setPreviewFpsRange(20,20);
camera[index].startPreview();
在需要数据的时候打开预览callback:
camera[index].setPreviewCallback(preview[index]);
// Preview callback for camera channel 0: forwards each raw preview frame
// to MediaCodecManager for the encode/upload pipeline.
preview[0] = new PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera1) {
// Delegate the frame to the upload path for channel index 0.
// NOTE(review): uses the outer camera[0], not the camera1 argument — presumably
// intentional for the multi-camera array; verify against the other channels.
MediaCodecManager.getInstance().onPreviewFrameUpload(data,0,camera[0]);
}
};
/**
 * Upload-path preview callback: validates one raw preview frame, hands it to
 * the per-channel video consumer, and returns the buffer to the camera so the
 * preview buffer queue keeps flowing.
 *
 * @param data   raw preview frame (expected YV12, width * height * 3/2 bytes)
 * @param index  camera channel index
 * @param camera the camera that produced the frame
 */
public void onPreviewFrameUpload(byte[] data, int index, Camera camera) {
    if (data == null) {
        // No buffer to recycle or process (recycling null is pointless).
        return;
    }
    // Drop frames whose size does not match the configured upload resolution,
    // e.g. frames delivered before a resolution switch has taken effect.
    if (data.length != Constants.UPLOAD_VIDEO_HEIGHT * Constants.UPLOAD_VIDEO_WIDTH * 3 / 2) {
        camera.addCallbackBuffer(data);
        Log.d("CMD", " onPreviewFrameUpload return" + data.length);
        return;
    }
    MainService.getInstance().SetPreviewValid(index);
    if (mVC[index] != null) {
        // Forward the raw frame to the encoder front-end for this channel.
        mVC[index].onVideo(data, previewFormat);
    } else {
        // No consumer registered: stop receiving further preview callbacks.
        camera.setPreviewCallback(null);
    }
    // Always hand the buffer back so the camera can reuse it.
    camera.addCallbackBuffer(data);
}
}
2. 将YV12数据流传输到编码器,编码为H264后传输:
/**
 * Feeds one raw preview frame into the hardware encoder's input queue.
 * Non-blocking: if no input buffer is free the frame is silently dropped,
 * which is acceptable for a live stream.
 *
 * @param data   raw frame bytes (YV12 at the configured upload resolution)
 * @param format preview pixel format (currently unused by the encoder path)
 * @return always 0
 */
public int onVideo(byte[] data, int format) {
    if (!mVideoStarted) return 0;
    inputBuffers = mMediaCodec.getInputBuffers();
    outputBuffers = mMediaCodec.getOutputBuffers();
    // Timeout 0 => do not wait; drop the frame when the encoder is busy.
    int bufferIndex = mMediaCodec.dequeueInputBuffer(0);
    if (bufferIndex >= 0) {
        ByteBuffer buffer = inputBuffers[bufferIndex];
        buffer.clear();
        buffer.put(data);
        // queueInputBuffer takes an explicit offset/length, so no flip() is
        // needed; presentation time is the current wall clock in microseconds.
        mMediaCodec.queueInputBuffer(bufferIndex, 0, data.length, System.nanoTime() / 1000, 0);
    }
    return 0;
}
/**
 * Encoder drain loop: pulls encoded H.264 buffers from MediaCodec, caches the
 * codec-config (SPS/PPS) bytes, prepends them to every key frame, and pushes
 * each access unit to the RTSP pusher until {@code mVideoStarted} is cleared.
 */
@Override
public void run() {
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int outputBufferIndex;
    byte[] mPpsSps = new byte[0];                      // cached SPS/PPS bytes
    byte[] h264 = new byte[mWidth * mHeight * 3 / 2];  // scratch buffer, grown on demand
    do {
        // Wait up to 30 ms for encoded output.
        outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 30000);
        if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // No output available yet; keep polling.
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // With the deprecated buffer-array API the cached array is stale
            // after this event; refresh it or later indexing reads retired buffers.
            outputBuffers = mMediaCodec.getOutputBuffers();
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // Happens once before the first real buffer; nothing to do here.
        } else if (outputBufferIndex < 0) {
            // Unknown negative status: ignore.
        } else {
            ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
            EasyMuxer muxer = mMuxer;
            if (muxer != null) {
                // Also record the encoded stream to the local muxer when present.
                muxer.pumpStream(outputBuffer, bufferInfo, true);
            }
            boolean sync = false;
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // Codec-config buffer carries SPS/PPS; cache it so it can be
                // prepended to key frames for decoder (re)initialisation.
                sync = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
                if (!sync) {
                    byte[] temp = new byte[bufferInfo.size];
                    outputBuffer.get(temp);
                    mPpsSps = temp;
                    mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                    continue;
                } else {
                    mPpsSps = new byte[0];
                }
            }
            sync |= (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
            int len = mPpsSps.length + bufferInfo.size;
            if (len > h264.length) {
                h264 = new byte[len];
            }
            if (sync) {
                // Key frame: send cached SPS/PPS followed by the frame payload.
                System.arraycopy(mPpsSps, 0, h264, 0, mPpsSps.length);
                outputBuffer.get(h264, mPpsSps.length, bufferInfo.size);
                mPusher.SendBuffer(0, h264, mPpsSps.length + bufferInfo.size, 0, m_index);
            } else {
                // Non-key frame: send the payload alone.
                outputBuffer.get(h264, 0, bufferInfo.size);
                mPusher.SendBuffer(0, h264, bufferInfo.size, 0, m_index);
            }
            mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
        }
    } while (mVideoStarted);
}
其中 mPusher.SendBuffer(0, h264, mPpsSps.length + bufferInfo.size, 0, m_index); 将获得的H264流传输到流媒体平台。
相关代码下载car-eye开源平台网址:https://github.com/Car-eye-admin/
有关技术咨询可以加群590411159。
转载http://blog.csdn.net/car_eye/article/details/79206146
加入CSDN,享受更精准的内容推荐,与500万程序员共同成长!
登录
注册
相关文章推荐
- car-eye 车载流媒体开发中数据采集和编码传输
- <车载物联网项目,视频采集传输部分,第三天> 使用ffserver输出视频文件流,并且使用使用命令行终端播放视频文件
- Spring构建Web应用部署至Tomcat开发过程中的数据编码问题
- DriverStudio开发PCI设备DMA数据传输
- TCP/UDP传输摄像头采集的数据(opencv)
- 贴吧系统开发中遇到的几个问题——数据传输限制
- [体感游戏] 1、MPU6050数据采集传输与可视化
- STM32的ADC1+ADC3 16路采集 DMA传输 数据不错位
- JXTA下的媒体数据传输 - [Matrix - 与 Java 共舞]
- 关于http接口开发中json格式数据编码问题处理
- 用jrtplib对媒体数据进行传输
- DriverStudio开发PCI设备DMA数据传输
- android开发 在JNI函数中引用Java类,将cxx复杂数据转换成jobjectArray,实现从JNI到Java的数据传输
- 在VB下设计开发实时的数据采集曲线
- Android开发——不同Activity之间的切换和数据传输
- 记一次WEB数据采集程序开发经历——对付简单的动态加载
- 基于HTML5 Bootstrap搭建的后台模板,分页,模糊查询已经全部JS实现,无需编码,嵌入数据即可开发,内置8款皮肤,欧美风格,非常好用!
- WCF分布式开发常见错误(2)无法从传输连接中读取数据: 远程主机强迫关闭了一个现有的连接
- android平台Camera采集数据ffmpeg进行编码探究
- JAX-WS Web 服务开发调用和数据传输分析