Android录屏功能的实现,MediaCodec编码为H264,WebSocket实时传输视频流
2018-01-31 19:13
4423 查看
投屏功能的实现,MediaCodec编解码,MediaProjection获取屏幕视频,WebSocket传视频数据,SurfaceView显示
同时附上摄像头采集MediaCodec实时编解码链接
1.MediaCodec编码:
private void initEncoder (){ MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height); format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); format.setInteger(KEY_BIT_RATE, width * height); format.setInteger(KEY_FRAME_RATE, 20); format.setInteger(KEY_I_FRAME_INTERVAL, 1); MediaCodec vencoder = null; try { vencoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC); } catch (IOException e) { e.printStackTrace(); } vencoder.configure(format, null, null, CONFIGURE_FLAG_ENCODE); Surface surface = vencoder.createInputSurface(); mVirtualDisplay = mMediaProjection.createVirtualDisplay("-display", width, height, 1, DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, surface, null, null); mEncoder = vencoder; } private void onEncodedAvcFrame(ByteBuffer byteBuffer, final MediaCodec.BufferInfo vBufferInfo) { int offset = 4; //判断帧的类型 if (byteBuffer.get(2) == 0x01) { offset = 3; } int type = byteBuffer.get(offset) & 0x1f; /*如果送来的流的第一帧Frame有pps和sps,可以不需要配置format.setByteBuffer的”csd-0” (sps) 和”csd-1”(pps); 否则必须配置相应的pps和sps,通常情况下sps和pps如下 SPS帧和 PPS帧合在了一起发送,PS为 [4,len-8] PPS为后4个字节*/ if (type == NAL_SPS) { sps_pps_buf = new byte[vBufferInfo.size]; byteBuffer.get(sps_pps_buf); } else if (type == NAL_SLICE /* || type == NAL_SLICE_IDR */) { final byte[] bytes = new byte[vBufferInfo.size]; byteBuffer.get(bytes); if (null != screenRecorderListener) { screenRecorderListener.onScreenData(bytes); } Log.v(TAG, "视频数据 " + Arrays.toString(bytes)); } else if (type == NAL_SLICE_IDR) { // I帧,前面添加sps和pps final byte[] bytes = new byte[vBufferInfo.size]; byteBuffer.get(bytes); byte[] newBuf = new byte[sps_pps_buf.length + bytes.length]; System.arraycopy(sps_pps_buf, 0, newBuf, 0, sps_pps_buf.length); System.arraycopy(bytes, 0, newBuf, sps_pps_buf.length, bytes.length); if (null != screenRecorderListener) { screenRecorderListener.onScreenData(newBuf); } Log.v(TAG, "sps pps " + Arrays.toString(sps_pps_buf)); 
Log.v(TAG, "视频数据 " + Arrays.toString(newBuf)); } }
2.MediaProjection获取屏幕数据
public void initScreen() { mMediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE); Intent captureIntent = mMediaProjectionManager.createScreenCaptureIntent(); startActivityForResult(captureIntent, REQUEST_CODE); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (resultCode != RESULT_OK || requestCode != REQUEST_CODE) return; mediaProjection = mMediaProjectionManager.getMediaProjection(resultCode, data); if (mediaProjection == null) { return; } DisplayMetrics dm = getResources().getDisplayMetrics(); mScreenRecorder = new ScreenRecorder(dm.widthPixels, dm.heightPixels, mediaProjection); mScreenRecorder.setScreenRecorderListener(this); //mScreenRecorder.startEncode(); } mVirtualDisplay = mMediaProjection.createVirtualDisplay("-display", width, height, 1, DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, surface, null, null);
3.WebSocket传输数据
服务端: private class MyWebSocketServer extends WebSocketServer { public MyWebSocketServer(int port) { super(new InetSocketAddress(port)); } @Override public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) { mWebSocket = webSocket; Log.v(TAG, "onOpen"); } @Override public void onClose(WebSocket webSocket, int i, String s, boolean b) { Log.v(TAG, "onClose"); } @Override public void onMessage(WebSocket webSocket, String s) { Log.v(TAG, "onMessage" + s); } @Override public void onError(WebSocket webSocket, Exception e) { Log.v(TAG, "onError" ); e.printStackTrace(); } } 客户端: private class MyWebSocketClient extends WebSocketClient { public MyWebSocketClient(URI serverURI) { super(serverURI); } @Override public void onOpen(ServerHandshake serverHandshake) { Log.v(TAG, "连接成功"); runOnUiThread(new Runnable() { @Override public void run() { connectBtn.setVisibility(View.INVISIBLE); Toast.makeText(MainActivity.this, "连接成功", Toast.LENGTH_SHORT).show(); } }); } @Override public void onMessage(String s) { Log.v(TAG, "onMessage:" + s); } @Override public void onMessage(ByteBuffer bytes) { byte[] buf = new byte[bytes.remaining()]; bytes.get(buf); Log.v(TAG, "onMessage+length: " + buf.length); onFrame(buf, 0, buf.length); } @Override public void onClose(int i, String s, boolean b) { Log.v(TAG, "onClose"); runOnUiThread(new Runnable() { @Override public void run() { connectBtn.setVisibility(View.VISIBLE); Toast.makeText(MainActivity.this, "断开连接", Toast.LENGTH_SHORT).show(); } }); } @Override public void onError(Exception e) { Log.v(TAG, "onError " + e); } }
4.MediaCodec解码
public void initDecoder() { try { mCodec = MediaCodec.createDecoderByType(MIME_TYPE); final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT); format.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_WIDTH * VIDEO_HEIGHT); format.setInteger(MediaFormat.KEY_FRAME_RATE, 20); format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); byte[] header_sps = {0, 0, 0, 1, 103, 66, -128, 31, -38, 2, -48, 40, 104, 6, -48, -95, 53}; byte[] header_pps = {0, 0 ,0, 1, 104, -50, 6, -30}; /* 横屏 byte[] header_sps = {0, 0, 0, 1, 103, 66, -128, 31, -38, 1, 64, 22, -24, 6, -48, -95, 53}; byte[] header_pps = {0, 0 ,0, 1, 104, -50, 6, -30};*/ format.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps)); format.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps)); acb9 mCodec.configure(format, mSurfaceView.getHolder().getSurface(), null, 0); mCodec.start(); } catch (IOException e) { e.printStackTrace(); } } public boolean onFrame(byte[] buf, int offset, int length) { ByteBuffer[] inputBuffers = mCodec.getInputBuffers(); int inputBufferIndex = mCodec.dequeueInputBuffer(100); if (inputBufferIndex >= 0) { ByteBuffer inputBuffer = inputBuffers[inputBufferIndex]; inputBuffer.clear(); inputBuffer.put(buf, offset, length); mCodec.queueInputBuffer(inputBufferIndex, 0, length, System.currentTimeMillis(), 0); } else { return false; } MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); int outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 100); while (outputBufferIndex >= 0) { mCodec.releaseOutputBuffer(outputBufferIndex, true); outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0); } return true; }
5.SurfaceView显示视频
mSurfaceView = (SurfaceView) findViewById(R.id.surfaceView1); DisplayMetrics dm = getResources().getDisplayMetrics(); mSurfaceView.getHolder().setFixedSize(dm.widthPixels, dm.heightPixels); mSurfaceView.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); mCodec.configure(format, mSurfaceView.getHolder().getSurface(),
DEMO源码
相关文章推荐
- H264实时编码及NALU,RTP传输----按照RFC3984协议实现H264视频流媒体
- 通过(Node Js||.Net)基于HTML5的WebSocket实现实时视频文字传输(上)
- 通过(Node Js||.Net)基于HTML5的WebSocket实现实时视频文字传输(上)
- 通过(Node Js||.Net)基于HTML5的WebSocket实现实时视频文字传输(上)
- Android利用mediacodec进行视频H264编码解码播放
- [转]通过(Node Js||.Net)基于HTML5的WebSocket实现实时视频文字传输(上)
- 【流媒體】Android 实时视频采集/编码/传输/解码/播放—方案调研(初)
- iOS 简单的视频直播功能开发(实时视音频流录制编码+RTMP传输+实时拉流解码播放)
- iOS 简单的视频直播功能开发(实时视音频流录制编码+RTMP传输+实时拉流解码播放)
- Android 实时视频采集/编码/传输/解码/播放—方案调研
- Android实现录屏直播(三)MediaProjection + VirtualDisplay + librtmp + MediaCodec实现视频编码并推流到rtmp服务器
- Android实现录屏直播(三)MediaProjection + VirtualDisplay + librtmp + MediaCodec实现视频编码并推流到rtmp服务器
- 【流媒體】Android 实时视频采集/编码/传输/解码/播放—方案调研(初)
- 如何实现android实时视频通话功能
- iOS 简单的视频直播功能开发(实时视音频流录制编码+RTMP传输+实时拉流解码播放)
- 通过(Node Js||.Net)基于HTML5的WebSocket实现实时视频文字传输(上)
- Android 实时视频采集/编码/传输/解码/播放 方案调研
- 【流媒體】Android 实时视频采集/编码/传输/解码/播放—方案调研(初)
- Android 实现H264编码,解码。并通过WiFi传输实现视频监控。
- Android 实时视频采集/编码/传输/解码/播放—方案调研