
Android NDK Development: Obtaining a Surface in the Native Layer and Displaying an Image (Thought Process)

2013-03-12 21:19
I recently joined a project that needs to obtain a Surface in the native layer and display images on it, so I searched through a great deal of material online.

I finally found a working code implementation, but compiling and linking it turned out to be the real problem. I had been learning Android programming for less than a month, and this was a situation I had never run into before:

the build needs the Android source code and framework libraries (.so) for compilation and linking.

Let me post the code first.

First, the JNI-layer code:

#define TAG "DisplayYUV"
//#define BUFSIZE 720*1038*2
#define BUFSIZE 176*144
#include <string.h>
#include <android/log.h>
#include <jni.h>
#include <android/bitmap.h>
//#include <android/surface.h>
#include <gui/Surface.h>
#include <ui/Region.h>
#include <utils/RefBase.h>
#include <cstdio>
using namespace android;
static sp<Surface> native_surface;
static android::Surface::SurfaceInfo info;
static android::Region dirtyRegion;
char buf[BUFSIZE] = {0};
char * bufYUV = NULL;
FILE *fp = NULL;
jsize len = 0;
extern "C"
{

int setSurface(JNIEnv *env, jobject jsurface, jint version);
void Java_com_example_displayyuv_MainActivity_test(JNIEnv *env, jobject thiz, jobject jsurface)
{
__android_log_print(ANDROID_LOG_INFO, TAG, "get good cpp test surface!");
setSurface(env, jsurface, 9);
//dirtyRegion.set(android::Rect(0x3fff, 0x3fff));
dirtyRegion.set(android::Rect(0, 0, 176, 144));
status_t err = native_surface->lock(&info, &dirtyRegion);
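// After lock() succeeds, info describes the locked buffer: info.w / info.h are
// its size, info.format is a PIXEL_FORMAT_* value and info.bits points to the pixels.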
int sformat;
switch(info.format)
{
case PIXEL_FORMAT_RGBA_8888:
case PIXEL_FORMAT_RGBX_8888:
case PIXEL_FORMAT_BGRA_8888:
sformat = 4;
break;
case PIXEL_FORMAT_RGB_888:
sformat = 3;
break;

case PIXEL_FORMAT_RGB_565:
case PIXEL_FORMAT_RGBA_5551:
case PIXEL_FORMAT_RGBA_4444:
sformat = 2;
break;

default:
sformat = -1;
}
sprintf(buf, "width - %d -- height %d-- format %d---Locked -- %d", info.w, info.h, sformat, err);
// Pass buf as an argument rather than as the format string.
__android_log_print(ANDROID_LOG_INFO, TAG, "%s", buf);
memset(buf, 0x77, BUFSIZE);
memcpy(info.bits, buf, BUFSIZE);
//memcpy(info.bits, bufYUV, len);
native_surface->unlockAndPost();
}

static android::Surface* getNativeSurface(JNIEnv* env, jobject jsurface, jint version)
{
jclass clazz = env->FindClass("android/view/Surface");
jfieldID field_surface;
if(version <=8)
{
field_surface = env->GetFieldID(clazz, "mSurface", "I");
}
else
field_surface = env->GetFieldID(clazz, ANDROID_VIEW_SURFACE_JNI_ID, "I");

if (field_surface == NULL)
{
return NULL;
}
return (android::Surface *) env->GetIntField(jsurface, field_surface);
}

int setSurface(JNIEnv *env, jobject jsurface, jint version)
{
native_surface = getNativeSurface(env, jsurface, version);

if(android::Surface::isValid(native_surface))
{
__android_log_print(ANDROID_LOG_INFO, "libjni", "native_surface is valid");
return 1;
}
else
__android_log_print(ANDROID_LOG_ERROR, "libjni", "native_surface is invalid");

return 0;
}
void Java_com_example_displayyuv_MainActivity_drawYUV(JNIEnv *env, jobject thiz, jintArray yuvData)
{
len = env->GetArrayLength(yuvData);
jint *byteBuf = env->GetIntArrayElements(yuvData, 0);
// Note: the elements are never released here; copying the data and calling
// ReleaseIntArrayElements() afterwards would be safer.
bufYUV = (char *)byteBuf;
}
}
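
As an aside: on NDK platforms android-9 and later, the public ANativeWindow API (headers <android/native_window_jni.h> and <android/native_window.h>, linked with -landroid) gives the same lock/fill/post flow without touching the private Surface class or pulling framework .so files off a device. The following is only a minimal sketch under that assumption, not the approach used in this article; the drawGray name and the gray fill are just for illustration.

#include <jni.h>
#include <cstring>
#include <android/native_window.h>
#include <android/native_window_jni.h>

// Fill a Surface with gray through the public NDK ANativeWindow API.
void drawGray(JNIEnv *env, jobject jsurface)
{
    // Wrap the Java android.view.Surface in an ANativeWindow.
    ANativeWindow *window = ANativeWindow_fromSurface(env, jsurface);
    if (window == NULL)
        return;

    // Request a 176x144 RGB565 buffer to match the QCIF size used above.
    ANativeWindow_setBuffersGeometry(window, 176, 144, WINDOW_FORMAT_RGB_565);

    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, NULL) == 0) {
        // buffer.stride is in pixels; RGB565 uses 2 bytes per pixel.
        memset(buffer.bits, 0x77, buffer.stride * buffer.height * 2);
        ANativeWindow_unlockAndPost(window);
    }
    ANativeWindow_release(window);
}

With this route the Android.mk only needs LOCAL_LDLIBS += -llog -landroid and no include paths into the platform source tree.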

Next, the Java-layer code:

package com.example.displayyuv;

import java.io.File;
import java.io.FileInputStream;

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;

public class MainActivity extends Activity {

private static final String ACTIVITY_TAG = "displayYUV";
final String FILE_NAME = "/akiyo_qcif.yuv";
final int width = 176;
final int height = 144;
final int size = (int) (width * height * 1.5);
byte[] yuvBuffer = new byte[size];
// final Builder builder = new AlertDialog.Builder(this);

private SurfaceHolder holder;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// TextView tv = new TextView(this);
// tv.setText( stringFromJNI() );
// setContentView(tv);
// My Code

Log.v(ACTIVITY_TAG, stringFromJNI());
SurfaceView surface = (SurfaceView) findViewById(R.id.show);
// Initialize the SurfaceHolder
holder = surface.getHolder();
holder.addCallback(new Callback() {
@Override
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2,
int arg3) {
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
setVideoSurface(holder.getSurface());
//test(holder.getSurface());
if (read(yuvBuffer)) {
// Set the dialog icon
// builder.setIcon(R.drawable.tools);
// Set the dialog title
// builder.setTitle("Custom plain dialog");
// // Set the dialog message
// builder.setMessage("A simple alert dialog");
// // Create and show the dialog
// builder.create().show();
// }
// Lock the whole SurfaceView
// Canvas canvas = holder.lockCanvas();
// canvas.drawColor(Color.WHITE);
// // Draw the background
// // Bitmap back = BitmapFactory.decodeResource(
// // SurfaceViewTest.this.getResources(), R.drawable.sun);
// // Draw the background
// // canvas.drawBitmap(back, 0, 0, null);
// // Drawing done, release the canvas and post the changes
// holder.unlockCanvasAndPost(canvas);
// yourFunction(yuvBuffer, width, height);
// // Lock once more to "persist" what was drawn last time
// holder.lockCanvas(new Rect(0, 0, 0, 0));
// holder.unlockCanvasAndPost(canvas);
int[] mIntArray = new int[width * height];

// Decode Yuv data to integer array
// decodeYUV420SP(mIntArray, data, mWidth, mHeight);
convertYUV420_NV21toRGB8888(mIntArray, yuvBuffer, width, height);
drawYUV(mIntArray);
test(holder.getSurface());
}
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
});
}

private boolean read(byte[] yuvArray) {
try {
// If an SD card is mounted and the app has permission to access it
if (Environment.getExternalStorageState().equals(
Environment.MEDIA_MOUNTED)) {
// Get the SD card's root directory
File sdCardDir = Environment.getExternalStorageDirectory();
// Open an input stream on the YUV file
FileInputStream fis = new FileInputStream(
sdCardDir.getCanonicalPath() + FILE_NAME);
fis.read(yuvArray, 0, size);
fis.close();
return true;
} else {
return false;
}
}

catch (Exception e) {
e.printStackTrace();
return false;
}
}

void yourFunction(byte[] data, int mWidth, int mHeight) {

int[] mIntArray = new int[mWidth * mHeight];

// Decode Yuv data to integer array
// decodeYUV420SP(mIntArray, data, mWidth, mHeight);
convertYUV420_NV21toRGB8888(mIntArray, data, mWidth, mHeight);

// Initialize the bitmap, with the replaced color
Bitmap bmp = Bitmap.createBitmap(mIntArray, mWidth, mHeight,
Bitmap.Config.ARGB_8888);

Canvas canvas = holder.lockCanvas();
canvas.drawBitmap(bmp, 0, 0, null);
holder.unlockCanvasAndPost(canvas);
// Draw the bitmap with the replaced color
// iv.setImageBitmap(bmp);
// ByteArrayOutputStream out = new ByteArrayOutputStream();
// YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, width,
// height, null);
// yuvImage.compressToJpeg(new Rect(0, 0, width, height), 50, out);
// byte[] imageBytes = out.toByteArray();
// Bitmap bmp = BitmapFactory.decodeByteArray(imageBytes, 0,
// imageBytes.length);
// Canvas canvas = holder.lockCanvas();
// canvas.drawBitmap(bmp, 0, 0, null);
// holder.unlockCanvasAndPost(canvas);
}

static public void decodeYUV420SP(int[] rgba, byte[] yuv420sp, int width,
int height) {
final int frameSize = width * height;

for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}

int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);

if (r < 0)
r = 0;
else if (r > 262143)
r = 262143;
if (g < 0)
g = 0;
else if (g > 262143)
g = 262143;
if (b < 0)
b = 0;
else if (b > 262143)
b = 262143;

// rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) &
// 0xff00) | ((b >> 10) & 0xff);
// rgba, divide 2^10 ( >> 10)
rgba[yp] = ((r << 14) & 0xff000000) | ((g << 6) & 0xff0000)
| ((b >> 2) & 0xff00) | 0xff;
}
}
}
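
For reference, the integer constants above are the usual video-range (BT.601) coefficients scaled by 1024: 1192/1024 ≈ 1.164, 1634/1024 ≈ 1.596, 833/1024 ≈ 0.813, 400/1024 ≈ 0.391 and 2066/1024 ≈ 2.018, i.e. R = 1.164*(Y-16) + 1.596*(V-128), G = 1.164*(Y-16) - 0.813*(V-128) - 0.391*(U-128), B = 1.164*(Y-16) + 2.018*(U-128), with each result clamped to 262143 (= 256*1024 - 1) before being shifted back down by 10 bits.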

/**
* Converts YUV420 NV21 to RGB8888.
*
* @param rgba
* output int array; each int holds one pixel's packed color.
* @param data
* byte array in YUV420 NV21 format.
* @param width
* width in pixels
* @param height
* height in pixels
*/
public static void convertYUV420_NV21toRGB8888(int[] rgba, byte[] data,
int width, int height) {
int size = width * height;
int offset = size;
// int[] pixels = new int[size];
int u, v, y1, y2, y3, y4;

// i walks the Y samples and the output pixels
// k walks the U and V samples
for (int i = 0, k = 0; i < size; i += 2, k += 2) {
y1 = data[i] & 0xff;
y2 = data[i + 1] & 0xff;
y3 = data[width + i] & 0xff;
y4 = data[width + i + 1] & 0xff;

u = data[offset + k] & 0xff;
v = data[offset + k + 1] & 0xff;
u = u - 128;
v = v - 128;

rgba[i] = convertYUVtoRGB(y1, u, v);
rgba[i + 1] = convertYUVtoRGB(y2, u, v);
rgba[width + i] = convertYUVtoRGB(y3, u, v);
rgba[width + i + 1] = convertYUVtoRGB(y4, u, v);

if (i != 0 && (i + 2) % width == 0)
i += width;
}

// return pixels;
}

private static int convertYUVtoRGB(int y, int u, int v) {
int r, g, b;

// The cast must wrap the whole product; (int) 1.402f alone truncates to 1
r = y + (int) (1.402f * v);
g = y - (int) (0.344f * u + 0.714f * v);
b = y + (int) (1.772f * u);
r = r > 255 ? 255 : r < 0 ? 0 : r;
g = g > 255 ? 255 : g < 0 ? 0 : g;
b = b > 255 ? 255 : b < 0 ? 0 : b;
return 0xff000000 | (b << 16) | (g << 8) | r;
}
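
For reference, 1.402, 0.344, 0.714 and 1.772 are the standard full-range YCbCr-to-RGB coefficients (R = Y + 1.402*V, G = Y - 0.344*U - 0.714*V, B = Y + 1.772*U). Also note how the pixel is packed: red lands in the low byte and blue in bits 16 to 23, which matches an RGBA byte buffer on a little-endian device but appears byte-swapped relative to the ARGB ints that Bitmap.createBitmap expects, so colors may come out swapped when this output is drawn through a Bitmap.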

// @Override
// public boolean onCreateOptionsMenu(Menu menu) {
// // Inflate the menu; this adds items to the action bar if it is present.
// getMenuInflater().inflate(R.menu.activity_main, menu);
// return true;
// }
public native String stringFromJNI();

private native void setVideoSurface(Surface surface);

private native void test(Surface surface);

private native void test2(Surface surface);
private native void drawYUV(int[] yuvData);
static {
System.loadLibrary("drawSurface");
}
}

Finally, the Android.mk file:

# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#LOCAL_LDFLAGS += -lstdc++

LOCAL_PATH := $(call my-dir)

MY_ANDROID_SOURCE := D:/Android_Source/mydroid

include $(CLEAR_VARS)

LOCAL_LDLIBS := -llog \
    D:/app/libutils.so \
    D:/app/libgui.so \
    D:/app/libui.so

LOCAL_MODULE := drawSurface

LOCAL_SRC_FILES := drawSurface.c
LOCAL_SRC_FILES += \
    invoked.cpp

LOCAL_SHARED_LIBRARIES := \
    libskia \
    libsurfaceflinger \
    libgui \
    D:/app/libutils \
    D:/app/libgui

LOCAL_C_INCLUDES += \
    $(MY_ANDROID_SOURCE)/frameworks/native/include \
    $(MY_ANDROID_SOURCE)/system/core/include \
    $(MY_ANDROID_SOURCE)/hardware/libhardware/include \
    $(JNI_H_INCLUDE)

include $(BUILD_SHARED_LIBRARY)

The thought process

After finding the JNI-layer code online, I started trying to compile and link it.

Since the Surface class is not defined in the NDK headers, the only option is to download the Android source code.

It took reading quite a few posts before that finally clicked for me.

After adding the include paths to the .mk file one by one, I then ran into unresolved symbols for several functions.

What to do?

First, based on the linker errors, find out which header declares the missing function; then go to the folder that holds the corresponding .cpp file and open the .mk file in that folder

to see which library it is built into; then use adb to pull that library off the emulator (for example, adb pull /system/lib/libgui.so for the Surface code)

and add its path to the .mk file.

It finally worked,

although drawing to a specific region of the surface still has problems.

The key idea:

When I first hit the unresolved functions, a senior labmate said they must be inside the Android .so libraries.

How to find the right .so? The .mk files in the source tree came to mind.

And sure enough, that worked.

General takeaway:

When you hit a problem, write down every idea you can think of, one by one.

Then try them one at a time. Be careful and patient.