Native Camera和OpenGL ES的混合使用

来源:互联网 发布:淘宝怎么样做爆款 编辑:程序博客网 时间:2024/05/16 08:46

      由于android各个版本中相机实现的差异很大,所以google并没有提供相机的原生api。网上在JNI处理Camera的Preview Data,大多是用SurfaceView来实现:先将Camera的Preview传递到JNI,然后再进行处理。这样导致一个问题:如果要将处理完的数据再显示出来,就必须额外增加一个SurfaceView或者ImageView控件,对系统资源消耗比较大,而且效率并不比Java快。

      OpenCV 能够在JNI调用Camera,看起来是绕过了Java Camera API,通过Android 的本地框架,实现了原生Camera的调用。

      在这里提供一个原生Camera与OpenGL使用的例子,原理是通过OpenCV调用Camera,并将Camera数据作为纹理交给OpenGL渲染,通过glTexSubImage2D来更新纹理。为了保证OpenGL和OpenCV的同步(也就是将OpenCV的每一帧同步到OpenGL的纹理中),使用了pthread的锁来进行控制。感谢Github的原作者Mesai

 Java Code 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
package com.blogspot.mesai0;

import java.util.List;

import org.opencv.android.CameraBridgeViewBase.ListItemAccessor;
import org.opencv.android.NativeCameraView.OpenCvSizeAccessor;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.view.Display;

public class CameraPreviewer extends Activity {

    GLSurfaceView mView;
    
    @Override protected void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        Native.loadlibs();
        VideoCapture mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
        java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();
        mCamera.release();
 
        mView = new GLSurfaceView(getApplication()){
         @Override
         public void onPause() {
          // TODO Auto-generated method stub
          super.onPause();
           Native.releaseCamera();
         }
        };
        Size size = calculateCameraFrameSize(sizes,new OpenCvSizeAccessor());
        mView.setRenderer(new CameraRenderer(this,size));
        setContentView(mView);
    }
    
 protected Size calculateCameraFrameSize(List supportedSizes,
   ListItemAccessor accessor) {
  int calcWidth = Integer.MAX_VALUE;
  int calcHeight = Integer.MAX_VALUE;

  Display display = getWindowManager().getDefaultDisplay();

  int maxAllowedWidth = 1024;
  int maxAllowedHeight = 1024;

  for (Object size : supportedSizes) {
   int width = accessor.getWidth(size);
   int height = accessor.getHeight(size);

   if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
    if ( width <= calcWidth 
      && width>=(maxAllowedWidth/2)
      &&(display.getWidth()%width==0||display.getHeight()%height==0)) {
     calcWidth = (int) width;
     calcHeight = (int) height;
    }
   }
  }

  return new Size(calcWidth, calcHeight);
 }
    @Override protected void onPause() {
        super.onPause();
        mView.onPause();
       
    }

    @Override protected void onResume() {
        super.onResume();
        mView.onResume();
        
    }
}

 Java Code 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
package com.blogspot.mesai0;
 
 
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
 
import org.opencv.core.Size;
 
import android.content.Context;
import android.opengl.GLSurfaceView.Renderer;
 
 
public class CameraRenderer implements Renderer {
 
 private Size size;
 private Context context;
 public CameraRenderer(Context c,Size size) {
  super();
  context = c;
  this.size = size;
 }
  
 public void onSurfaceCreated(GL10 gl, EGLConfig config) {
  Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
  Native.initCamera((int)size.width,(int)size.height);
 }
 
 public void onDrawFrame(GL10 gl) {
  Native.renderBackground();
 }
  
 public void onSurfaceChanged(GL10 gl, int width, int height) {
  Native.surfaceChanged(width,height,context.getResources().getConfiguration().orientation);
 }

 Java Code 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
package com.blogspot.mesai0;

public class Native {
    public static void loadlibs() {
        System.loadLibrary("opencv_java");
        System.loadLibrary("NativeCamera");
    }

    public static native void initCamera(int width, int height);

    public static native void releaseCamera();

    public static native void renderBackground();

    public static native void surfaceChanged(int width, int height,
            int orientation);
}

 C++ Code 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
#include <jni.h>
#include <GLES/gl.h>
#include <GLES/glext.h>
#include <android/log.h>
#include <pthread.h>
#include <string.h>  // memset
#include <time.h>
#include <math.h>    // tan; was <Math.h>, which fails on case-sensitive filesystems
#include <opencv/cv.h>
#include <opencv2/highgui/highgui.hpp>

// Utility for logging:
#define LOG_TAG    "CAMERA_RENDERER"
#define LOG(...)  __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)

// --- State shared between the GL render thread and the grabber thread ---
GLuint texture;            // 1024x1024 RGB565 texture the frames are blitted into
cv::VideoCapture capture;  // OpenCV native camera
cv::Mat buffer[30];        // ring buffer of captured frames (guarded by FGmutex)
cv::Mat rgbFrame;          // flipped RGB565 frame handed to glTexSubImage2D
cv::Mat inframe;           // raw frame read by the grabber thread
cv::Mat outframe;          // RGB565-converted frame (render thread only)
int bufferIndex;           // write index into buffer[] (monotonically increasing)
int rgbIndex;              // NOTE(review): never used in this file
int frameWidth;
int frameHeight;
int screenWidth;
int screenHeight;
int orientation;           // 1 = portrait path, 2 = landscape (see surfaceChanged)
pthread_mutex_t FGmutex;   // protects buffer[] between grabber and renderer
pthread_t frameGrabber;
pthread_attr_t attr;       // NOTE(review): shadowed by a local in initCamera
struct sched_param param;

// Full-screen quad as a triangle strip (V1, V2, V3, V4).
GLfloat vertices[] = { -1.0f, -1.0f, 0.0f, // V1 - bottom left
        -1.0f, 1.0f, 0.0f, // V2 - top left
        1.0f, -1.0f, 0.0f, // V3 - bottom right
        1.0f, 1.0f, 0.0f // V4 - top right
        };

// Texture coordinates, filled in by createTexture() so the frame is
// centered inside the 1024x1024 texture.
GLfloat textures[8];

extern "C" {

void drawBackground();
void createTexture();
void destroyTexture();
void *frameRetriever(void*);

JNIEXPORT void JNICALL Java_com_blogspot_mesai0_Native_initCamera(JNIEnv*,
        jobject, jint width, jint height) {
    LOG("Camera Created");
    capture.open(CV_CAP_ANDROID + 0);
    capture.set(CV_CAP_PROP_FRAME_WIDTH, width);
    capture.set(CV_CAP_PROP_FRAME_HEIGHT, height);
    frameWidth = width;
    frameHeight = height;
    LOG("frameWidth = %d", frameWidth);
    LOG("frameHeight = %d", frameHeight);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glShadeModel(GL_SMOOTH);
    glClearDepthf(1.0f);
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
    pthread_attr_setschedpolicy(&attr, SCHED_FIFO);
    memset(¶m, 0sizeof(param));
    param.sched_priority = 100;
    pthread_attr_setschedparam(&attr, ¶m);
    pthread_create(&frameGrabber, &attr, frameRetriever, NULL);
    pthread_attr_destroy(&attr);

}

JNIEXPORT void JNICALL Java_com_blogspot_mesai0_Native_surfaceChanged(JNIEnv*,
        jobject, jint width, jint height, jint orien) {
    LOG("Surface Changed");
    glViewport(00, width, height);
    if (orien == 1) {
        screenWidth = width;
        screenHeight = height;
        orientation = 1;
    } else {
        screenWidth = height;
        screenHeight = width;
        orientation = 2;
    }

    LOG("screenWidth = %d", screenWidth);
    LOG("screenHeight = %d", screenHeight);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    float aspect = screenWidth / screenHeight;
    float bt = (float) tan(45 / 2);
    float lr = bt * aspect;
    glFrustumf(-lr * 0.1f, lr * 0.1f, -bt * 0.1f, bt * 0.1f, 0.1f, 100.0f);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glEnable(GL_TEXTURE_2D);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClearDepthf(1.0f);
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LEQUAL);
    createTexture();
}

/*
 * Shuts down the camera. Closing the capture makes capture.isOpened()
 * return false, which is what terminates the frameRetriever loop; the
 * texture is then freed on this (GL) thread.
 */
JNIEXPORT void JNICALL Java_com_blogspot_mesai0_Native_releaseCamera(JNIEnv*,
        jobject) {
    LOG("Camera Released");
    capture.release();
    destroyTexture();

}

void createTexture() {
    textures[0] = ((1024.0f - frameWidth * 1.0f) / 2.0f) / 1024.0f;
    textures[1] = ((1024.0f - frameHeight * 1.0f) / 2.0f) / 1024.0f
            + (frameHeight * 1.0f / 1024.0f);
    textures[2] = ((1024.0f - frameWidth * 1.0f) / 2.0f) / 1024.0f
            + (frameWidth * 1.0f / 1024.0f);
    textures[3] = ((1024.0f - frameHeight * 1.0f) / 2.0f) / 1024.0f
            + (frameHeight * 1.0f / 1024.0f);
    textures[4] = ((1024.0f - frameWidth * 1.0f) / 2.0f) / 1024.0f;
    textures[5] = ((1024.0f - frameHeight * 1.0f) / 2.0f) / 1024.0f;
    textures[6] = ((1024.0f - frameWidth * 1.0f) / 2.0f) / 1024.0f
            + (frameWidth * 1.0f / 1024.0f);
    textures[7] = ((1024.0f - frameHeight * 1.0f) / 2.0f) / 1024.0f;
    LOG("Texture Created");
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 102410240, GL_RGB,
            GL_UNSIGNED_SHORT_5_6_5, NULL);
    glBindTexture(GL_TEXTURE_2D, 0);
}

/* Frees the frame texture; must run on the GL thread. */
void destroyTexture() {
    LOG("Texture destroyed");
    glDeleteTextures(1, &texture);
}

/* JNI trampoline: called once per GL frame from CameraRenderer.onDrawFrame. */
JNIEXPORT void JNICALL Java_com_blogspot_mesai0_Native_renderBackground(JNIEnv*,
        jobject) {
    drawBackground();
}

void drawBackground() {
    glClear(GL_COLOR_BUFFER_BIT);
    glBindTexture(GL_TEXTURE_2D, texture);
    if (bufferIndex > 0) {
        pthread_mutex_lock(&FGmutex);
        cvtColor(buffer[(bufferIndex - 1) % 30], outframe, CV_BGR2BGR565);
        pthread_mutex_unlock(&FGmutex);
        cv::flip(outframe, rgbFrame, 1);
        if (texture != 0)
            glTexSubImage2D(GL_TEXTURE_2D, 0, (1024 - frameWidth) / 2,
                    (1024 - frameHeight) / 2, frameWidth, frameHeight, GL_RGB,
                    GL_UNSIGNED_SHORT_5_6_5, rgbFrame.ptr());
    }
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glLoadIdentity();
    if (orientation != 1) {
        glRotatef(90001);
    }
    // Set the face rotation
    glFrontFace(GL_CW);
    // Point to our vertex buffer
    glVertexPointer(3, GL_FLOAT, 0, vertices);
    glTexCoordPointer(2, GL_FLOAT, 0, textures);
    // Draw the vertices as triangle strip
    glDrawArrays(GL_TRIANGLE_STRIP, 04);
    //Disable the client state before leaving
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}

/*
 * Grabber thread body: reads frames as fast as the camera delivers them
 * and writes each into the ring buffer under FGmutex. The loop exits when
 * releaseCamera() closes the capture.
 * NOTE(review): bufferIndex is read without the lock in drawBackground();
 * the 30-slot ring makes a torn read unlikely to matter, but it is not
 * strictly race-free.
 */
void *frameRetriever(void*) {
    while (capture.isOpened()) {
        capture.read(inframe);
        if (!inframe.empty()) {
            pthread_mutex_lock(&FGmutex);
            inframe.copyTo(buffer[(bufferIndex++) % 30]);
            pthread_mutex_unlock(&FGmutex);
        }
    }
    LOG("Camera Closed");
    pthread_exit(NULL);
}

}

android.mk

 C++ Code 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
LOCAL_PATH := $(call my-dir)

# Prebuilt OpenCV Java bindings (also provides the C++ API we link against).
include $(CLEAR_VARS)
LOCAL_MODULE := opencv-prebuilt
LOCAL_SRC_FILES = build/libs/$(TARGET_ARCH_ABI)/libopencv_java.so
include $(PREBUILT_SHARED_LIBRARY)
 
# One native-camera shim per Android release; OpenCV dlopen()s the one
# matching the device at runtime, which avoids crashes across OS versions.
include $(CLEAR_VARS)
LOCAL_MODULE := camera1-prebuilt
LOCAL_SRC_FILES = build/libs/$(TARGET_ARCH_ABI)/libnative_camera_r4.2.0.so
include $(PREBUILT_SHARED_LIBRARY)
 
include $(CLEAR_VARS)
LOCAL_MODULE := camera2-prebuilt
LOCAL_SRC_FILES = build/libs/$(TARGET_ARCH_ABI)/libnative_camera_r4.1.1.so
include $(PREBUILT_SHARED_LIBRARY)
 
include $(CLEAR_VARS)
LOCAL_MODULE := camera3-prebuilt
LOCAL_SRC_FILES = build/libs/$(TARGET_ARCH_ABI)/libnative_camera_r4.0.3.so
include $(PREBUILT_SHARED_LIBRARY)
 
include $(CLEAR_VARS)
LOCAL_MODULE := camera4-prebuilt
LOCAL_SRC_FILES = build/libs/$(TARGET_ARCH_ABI)/libnative_camera_r4.0.0.so
include $(PREBUILT_SHARED_LIBRARY)
 
include $(CLEAR_VARS)
LOCAL_MODULE := camera5-prebuilt
LOCAL_SRC_FILES = build/libs/$(TARGET_ARCH_ABI)/libnative_camera_r3.0.1.so
include $(PREBUILT_SHARED_LIBRARY)
 
include $(CLEAR_VARS)
LOCAL_MODULE := camera6-prebuilt
LOCAL_SRC_FILES = build/libs/$(TARGET_ARCH_ABI)/libnative_camera_r2.3.3.so
include $(PREBUILT_SHARED_LIBRARY)
 
include $(CLEAR_VARS)
LOCAL_MODULE := camera7-prebuilt
LOCAL_SRC_FILES = build/libs/$(TARGET_ARCH_ABI)/libnative_camera_r2.2.0.so
include $(PREBUILT_SHARED_LIBRARY)
 
# The JNI module itself: links OpenCV plus GLES 1.x and the Android logger.
include $(CLEAR_VARS)
OPENGLES_LIB  := -lGLESv1_CM
OPENGLES_DEF  := -DUSE_OPENGL_ES_1_1
LOCAL_MODULE    := NativeCamera
LOCAL_SHARED_LIBRARIES := opencv-prebuilt 
LOCAL_SRC_FILES := CameraRenderer.cpp
LOCAL_LDLIBS +=  $(OPENGLES_LIB) -llog -ldl
 
include $(BUILD_SHARED_LIBRARY)

application.mk

 C++ Code 
1
2
3
4
5
# GNU STL is required for the OpenCV C++ API; RTTI/exceptions likewise.
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := armeabi-v7a 
APP_PLATFORM := android-8

注:1.由于Android的版本差异,所以使用OpenCV的Native Camera还是比较危险的,可能会出现崩溃的情况(本人三星 I9220测试通过)

       2.为了应付android的各个版本,OpenCV在封装android的Camera的时候也提供了不同版本的camera.so,在android.mk中说明了不同版本使用不同的camera.so,这样就极大降低了程序崩溃的可能。

       3.OpenCV的环境还是需要自己配置一下。

最后附上Github原作者的地址 NativeCamera

0 0
原创粉丝点击