Driving and Using a USB Camera Connected over OTG on Android


I have recently been working on an Android project that needs a USB camera connected to a device running Android 4.4.4 for face recognition. So, as the "backbone" of the Android team (allow me a little bragging ^-^), I set out on the USB-camera campaign, crawling through forum posts and blogs until my eyes settled on one post (thank you!):
http://blog.csdn.net/sukhoi27smk/article/details/18269097

With its help I found
https://bitbucket.org/neuralassembly/simplewebcam/src

I then downloaded the source and set off on my USB-camera driver journey.

The library files I put together are available for download in the original post.

The first step is setting up the environment under Ubuntu Linux: Android Studio (AS) and the NDK. If you want a more comfortable editor for the C code you can install Qt Creator (my personal preference; anything works, gedit is perfectly fine too).
Installing the software and configuring the environment needs no walkthrough here... there are plenty of guides online.

Next comes the code rework.

Open ImageProc.c in the jni folder and rename its JNI interface functions so that they bind to CameraPreview.java.

After my changes one of them looks like the line below; rename the other JNI functions the same way (and don't forget to keep the names in the .h file in sync):

void Java_com_mojsoft_usbcamera_view_CameraPreview_pixeltobmp(JNIEnv* env, jobject thiz, jobject bitmap)
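For reference, the C function name has to follow the JNI convention Java_<package>_<class>_<method>, with the dots of the Java package replaced by underscores. A minimal sketch of the matching Java side of that binding (package and method names read off the C signature above; the full class appears further down):

package com.mojsoft.usbcamera.view;   // must match the com_mojsoft_usbcamera_view part of the C name

import android.graphics.Bitmap;

public class CameraPreview {

    static {
        // Loads libImageProc.so produced by ndk-build.
        System.loadLibrary("ImageProc");
    }

    // Resolves to Java_com_mojsoft_usbcamera_view_CameraPreview_pixeltobmp in ImageProc.c.
    public native void pixeltobmp(Bitmap bitmap);
}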

Press Ctrl+Alt+T to open a terminal, cd into your jni directory, and run ndk-build; it should build successfully.

cd .. back out of the jni directory and run ls: you will see two new directories, libs and obj. Copy libImageProc.so from libs into the jniLibs directory of your project (in Android Studio this is typically app/src/main/jniLibs/<ABI>, e.g. armeabi-v7a).

Next comes the Java side. [Note: the Java package name must match the package encoded in the C function names.]

Place the CameraPreview view in your layout, then build and run...
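If you prefer to wire it up in code instead of XML, here is a rough sketch of a call site. The Activity and its package are my own illustration; CameraPreview, setGetBitmapListener and the AlreadyRGBListener callback come from the class shown further down, and AlreadyRGBListener is assumed to live alongside CameraPreview:

package com.mojsoft.usbcamera;

import android.app.Activity;
import android.os.Bundle;

import com.mojsoft.usbcamera.view.AlreadyRGBListener;
import com.mojsoft.usbcamera.view.CameraPreview;

public class MainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // The preview starts the JNI capture loop once its surface is created.
        CameraPreview preview = new CameraPreview(this);

        // Receive the RGB bytes of every captured frame,
        // e.g. to feed them into a face-recognition engine.
        preview.setGetBitmapListener(new AlreadyRGBListener() {
            @Override
            public void alreadyRGB(byte[] rgb) {
                // process the frame here
            }
        });

        setContentView(preview);
    }
}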

Then you stare at the screen...

...and it is full of pleasant surprises: problem solved, haha.

Below is the main CameraPreview (SurfaceView) class:

package com.mojsoft.usbcamera.view;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, Runnable {

    private static final boolean DEBUG = true;
    private static final String TAG = "WebCam";

    protected Context context;
    private SurfaceHolder holder;
    Thread mainLoop = null;
    private Bitmap bmp = null;
    private byte[] byteArrary;

    private boolean cameraExists = false;
    private boolean shouldStop = false;

    // /dev/videox (x = cameraId + cameraBase) is used.
    // In some OMAP devices the system uses /dev/video[0-3],
    // so users must use /dev/video[4-].
    // In such a case, try cameraId = 0 and cameraBase = 4.
    private int cameraId = 0;
    private int cameraBase = 0;

    // This definition also exists in ImageProc.h.
    // The webcam must support 640x480 in YUYV format.
    static final int IMG_WIDTH = 640;
    static final int IMG_HEIGHT = 480;

    // The following variables are used to draw camera images.
    private int winWidth = 0;
    private int winHeight = 0;
    private Rect rect;
    private int dw, dh;
    private float rate;

    private boolean isFramAready = false;
    private AlreadyRGBListener listener;

    // JNI functions implemented in ImageProc.c
    public native int prepareCamera(int videoid);
    public native int prepareCameraWithBase(int videoid, int camerabase);
    public native void processCamera();
    public native void stopCamera();
    public native void pixeltobmp(Bitmap bitmap);
    public native byte[] getRgb();

    static {
        System.loadLibrary("ImageProc");
    }

    public CameraPreview(Context context) {
        super(context);
        this.context = context;
        if (DEBUG) Log.d(TAG, "CameraPreview constructed");
        setFocusable(true);

        holder = getHolder();
        holder.addCallback(this);
        // Deprecated and ignored since API 11; kept from the original code.
        holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    public CameraPreview(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.context = context;
        if (DEBUG) Log.d(TAG, "CameraPreview constructed");
        setFocusable(true);

        holder = getHolder();
        holder.addCallback(this);
        // Deprecated and ignored since API 11; kept from the original code.
        holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    @Override
    public void run() {
        while (cameraExists) {
            // Obtain the display area so the image is drawn as large as possible.
            if (winWidth == 0) {
                winWidth = this.getWidth();
                winHeight = this.getHeight();

                if (winWidth * 3 / 4 <= winHeight) {
                    dw = 0;
                    dh = (winHeight - winWidth * 3 / 4) / 2;
                    rate = ((float) winWidth) / IMG_WIDTH;
                    rect = new Rect(dw, dh, dw + winWidth - 1, dh + winWidth * 3 / 4 - 1);
                } else {
                    dw = (winWidth - winHeight * 4 / 3) / 2;
                    dh = 0;
                    rate = ((float) winHeight) / IMG_HEIGHT;
                    rect = new Rect(dw, dh, dw + winHeight * 4 / 3 - 1, dh + winHeight - 1);
                }
            }

            // Obtain a camera image (pixel data are stored in an array on the JNI side).
            processCamera();
            // Convert the camera image to a bitmap.
            pixeltobmp(bmp);
            byte[] arr = getRgb();
            isFramAready = true;

            Canvas canvas = getHolder().lockCanvas();
            if (canvas != null) {
                // Draw the camera bitmap on the canvas.
                canvas.drawBitmap(bmp, null, rect, null);
                getHolder().unlockCanvasAndPost(canvas);
            }

            // Hand the RGB frame to the registered listener, if any.
            if (listener != null) {
                listener.alreadyRGB(arr);
            }

            if (shouldStop) {
                shouldStop = false;
                break;
            }
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        if (DEBUG) Log.d(TAG, "surfaceCreated");
        if (bmp == null) {
            bmp = Bitmap.createBitmap(IMG_WIDTH, IMG_HEIGHT, Bitmap.Config.ARGB_8888);
        }
        if (byteArrary == null) {
            byteArrary = new byte[IMG_WIDTH * IMG_HEIGHT];
        }

        // /dev/videox (x = cameraId + cameraBase) is used.
        // The original line below failed on my device, so I replaced it with
        // prepareCamera(0), which opened the camera successfully.
        // int ret = prepareCameraWithBase(cameraId, cameraBase);
        int ret = prepareCamera(0);
        if (ret != -1) cameraExists = true;

        mainLoop = new Thread(this);
        mainLoop.start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        if (DEBUG) Log.d(TAG, "surfaceChanged");
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (DEBUG) Log.d(TAG, "surfaceDestroyed");
        if (cameraExists) {
            shouldStop = true;
            while (shouldStop) {
                try {
                    Thread.sleep(50); // wait for the capture thread to stop
                } catch (Exception e) {
                }
            }
        }
        stopCamera();
    }

    public void setGetBitmapListener(AlreadyRGBListener listener) {
        this.listener = listener;
    }
}
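The AlreadyRGBListener callback referenced above is not included in the post. Judging from how CameraPreview calls it, it is a single-method interface along these lines (a sketch; the original may differ, and its package is assumed to sit next to CameraPreview):

package com.mojsoft.usbcamera.view;   // assumed location, alongside CameraPreview

// Delivers the RGB bytes of each captured frame (the array returned by the native getRgb()).
public interface AlreadyRGBListener {
    void alreadyRGB(byte[] rgb);
}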