Camera 运行流程 与 Preview过程分析

来源:互联网 发布:包月网络电话卡 编辑:程序博客网 时间:2024/05/17 00:10

转自:http://www.2cto.com/kf/201304/205451.html


Android Camera 运行流程

首先既然Camera是利用binder通信,它肯定要将它的service注册到ServiceManager里面,以备后续Client引用,那么这一步是在哪里进行的呢?细心的人会发现,在frameworks\base\media\mediaserver\Main_MediaServer.cpp下有个main函数,可以用来注册媒体服务。没错就是在这里,CameraService完成了服务的注册,相关代码如下:

// Entry point of the mediaserver process (started by init.rc at boot).
// Each *::instantiate() call below registers one service with the
// ServiceManager; CameraService is published here under "media.camera".
int main(int argc, char** argv)
{
    sp<ProcessState> proc(ProcessState::self());
    sp<IServiceManager> sm = defaultServiceManager();
    LOGI("ServiceManager: %p", sm.get());
    AudioFlinger::instantiate();
    MediaPlayerService::instantiate();
    CameraService::instantiate();
    AudioPolicyService::instantiate();
    // Spin up binder worker threads and park this thread in the pool so
    // the process keeps servicing incoming binder transactions.
    ProcessState::self()->startThreadPool();
    IPCThreadState::self()->joinThreadPool();
}

可是我们到CameraService文件里面却找不到instantiate()这个函数,它在哪?继续追到它的一个父类BinderService,

CameraService的定义在frameworks/base/services/camera/libcameraservice/CameraService.h中

// CameraService: the server side of the camera client/server split.
// BinderService<CameraService> supplies the static instantiate()/publish()
// registration helpers; BnCameraService provides the binder transaction
// glue for the ICameraService interface.
class CameraService :
    public BinderService<CameraService>,
    public BnCameraService
{
    class Client;
    friend class BinderService<CameraService>;
public:
    // Name the service is registered under with ServiceManager; clients
    // look it up via getService(String16("media.camera")).
    static char const* getServiceName() { return "media.camera"; }
    .....

    .....

}

从以上定义可以看出CameraService 继承于BinderService,所以CameraService::instantiate(); 其实是调用BinderService中的instantiate

BinderService的定义在frameworks/base/include/binder/BinderService.h中

// ---------------------------------------------------------------------------
namespace android {

template<typename SERVICE>
class BinderService
{
public:
    static status_t publish() {
        sp<IServiceManager> sm(defaultServiceManager());
        return sm->addService(String16(SERVICE::getServiceName()), new SERVICE());
    }


    static void publishAndJoinThreadPool() {
        sp<ProcessState> proc(ProcessState::self());
        sp<IServiceManager> sm(defaultServiceManager());
        sm->addService(String16(SERVICE::getServiceName()), new SERVICE());
        ProcessState::self()->startThreadPool();
        IPCThreadState::self()->joinThreadPool();
    }


    static void instantiate() { publish(); }


    static status_t shutdown() {
        return NO_ERROR;
    }
};

}; // namespace android
// ---------------------------------------------------------------------------
可以发现在publish()函数中,CameraService完成服务的注册 。这里面有个SERVICE,源码中有说明

template<typename SERVICE>
这表示SERVICE是个模板,这里是注册CameraService,所以可以用CameraService代替
return sm->addService(String16(CameraService::getServiceName()), new CameraService());
好了这样,Camera就在ServiceManager完成服务注册,提供给client随时使用。
Main_MediaServer主函数由init.rc在启动时调用,所以在设备开机的时候Camera就会注册一个服务,用作binder通信。

 

Binder服务已注册,那接下来就看看client如何连上server端,并打开camera模块。咱们先从camera app的源码入手。在onCreate()函数中专门有一个open Camera的线程

camera app的源码文件在以下目录packages/apps/OMAPCamera/src/com/ti/omap4/android/camera/camera.java
    /**
     * Activity entry point.  Starts the camera-open thread as early as
     * possible to hide device-open latency behind UI setup, inflates the
     * preference tree and views, then joins the open thread and the
     * preview thread so both camera and preview are ready on return.
     */
    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        getPreferredCameraId();
        String[] defaultFocusModes = getResources().getStringArray(
                R.array.pref_camera_focusmode_default_array);
        mFocusManager = new FocusManager(mPreferences, defaultFocusModes);


        /*
         * To reduce startup time, we start the camera open and preview threads.
         * We make sure the preview is started at the end of onCreate.
         */
        mCameraOpenThread.start();


        // Inflate the preference XML and cache the entry values the rest
        // of the activity compares against later (empty string = absent).
        PreferenceInflater inflater = new PreferenceInflater(this);
        PreferenceGroup group =
                (PreferenceGroup) inflater.inflate(R.xml.camera_preferences);


        ListPreference gbce = group.findPreference(CameraSettings.KEY_GBCE);
        if (gbce != null) {
            mGBCEOff = gbce.findEntryValueByEntry(getString(R.string.pref_camera_gbce_entry_off));
            if (mGBCEOff == null) {
                mGBCEOff = "";
            }
        }


        ListPreference autoConvergencePreference = group.findPreference(CameraSettings.KEY_AUTO_CONVERGENCE);
        if (autoConvergencePreference != null) {
            mTouchConvergence = autoConvergencePreference.findEntryValueByEntry(getString(R.string.pref_camera_autoconvergence_entry_mode_touch));
            if (mTouchConvergence == null) {
                mTouchConvergence = "";
            }
            mManualConvergence = autoConvergencePreference.findEntryValueByEntry(getString(R.string.pref_camera_autoconvergence_entry_mode_manual));
            if (mManualConvergence == null) {
                mManualConvergence = "";
            }
        }


        ListPreference exposure = group.findPreference(CameraSettings.KEY_EXPOSURE_MODE_MENU);
        if (exposure != null) {
            mManualExposure = exposure.findEntryValueByEntry(getString(R.string.pref_camera_exposuremode_entry_manual));
            if (mManualExposure == null) {
                mManualExposure = "";
            }
        }


        ListPreference temp = group.findPreference(CameraSettings.KEY_MODE_MENU);
        if (temp != null) {
            mTemporalBracketing = temp.findEntryValueByEntry(getString(R.string.pref_camera_mode_entry_temporal_bracketing));
            if (mTemporalBracketing == null) {
                mTemporalBracketing = "";
            }


            mExposureBracketing = temp.findEntryValueByEntry(getString(R.string.pref_camera_mode_entry_exp_bracketing));
            if (mExposureBracketing == null) {
                mExposureBracketing = "";
            }


            mZoomBracketing = temp.findEntryValueByEntry(getString(R.string.pref_camera_mode_entry_zoom_bracketing));
            if (mZoomBracketing == null) {
                mZoomBracketing = "";
            }


            mHighPerformance = temp.findEntryValueByEntry(getString(R.string.pref_camera_mode_entry_hs));
            if (mHighPerformance == null) {
                mHighPerformance = "";
            }


            mHighQuality = temp.findEntryValueByEntry(getString(R.string.pref_camera_mode_entry_hq));
            if (mHighQuality == null) {
                mHighQuality = "";
            }


            mHighQualityZsl = temp.findEntryValueByEntry(getString(R.string.pref_camera_mode_entry_zsl));
            if (mHighQualityZsl == null) {
                mHighQualityZsl = "";
            }
        }


        // NOTE(review): getPreferredCameraId() and mFocusManager were
        // already initialized at the top of this method — this repeats
        // that work; confirm whether the duplication is intentional.
        getPreferredCameraId();
        mFocusManager = new FocusManager(mPreferences,
                defaultFocusModes);
        mTouchManager = new TouchManager();


        mIsImageCaptureIntent = isImageCaptureIntent();
        setContentView(R.layout.camera);
        if (mIsImageCaptureIntent) {
            mReviewDoneButton = (Rotatable) findViewById(R.id.btn_done);
            mReviewCancelButton = (Rotatable) findViewById(R.id.btn_cancel);
            findViewById(R.id.btn_cancel).setVisibility(View.VISIBLE);
        } else {
            mThumbnailView = (RotateImageView) findViewById(R.id.thumbnail);
            mThumbnailView.enableFilter(false);
            mThumbnailView.setVisibility(View.VISIBLE);
        }


        mRotateDialog = new RotateDialogController(this, R.layout.rotate_dialog);
        mCaptureLayout = getString(R.string.pref_camera_capture_layout_default);


        mPreferences.setLocalId(this, mCameraId);
        CameraSettings.upgradeLocalPreferences(mPreferences.getLocal());


        mNumberOfCameras = CameraHolder.instance().getNumberOfCameras();
        mQuickCapture = getIntent().getBooleanExtra(EXTRA_QUICK_CAPTURE, false);


        // we need to reset exposure for the preview
        resetExposureCompensation();


        Util.enterLightsOutMode(getWindow());


        // don't set mSurfaceHolder here. We have it set ONLY within
        // surfaceChanged / surfaceDestroyed, other parts of the code
        // assume that when it is set, the surface is also set.
        SurfaceView preview = (SurfaceView) findViewById(R.id.camera_preview);
        SurfaceHolder holder = preview.getHolder();
        holder.addCallback(this);


        s3dView = new S3DViewWrapper(holder);


        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);


        // Make sure camera device is opened.
        try {
            mCameraOpenThread.join();
            mCameraOpenThread = null;
            if (mOpenCameraFail) {
                Util.showErrorAndFinish(this, R.string.cannot_connect_camera);
                return;
            } else if (mCameraDisabled) {
                Util.showErrorAndFinish(this, R.string.camera_disabled);
                return;
            }
        } catch (InterruptedException ex) {
            // ignore
        }
        // Device is open; bring up the preview on its own thread.
        mCameraPreviewThread.start();


        if (mIsImageCaptureIntent) {
            setupCaptureParams();
        } else {
            mModePicker = (ModePicker) findViewById(R.id.mode_picker);
            mModePicker.setVisibility(View.VISIBLE);
            mModePicker.setOnModeChangeListener(this);
            mModePicker.setCurrentMode(ModePicker.MODE_CAMERA);
        }


        mZoomControl = (ZoomControl) findViewById(R.id.zoom_control);
        mOnScreenIndicators = (Rotatable) findViewById(R.id.on_screen_indicators);
        mLocationManager = new LocationManager(this, this);


        // Wait until the camera settings are retrieved.
        synchronized (mCameraPreviewThread) {
            try {
                mCameraPreviewThread.wait();
            } catch (InterruptedException ex) {
                // ignore
            }
        }


        // Do this after starting preview because it depends on camera
        // parameters.
        initializeIndicatorControl();
        mCameraSound = new CameraSound();


        // Make sure preview is started.
        try {
            mCameraPreviewThread.join();
        } catch (InterruptedException ex) {
            // ignore
        }
        mCameraPreviewThread = null;
    }
再看看mCameraOpenThread

    /**
     * Background thread that opens the camera device via Util.openCamera()
     * while onCreate() continues with UI setup.  Failures are recorded in
     * flags that onCreate() checks after join() — exceptions cannot cross
     * the thread boundary.
     */
    Thread mCameraOpenThread = new Thread(new Runnable() {
        public void run() {
            try {
                mCameraDevice = Util.openCamera(Camera.this, mCameraId);
            } catch (CameraHardwareException e) {
                mOpenCameraFail = true;
            } catch (CameraDisabledException e) {
                mCameraDisabled = true;
            }
        }
    });

继续追Util.openCamera ,Util类的定义在以下目录:packages/apps/OMAPCamera/src/com/ti/omap4/android/camera/Util.java
    public static android.hardware.Camera openCamera(Activity activity, int cameraId)
            throws CameraHardwareException, CameraDisabledException {
        // Check if device policy has disabled the camera.
        DevicePolicyManager dpm = (DevicePolicyManager) activity.getSystemService(
                Context.DEVICE_POLICY_SERVICE);
        if (dpm.getCameraDisabled(null)) {
            throw new CameraDisabledException();
        }


        try {
            return CameraHolder.instance().open(cameraId);
        } catch (CameraHardwareException e) {
            // In eng build, we throw the exception so that test tool
            // can detect it and report it
            if ("eng".equals(Build.TYPE)) {
                throw new RuntimeException("openCamera failed", e);
            } else {
                throw e;
            }
        }
    }
又来了个CameraHolder,该类用一个实例openCamera

CameraHolder的定义在以下目录:packages/apps/OMAPCamera/src/com/ti/omap4/android/camera/CameraHolder.java

    /**
     * Opens (or reuses) the hardware camera for the given id.  Serialized
     * because only one user may hold the device at a time (asserted).
     *
     * @throws CameraHardwareException if open()/reconnect() fails
     */
    public synchronized android.hardware.Camera open(int cameraId)
            throws CameraHardwareException {
        Assert(mUsers == 0);
        // Switching to a different camera: release the cached device first.
        if (mCameraDevice != null && mCameraId != cameraId) {
            mCameraDevice.release();
            mCameraDevice = null;
            mCameraId = -1;
        }
        if (mCameraDevice == null) {
            try {
                Log.v(TAG, "open camera " + cameraId);
                mCameraDevice = android.hardware.Camera.open(cameraId);
                mCameraId = cameraId;
            } catch (RuntimeException e) {
                Log.e(TAG, "fail to connect Camera", e);
                throw new CameraHardwareException(e);
            }
            mParameters = mCameraDevice.getParameters();
        } else {
            // Same camera was kept open: reconnect and restore the
            // parameters captured when it was first opened.
            try {
                mCameraDevice.reconnect();
            } catch (IOException e) {
                Log.e(TAG, "reconnect failed.");
                throw new CameraHardwareException(e);
            }
            mCameraDevice.setParameters(mParameters);
        }
        ++mUsers;
        // A new user exists: cancel any pending delayed release.
        mHandler.removeMessages(RELEASE_CAMERA);
        mKeepBeforeTime = 0;
        return mCameraDevice;
    }

在这里就开始进入framework层了,调用frameworks\base\core\java\android\hardware\Camera.java类的open方法 。

    public static Camera open(int cameraId) {
        return new Camera(cameraId);
    }
这里调用了Camera的构造函数,在看看构造函数
    Camera(int cameraId) {
        mShutterCallback = null;
        mRawImageCallback = null;
        mJpegCallback = null;
        mPreviewCallback = null;
        mPostviewCallback = null;
        mZoomListener = null;


        Looper looper;
        if ((looper = Looper.myLooper()) != null) {
            mEventHandler = new EventHandler(this, looper);
        } else if ((looper = Looper.getMainLooper()) != null) {
            mEventHandler = new EventHandler(this, looper);
        } else {
            mEventHandler = null;
        }

        native_setup(new WeakReference<Camera>(this), cameraId);
    }
好,终于来到JNI了

 



继续看camera的JNI文件:frameworks/base/core/jni# gedit android_hardware_Camera.cpp

 


由于前面Camera的构造函数里调用了native_setup(new WeakReference<Camera>(this), cameraId);

那么native_setup()的定义在那里呢

通过我的查看,在frameworks/base/core/jni# gedit android_hardware_Camera.cpp中有这样一个定义,

我认为通过这个定义,使得native_setup和android_hardware_Camera_native_setup 关联起来

// Mapping table handed to the JNI runtime: each entry binds a native
// method declared in android.hardware.Camera (Java name + JNI type
// signature) to its C++ implementation in this file.  This is how
// native_setup() resolves to android_hardware_Camera_native_setup().
static JNINativeMethod camMethods[] = {
  { "getNumberOfCameras",
    "()I",
    (void *)android_hardware_Camera_getNumberOfCameras },
  { "getCameraInfo",
    "(ILandroid/hardware/Camera$CameraInfo;)V",
    (void*)android_hardware_Camera_getCameraInfo },
  { "native_setup",
    "(Ljava/lang/Object;I)V",
    (void*)android_hardware_Camera_native_setup },
  { "native_release",
    "()V",
    (void*)android_hardware_Camera_release },
  { "setPreviewDisplay",
    "(Landroid/view/Surface;)V",
    (void *)android_hardware_Camera_setPreviewDisplay },
  { "setPreviewTexture",
    "(Landroid/graphics/SurfaceTexture;)V",
    (void *)android_hardware_Camera_setPreviewTexture },
  { "startPreview",
    "()V",
    (void *)android_hardware_Camera_startPreview },
  { "_stopPreview",
    "()V",
    (void *)android_hardware_Camera_stopPreview },
  { "previewEnabled",
    "()Z",
    (void *)android_hardware_Camera_previewEnabled },
  { "setHasPreviewCallback",
    "(ZZ)V",
    (void *)android_hardware_Camera_setHasPreviewCallback },
  { "_addCallbackBuffer",
    "([BI)V",
    (void *)android_hardware_Camera_addCallbackBuffer },
  { "native_autoFocus",
    "()V",
    (void *)android_hardware_Camera_autoFocus },
  { "native_cancelAutoFocus",
    "()V",
    (void *)android_hardware_Camera_cancelAutoFocus },
  { "native_takePicture",
    "(I)V",
    (void *)android_hardware_Camera_takePicture },
  { "native_setParameters",
    "(Ljava/lang/String;)V",
    (void *)android_hardware_Camera_setParameters },
  { "native_getParameters",
    "()Ljava/lang/String;",
    (void *)android_hardware_Camera_getParameters },
  { "reconnect",
    "()V",
    (void*)android_hardware_Camera_reconnect },
  { "lock",
    "()V",
    (void*)android_hardware_Camera_lock },
  { "unlock",
    "()V",
    (void*)android_hardware_Camera_unlock },
  { "startSmoothZoom",
    "(I)V",
    (void *)android_hardware_Camera_startSmoothZoom },
  { "stopSmoothZoom",
    "()V",
    (void *)android_hardware_Camera_stopSmoothZoom },
  { "setDisplayOrientation",
    "(I)V",
    (void *)android_hardware_Camera_setDisplayOrientation },
  { "_startFaceDetection",
    "(I)V",
    (void *)android_hardware_Camera_startFaceDetection },
  { "_stopFaceDetection",
    "()V",
    (void *)android_hardware_Camera_stopFaceDetection},
};

所以,native_setup(new WeakReference<Camera>(this), cameraId);这个调用即是对下面android_hardware_Camera_native_setup这个函数的调用


// connect to camera service
// JNI implementation of android.hardware.Camera.native_setup(): connects
// to the camera service over binder, installs the callback listener and
// stashes the native context pointer in the Java object's opaque field.
static void android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
    jobject weak_this, jint cameraId)
{
    // Binder handshake: returns a Camera proxy bound to the service-side
    // Client, or NULL if the service rejected the connection.
    sp<Camera> camera = Camera::connect(cameraId);

    if (camera == NULL) {
        jniThrowRuntimeException(env, "Fail to connect to camera service");
        return;
    }


    // make sure camera hardware is alive
    if (camera->getStatus() != NO_ERROR) {
        jniThrowRuntimeException(env, "Camera initialization failed");
        return;
    }


    jclass clazz = env->GetObjectClass(thiz);
    if (clazz == NULL) {
        jniThrowRuntimeException(env, "Can't find android/hardware/Camera");
        return;
    }


    // We use a weak reference so the Camera object can be garbage collected.
    // The reference is only used as a proxy for callbacks.
    sp<JNICameraContext> context = new JNICameraContext(env, weak_this, clazz, camera);
    context->incStrong(thiz);  // keep the context alive beyond this local sp
    camera->setListener(context);


    // save context in opaque field
    env->SetIntField(thiz, fields.context, (int)context.get());
}
JNI函数里面,我们找到Camera C/S架构的客户端了,它调用connect函数向服务器发送连接请求。JNICameraContext这个类是一个监听类,用于处理底层Camera回调函数传来的数据和消息
看看客户端的connect函数有什么,connect定义在以下路径frameworks/base/libs/camera/camera.cpp

// Client-side entry point: creates a Camera proxy, looks up the
// "media.camera" service and asks it for an ICamera connection.
sp<Camera> Camera::connect(int cameraId)
{
    LOGV("connect");
    sp<Camera> c = new Camera();
    const sp<ICameraService>& cs = getCameraService();
    if (cs != 0) {
        // Binder call into CameraService::connect(); returns the
        // service-side Client wrapped as an ICamera.
        c->mCamera = cs->connect(c, cameraId);
    }
    if (c->mCamera != 0) {
        // Be notified if the remote end (mediaserver) dies.
        c->mCamera->asBinder()->linkToDeath(c);
        c->mStatus = NO_ERROR;
    } else {
        c.clear();  // connection failed: drop the proxy, return NULL sp
    }
    return c;
}

 


const sp<ICameraService>& cs =getCameraService();获取CameraService实例。

进入getCameraService()中

// establish binder interface to camera service
// Lazily resolves and caches the ICameraService proxy; thread-safe via
// mLock.  Blocks until the service is available.
const sp<ICameraService>& Camera::getCameraService()
{
    Mutex::Autolock _l(mLock);  // guards the cached mCameraService
    if (mCameraService.get() == 0) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder;
        // Poll ServiceManager every 0.5 s until "media.camera" is
        // published (mediaserver may still be starting up).
        do {
            binder = sm->getService(String16("media.camera"));
            if (binder != 0)
                break;
            LOGW("CameraService not published, waiting...");
            usleep(500000); // 0.5 s
        } while(true);
        if (mDeathNotifier == NULL) {
            mDeathNotifier = new DeathNotifier();
        }
        binder->linkToDeath(mDeathNotifier);
        mCameraService = interface_cast<ICameraService>(binder);
    }
    LOGE_IF(mCameraService==0, "no CameraService!?");
    return mCameraService;
}
CameraService实例通过binder获取的,mCameraService即为CameraService的实例。
 
回到sp<Camera> Camera::connect(int cameraId)中
c->mCamera = cs->connect(c, cameraId);
即:执行server的connect()函数,并且返回ICamera对象,赋值给Camera的mCamera,服务端connect()返回的是他内部类的一个实例。
server的connect()函数定义在以下路径:frameworks/base/services/camera/libcameraservice/CameraService.cpp

// Server side of the connect handshake (invoked over binder from the
// client-side Camera::connect()).  Validates the caller and cameraId,
// opens the camera HAL device and returns a Client (ICamera) wrapping it.
sp<ICamera> CameraService::connect(
        const sp<ICameraClient>& cameraClient, int cameraId) {
    int callingPid = getCallingPid();
    sp<CameraHardwareInterface> hardware = NULL;


    LOG1("CameraService::connect E (pid %d, id %d)", callingPid, cameraId);


    if (!mModule) {
        LOGE("Camera HAL module not loaded");
        return NULL;
    }


    sp<Client> client;
    if (cameraId < 0 || cameraId >= mNumberOfCameras) {
        LOGE("CameraService::connect X (pid %d) rejected (invalid cameraId %d).",
            callingPid, cameraId);
        return NULL;
    }


    // Honor the device-policy "camera disabled" property before touching
    // any hardware.
    char value[PROPERTY_VALUE_MAX];
    property_get("sys.secpolicy.camera.disabled", value, "0");
    if (strcmp(value, "1") == 0) {
        // Camera is disabled by DevicePolicyManager.
        LOGI("Camera is disabled. connect X (pid %d) rejected", callingPid);
        return NULL;
    }


    Mutex::Autolock lock(mServiceLock);
    // mClient holds weak references: promote() to learn whether a live
    // client already owns this camera.
    if (mClient[cameraId] != 0) {
        client = mClient[cameraId].promote();
        if (client != 0) {
            if (cameraClient->asBinder() == client->getCameraClient()->asBinder()) {
                LOG1("CameraService::connect X (pid %d) (the same client)",
                    callingPid);
                return client;
            } else {
                LOGW("CameraService::connect X (pid %d) rejected (existing client).",
                    callingPid);
                return NULL;
            }
        }
        mClient[cameraId].clear();  // promote failed: drop the stale entry
    }


    if (mBusy[cameraId]) {
        LOGW("CameraService::connect X (pid %d) rejected"
             " (camera %d is still busy).", callingPid, cameraId);
        return NULL;
    }


    struct camera_info info;
    if (mModule->get_camera_info(cameraId, &info) != OK) {
        LOGE("Invalid camera id %d", cameraId);
        return NULL;
    }


    // The HAL identifies cameras by their id rendered as a string.
    char camera_device_name[10];
    snprintf(camera_device_name, sizeof(camera_device_name), "%d", cameraId);


    // initialize() invokes the HAL module's open() method, which actually
    // opens the camera device; bail out if that fails.
    hardware = new CameraHardwareInterface(camera_device_name);
    if (hardware->initialize(&mModule->common) != OK) {
        hardware.clear();
        return NULL;
    }


    client = new Client(this, cameraClient, hardware, cameraId, info.facing, callingPid);
    mClient[cameraId] = client;
    LOG1("CameraService::connect X");
    return client;
}
实例化Camera Hal接口 hardware,hardware调用initialize()进入HAL层打开Camear驱动。
CameraHardwareInterface中initialize()定义在以下路径:frameworks/base/services/camera/libcameraservice/CameraHardwareInterface.h

代码如下:

    // Opens the camera HAL device via the module's open() method
    // (hw_module_methods_t::open) and sets up the HAL preview-window
    // glue.  Returns the HAL error code on failure.
    status_t initialize(hw_module_t *module)
    {
        LOGI("Opening camera %s", mName.string());
        // mName is the camera id rendered as a string ("0", "1", ...);
        // the HAL uses it to select which camera device to open.
        int rc = module->methods->open(module, mName.string(),
                                       (hw_device_t **)&mDevice);
        if (rc != OK) {
            LOGE("Could not open camera %s: %d", mName.string(), rc);
            return rc;
        }
#ifdef OMAP_ENHANCEMENT_CPCAM
        // OMAP builds maintain separate windows for the preview port and
        // the tap-in/tap-out ports.
        initHalPreviewWindow(&mHalPreviewWindow);
        initHalPreviewWindow(&mHalTapin);
        initHalPreviewWindow(&mHalTapout);
#else
        initHalPreviewWindow();
#endif
        return rc;
    }
此处通过module->method->open()方法真正打开Camera设备,

其中module的定义在以下路径:

// Excerpt of the CameraService declaration showing where mModule lives:
// the camera HAL module handle (camera_module_t, see camera.h) that
// connect() dereferences for get_camera_info() and whose
// common.methods->open() ultimately opens the camera device.
class CameraService :
    public BinderService<CameraService>,
    public BnCameraService
{

    // Per-connection object handed back to clients as ICamera.
    class Client : public BnCamera
    {
    public:
        ......

 

    private:

        .....

    };

    camera_module_t *mModule;

};

此处还必须找到camera_module_t 的定义,以更好的理解整个运行流程,通过追根溯源找到了camera_module_t 定义,

camera_module_t的定义在以下路径:hardware/libhardware/include/hardware/camera.h中,定义如下

// Camera HAL module descriptor, as loaded from the vendor HAL library.
typedef struct camera_module {
    hw_module_t common;  // must be first: a camera_module_t* is usable as a hw_module_t*
    int (*get_number_of_cameras)(void);  // number of cameras on this device
    int (*get_camera_info)(int camera_id, struct camera_info *info);  // static per-camera info
} camera_module_t;
其中包含get_number_of_cameras方法和get_camera_info方法用于获取camera info

另外hw_module_t common;这个选项十分重要,此处应重点关注,因为是使用hw_module_t结构体中的open()方法打开设备文件的


继续找到hw_module_t 结构体的定义.在以下路径:hardware/libhardware/include/hardware/hardware.h,代码如下:

// Forward declarations for the HAL module/device structures.
struct hw_module_t;
struct hw_module_methods_t;
struct hw_device_t;


/**
 * Every hardware module must have a data structure named HAL_MODULE_INFO_SYM
 * and the fields of this data structure must begin with hw_module_t
 * followed by module specific information.
 */
typedef struct hw_module_t {
    /** tag must be initialized to HARDWARE_MODULE_TAG */
    uint32_t tag;


    /** major version number for the module */
    uint16_t version_major;


    /** minor version number of the module */
    uint16_t version_minor;


    /** Identifier of module */
    const char *id;


    /** Name of this module */
    const char *name;


    /** Author/owner/implementor of the module */
    const char *author;


    /** Modules methods */
    // The open() entry point used to open HAL devices lives here.
    struct hw_module_methods_t* methods;


    /** module's dso */
    void* dso;


    /** padding to 128 bytes, reserved for future use */
    uint32_t reserved[32-7];


} hw_module_t;

 

同样,找到hw_module_methods_t这个结构体的定义,代码如下:

// Method table shared by every HAL module.
typedef struct hw_module_methods_t {
    /** Open a specific device.  'id' selects which device within the
     *  module (for camera HALs, the camera id as a string); the opened
     *  device is returned through 'device'.  Returns 0 on success. */
    int (*open)(const struct hw_module_t* module, const char* id,
            struct hw_device_t** device);
} hw_module_methods_t;

hw_module_methods_t 结构体中只有open()一个方法,用于打开camera driver,实现与硬件层的交互

到此为止,很容易看出:

Android中Camera的调用流程可分为以下几个层次:
Package->Framework->JNI->Camera(cpp)--(binder)-->CameraService->Camera HAL->Camera Driver


Android Camera OMX方式Preview过程分析

转自:   http://www.2cto.com/kf/201304/205454.html


在之前的文章中已经说过OMXCameraAdapter的初始化了,为了更好的了解A9和Ducati的数据交互过程,这里很有必要深入研究一下Camera采用OMX方式的Preview过程

这里我们还是从CameraHal开始我们对preview过程的分析吧,因为hal层的preview方法对整个preview过程做了一些很重要的初始化,看看代码吧   @brief Start preview mode.

   @param none

   @todo Update function header with the different errors that are possible


    下面调用的这个方法是我们关注的重点,他实现了很多preview开始前的初始化

/**

   @param none

   @todo Update function header with the different errors that are possible


    这里是我添加的注释,这里这个mPreviewStartInProgress表示camera preview是否正在进行,false则表示不在进行,mDisplayPaused表示camera已经开始显示,只是暂时停止了,这两个状态的检查表明这里是第一次调用preview,初次使用要查询camera匹配的分辨率,所以这里查询获得宽和高,同时保持在外面的全局变量中,以备之后使用

    if ((mPreviewStartInProgress == false) && (mDisplayPaused == false)){

      ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW,( int ) &frame);

      if ( NO_ERROR != ret ){

        CAMHAL_LOGEB("Error: CAMERA_QUERY_RESOLUTION_PREVIEW %d", ret);

        return ret;

      }


      ///Update the current preview width and height

      mPreviewWidth = frame.mWidth;

      mPreviewHeight = frame.mHeight;

    }

    这里我们没有设置preview callback同时也没有使能display adapter,那么我们既没有使用V4LCameraAdapter方式,也没有使用overlay方式,那么OMX方式就是我们唯一的选择了,所以这里让组件进入到Executing state

    ///If we don't have the preview callback enabled and display adapter,

    if(!mSetPreviewWindowCalled || (mDisplayAdapter.get() == NULL)){

      CAMHAL_LOGD("Preview not started. Preview in progress flag set");

      mPreviewStartInProgress = true;

      ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SWITCH_TO_EXECUTING);

      if ( NO_ERROR != ret ){

        CAMHAL_LOGEB("Error: CAMERA_SWITCH_TO_EXECUTING %d", ret);

        return ret;

      }

      return NO_ERROR;

    }

    这里判断我们使用overlay方式,但是这里其实只是暂停了preview,这里做的工作只是从新开启preview,并且开始preview callback

    if( (mDisplayAdapter.get() != NULL) && ( !mPreviewEnabled ) && ( mDisplayPaused ) )

        {

        CAMHAL_LOGDA("Preview is in paused state");


        mDisplayPaused = false;

        mPreviewEnabled = true;

        if ( NO_ERROR == ret )

            {

            ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);


            if ( NO_ERROR != ret )

                {

                CAMHAL_LOGEB("Display adapter resume failed %x", ret);

                }

            }

        //restart preview callbacks

        if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)

        {

            mAppCallbackNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME);

        }


        signalEndImageCapture();

        return ret;

        }

    获取到属性中的指定的buffer count

    required_buffer_count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));


    ///Allocate the preview buffers

    ret = allocPreviewBufs(mPreviewWidth, mPreviewHeight, mParameters.getPreviewFormat(), required_buffer_count,max_queueble_buffers);


    if ( NO_ERROR != ret )

        {

        CAMHAL_LOGEA("Couldn't allocate buffers for Preview");

        goto error;

        }

    这里其实我一直想不清楚这个MeasurementEnable到底是哪个功能的flag,暂认为是测试数据专用回调吧

    if ( mMeasurementEnabled )

        {

        这里先获取分辨率中的长度

        ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA,

                                          ( int ) &frame,

                                          required_buffer_count);

        if ( NO_ERROR != ret )

            {

            return ret;

            }


         ///Allocate the preview data buffers

        ret = allocPreviewDataBufs(frame.mLength, required_buffer_count);

        if ( NO_ERROR != ret ) {

            CAMHAL_LOGEA("Couldn't allocate preview data buffers");

            goto error;

           }


        if ( NO_ERROR == ret )

            {

            desc.mBuffers = mPreviewDataBuffers;

            desc.mOffsets = mPreviewDataOffsets;

            desc.mFd = mPreviewDataFd;

            desc.mLength = mPreviewDataLength;

            desc.mCount = ( size_t ) required_buffer_count;

            desc.mMaxQueueable = (size_t) required_buffer_count;

上面通过desc这个变量打包我们的数据,他是BuffersDescriptor类型的变量,也就是buffer属性之类的包,然后调用sendCommand,使用自己申请好的buffer,这里其实是我看这个初始化的重点,当然还有后面的一个sendCommand         mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA,

                                        ( int ) &desc);

            }


        }


    ///Pass the buffers to Camera Adapter

    desc.mBuffers = mPreviewBuffers;

    desc.mOffsets = mPreviewOffsets;

    desc.mFd = mPreviewFd;

    desc.mLength = mPreviewLength;

    desc.mCount = ( size_t ) required_buffer_count;

    desc.mMaxQueueable = (size_t) max_queueble_buffers;

    还有就是这里的这个sendCommand了

    ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW,

                                      ( int ) &desc);


    if ( NO_ERROR != ret )

        {

        CAMHAL_LOGEB("Failed to register preview buffers: 0x%x", ret);

        freePreviewBufs();

        return ret;

        }


    mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBuffers, mPreviewOffsets, mPreviewFd, mPreviewLength,required_buffer_count);


    ///Start the callback notifier

    ret = mAppCallbackNotifier->start();


    if( ALREADY_EXISTS == ret )

        {

        //Already running, do nothing

        CAMHAL_LOGDA("AppCallbackNotifier already running");

        ret = NO_ERROR;

        }

    else if ( NO_ERROR == ret ) {

        CAMHAL_LOGDA("Started AppCallbackNotifier..");

        mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);

        }

    else

        {

        CAMHAL_LOGDA("Couldn't start AppCallbackNotifier");

        goto error;

        }


    if (ret == NO_ERROR) mPreviewInitializationDone = true;

    return ret;


    error:


        CAMHAL_LOGEA("Performing cleanup after error");


        //Do all the cleanup

        freePreviewBufs();

        mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);

        if(mDisplayAdapter.get() != NULL)

            {

            mDisplayAdapter->disableDisplay(false);

            }

        mAppCallbackNotifier->stop();

        mPreviewStartInProgress = false;

        mPreviewEnabled = false;

        LOG_FUNCTION_NAME_EXIT;


        return ret;

}

这里我们还是分析一下下面这个方法的实现

这个调用最终调用到BaseCameraAdapter下的sendCommand,然后调用到OMXCameraAdapter下的switchToExecuting,其核心工作由doSwitchToExecuting方法完成,这个方法的实现在下面

我们看看这个方法的实现

status_t OMXCameraAdapter::doSwitchToExecuting()

{

  status_t ret = NO_ERROR;

  OMX_ERRORTYPE eError = OMX_ErrorNone;

  LOG_FUNCTION_NAME;


  if ( (mComponentState == OMX_StateExecuting) || (mComponentState == OMX_StateInvalid) ){

    CAMHAL_LOGDA("Already in OMX_Executing state or OMX_StateInvalid state");

    mStateSwitchLock.unlock();

    return NO_ERROR;

  }


  if ( 0 != mSwitchToExecSem.Count() ){

    CAMHAL_LOGEB("Error mSwitchToExecSem semaphore count %d", mSwitchToExecSem.Count());

    goto EXIT;

  }


  ///Register for Preview port DISABLE  event

  ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,

                         OMX_EventCmdComplete,

                         OMX_CommandPortDisable,

                         mCameraAdapterParameters.mPrevPortIndex,

                         mSwitchToExecSem);

  if ( NO_ERROR != ret ){

    CAMHAL_LOGEB("Error in registering Port Disable for event %d", ret);

    goto EXIT;

  }

  ///Disable Preview Port

  eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,

                           OMX_CommandPortDisable,

                           mCameraAdapterParameters.mPrevPortIndex,

                           NULL);

  ret = mSwitchToExecSem.WaitTimeout(OMX_CMD_TIMEOUT);

  if (ret != NO_ERROR){

    CAMHAL_LOGEB("Timeout PREVIEW PORT DISABLE %d", ret);

  }


  CAMHAL_LOGVB("PREV PORT DISABLED %d", ret);


  ///Register for IDLE state switch event

  ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,

                         OMX_EventCmdComplete,

                         OMX_CommandStateSet,

                         OMX_StateIdle,

                         mSwitchToExecSem);

  if(ret!=NO_ERROR)

    {

      CAMHAL_LOGEB("Error in IDLE STATE SWITCH %d", ret);

      goto EXIT;

    }

  eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp ,

                            OMX_CommandStateSet,

                            OMX_StateIdle,

                            NULL);

  GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);

  ret = mSwitchToExecSem.WaitTimeout(OMX_CMD_TIMEOUT);

  if (ret != NO_ERROR){

    CAMHAL_LOGEB("Timeout IDLE STATE SWITCH %d", ret);

    goto EXIT;

  }

  mComponentState = OMX_StateIdle;

  CAMHAL_LOGVB("OMX_SendCommand(OMX_StateIdle) 0x%x", eError);


  ///Register for EXECUTING state switch event

  ret = RegisterForEvent(mCameraAdapterParameters.mHandleComp,

                         OMX_EventCmdComplete,

                         OMX_CommandStateSet,

                         OMX_StateExecuting,

                         mSwitchToExecSem);

  if(ret!=NO_ERROR)

    {

      CAMHAL_LOGEB("Error in EXECUTING STATE SWITCH %d", ret);

      goto EXIT;

    }

  eError = OMX_SendCommand (mCameraAdapterParameters.mHandleComp ,

                            OMX_CommandStateSet,

                            OMX_StateExecuting,

                            NULL);

  GOTO_EXIT_IF((eError!=OMX_ErrorNone), eError);

  ret = mSwitchToExecSem.WaitTimeout(OMX_CMD_TIMEOUT);

  if (ret != NO_ERROR){

    CAMHAL_LOGEB("Timeout EXEC STATE SWITCH %d", ret);

    goto EXIT;

  }

  mComponentState = OMX_StateExecuting;

  CAMHAL_LOGVB("OMX_SendCommand(OMX_StateExecuting) 0x%x", eError);


  mStateSwitchLock.unlock();


  LOG_FUNCTION_NAME_EXIT;

  return ret;


 EXIT:

  CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);

  performCleanupAfterError();

  mStateSwitchLock.unlock();

  LOG_FUNCTION_NAME_EXIT;

  return (ret | ErrorUtils::omxToAndroidError(eError));

}

上面一连串做了三件事情：
1、disable preview port，注册事件处理通知，等待组件返回处理通知；
2、转换状态到 IDLE STATE，注册事件处理通知，等待组件返回处理通知；
3、转换状态到 EXECUTING STATE，注册事件处理通知，等待组件返回处理通知。

接下来重点看一下：我们自己申请了 buffer，怎么通知底层使用我们的 buffer 而不要重新申请 buffer。这个调用最终会调用到底层的 useBuffers 方法，直接看看这个方法的实现吧。

status_t OMXCameraAdapter::useBuffers(CameraMode mode, CameraBuffer * bufArr, int num, size_t length, unsigned int queueable)

{

    OMX_ERRORTYPE eError = OMX_ErrorNone;

    status_t ret = NO_ERROR;


    LOG_FUNCTION_NAME;


    switch(mode)

        {

        case CAMERA_PREVIEW:

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mNumBufs =  num;

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex].mMaxQueueable = queueable;

            ret = UseBuffersPreview(bufArr, num);

            break;


        case CAMERA_IMAGE_CAPTURE:

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mNumBufs = num;

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mImagePortIndex].mMaxQueueable = queueable;

            ret = UseBuffersCapture(bufArr, num);

            break;


        case CAMERA_VIDEO:

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mNumBufs =  num;

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex].mMaxQueueable = queueable;

            ret = UseBuffersRawCapture(bufArr, num);

            break;


        case CAMERA_MEASUREMENT:

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex].mNumBufs = num;

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mMeasurementPortIndex].mMaxQueueable =queueable;

            ret = UseBuffersPreviewData(bufArr, num);

            break;


        case CAMERA_REPROCESS:

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mNumBufs = num;

            mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoInPortIndex].mMaxQueueable =queueable;

            ret = UseBuffersReprocess(bufArr, num);

            break;

        }


    LOG_FUNCTION_NAME_EXIT;


    return ret;

}

接下来看看 UseBuffersPreview 这个方法：

            eError = OMX_SendCommand(mCameraAdapterParameters.mHandleComp,

                                     OMX_CommandPortEnable,

                                     mCameraAdapterParameters.mPrevPortIndex,

                                     NULL);

        }

 

    ///Configure DOMX to use either gralloc handles or vptrs


status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count)

{

    sp<MemoryHeapBase> heap;

    sp<MemoryBase> buffer;

    unsigned int *bufArr;

    int size = 0;


    LOG_FUNCTION_NAME;


    Mutex::Autolock lock(mLock);


    if ( NULL == mFrameProvider )

        {

        CAMHAL_LOGEA("Trying to start video recording without FrameProvider");

        return -EINVAL;

        }


    if ( mPreviewing )

        {

        CAMHAL_LOGDA("+Already previewing");

        return NO_INIT;

        }


    int w,h;

    ///Get preview size

    params.getPreviewSize(&w, &h);


    // save preview pixel format, size and stride

    mPreviewWidth = w;

    mPreviewHeight = h;

    mPreviewStride = 4096;

    mPreviewPixelFormat = getContstantForPixelFormat(params.getPreviewFormat());

    size = calculateBufferSize(w, h, mPreviewPixelFormat);

    这里根据传入的尺寸信息申请memory,

    mPreviewMemory = mRequestMemory(-1, size, AppCallbackNotifier::MAX_BUFFERS, NULL);

    if (!mPreviewMemory) {

        return NO_MEMORY;

    }


    for (int i=0; i < AppCallbackNotifier::MAX_BUFFERS; i++) {

        mPreviewBuffers[i].type = CAMERA_BUFFER_MEMORY;

        mPreviewBuffers[i].opaque = (unsigned char*) mPreviewMemory->data + (i*size);

        mPreviewBuffers[i].mapped = mPreviewBuffers[i].opaque;

    }


    if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME ) ) {

         mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);

    }


    if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_POSTVIEW_FRAME) ) {

         mFrameProvider->enableFrameNotification(CameraFrame::SNAPSHOT_FRAME);

    }


    mPreviewBufCount = 0;


    mPreviewing = true;


    LOG_FUNCTION_NAME_EXIT;

    return NO_ERROR;

}

到这里startPreview的初始化过程就结束了,下面咱们就进到底层看看OMXCameraAdapter是怎样实现开始preview的

/*========================================================*/

/* @ fn SampleTest_FillBufferDone ::  Application callback*/

/*========================================================*/

OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,

                                   OMX_IN OMX_PTR pAppData,

                                   OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)

{

    TIUTILS::Message msg;

    OMX_ERRORTYPE eError = OMX_ErrorNone;


    if (UNLIKELY(mDebugFps)) {

        debugShowFPS();

    }


    OMXCameraAdapter *adapter =  ( OMXCameraAdapter * ) pAppData;

    if ( NULL != adapter )

        {

        msg.command = OMXCameraAdapter::OMXCallbackHandler::CAMERA_FILL_BUFFER_DONE;

        msg.arg1 = ( void * ) hComponent;

        msg.arg2 = ( void * ) pBuffHeader;

        adapter->mOMXCallbackHandler->put(&msg);

        }


    return eError;

}

这里只是打包并发送消息，最终由 OMXCallbackHandler 中的 Handler 去处理这个消息：

bool OMXCameraAdapter::OMXCallbackHandler::Handler()

{

    TIUTILS::Message msg;

    volatile int forever = 1;

    status_t ret = NO_ERROR;


    LOG_FUNCTION_NAME;


    while(forever){

        TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);

        {检查到消息,接着往下走

        Mutex::Autolock lock(mLock);

        mCommandMsgQ.get(&msg);

        mIsProcessed = false;

        }


        switch ( msg.command ) {

            case OMXCallbackHandler::CAMERA_FILL_BUFFER_DONE:

            {

                ret = mCameraAdapter->OMXCameraAdapterFillBufferDone(( OMX_HANDLETYPE ) msg.arg1,

                                                                     ( OMX_BUFFERHEADERTYPE *) msg.arg2);

                break;

            }

            case OMXCallbackHandler::CAMERA_FOCUS_STATUS:

            {

                mCameraAdapter->handleFocusCallback();

                break;

            }

            case CommandHandler::COMMAND_EXIT:

            {

                CAMHAL_LOGDA("Exiting OMX callback handler");

                forever = 0;

                break;

            }

        }


        {

            android::AutoMutex locker(mLock);

            CAMHAL_UNUSED(locker);


            mIsProcessed = mCommandMsgQ.isEmpty();

            if ( mIsProcessed )

                mCondition.signal();

        }

    }


    // force the condition to wake

    {

        android::AutoMutex locker(mLock);

        CAMHAL_UNUSED(locker);


        mIsProcessed = true;

        mCondition.signal();

    }


    LOG_FUNCTION_NAME_EXIT;

    return false;

}

检查到fillBufferDone消息,调用OMXCameraAdapter下的fillBufferDone处理方法




原创粉丝点击