A Walkthrough of the QR Code Scanning Logic


http://coolshell.cn/articles/10590.html (details and principles of QR code generation)

http://blog.csdn.net/u012917700/article/details/52369175 (QR code scanning and optimization strategies)

http://blog.csdn.net/Tau_Chan/article/category/1846313 (a detailed series on building a polished QR code scanner)

Opening the camera and scanning the QR code

  • Main classes for capturing the preview: SurfaceView, SurfaceHolder and Camera (a minimal SurfaceHolder.Callback sketch follows the code below)
    // The SurfaceView can be declared in XML and initialized in the Activity
    SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surface_view);
    // Get a reference to the SurfaceHolder
    SurfaceHolder surfaceHolder = surfaceView.getHolder();
    // Register the callback; it provides surfaceCreated(), surfaceChanged() and surfaceDestroyed(),
    // and the camera can be opened inside surfaceCreated()
    surfaceHolder.addCallback(this);

    // Choose a camera preview resolution that matches the screen aspect ratio,
    // so the preview image is not distorted
    public void initFromCameraParameters(Camera camera) {
        Camera.Parameters parameters = camera.getParameters();
        WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        Display display = manager.getDefaultDisplay();
        screenResolution = getDisplaySize(display);
        Log.i(TAG, "Screen resolution: " + screenResolution);
        Point screenResolutionForCamera = new Point();
        screenResolutionForCamera.x = screenResolution.x;
        screenResolutionForCamera.y = screenResolution.y;
        /** The UI is shown in portrait, so if the screen width and height are not
            swapped here the resulting preview image is distorted */
        if (screenResolution.x < screenResolution.y) {
            screenResolutionForCamera.x = screenResolution.y;
            screenResolutionForCamera.y = screenResolution.x;
        }
        cameraResolution = findBestPreviewSizeValue(parameters, screenResolutionForCamera);
        Log.i(TAG, "Camera resolution x: " + cameraResolution.x);
        Log.i(TAG, "Camera resolution y: " + cameraResolution.y);
    }

    /**
     * Picks the most suitable preview size from the sizes supported by the camera.
     *
     * @param parameters
     * @param screenResolution
     * @return
     */
    private Point findBestPreviewSizeValue(Camera.Parameters parameters, Point screenResolution) {
        List<Camera.Size> rawSupportedSizes = parameters.getSupportedPreviewSizes();
        if (rawSupportedSizes == null) {
            Log.w(TAG, "Device returned no supported preview sizes; using default");
            Camera.Size defaultSize = parameters.getPreviewSize();
            return new Point(defaultSize.width, defaultSize.height);
        }

        // Sort by size, descending
        List<Camera.Size> supportedPreviewSizes = new ArrayList<Camera.Size>(rawSupportedSizes);
        Collections.sort(supportedPreviewSizes, (a, b) -> {
            int aPixels = a.height * a.width;
            int bPixels = b.height * b.width;
            if (bPixels < aPixels) {
                return -1;
            }
            if (bPixels > aPixels) {
                return 1;
            }
            return 0;
        });

        if (Log.isLoggable(TAG, Log.INFO)) {
            StringBuilder previewSizesString = new StringBuilder();
            for (Camera.Size supportedPreviewSize : supportedPreviewSizes) {
                previewSizesString.append(supportedPreviewSize.width).append('x')
                        .append(supportedPreviewSize.height).append(' ');
            }
            Log.i(TAG, "Supported preview sizes: " + previewSizesString);
        }

        double screenAspectRatio = (double) screenResolution.x / (double) screenResolution.y;

        // Remove sizes that are unsuitable
        Iterator<Camera.Size> it = supportedPreviewSizes.iterator();
        while (it.hasNext()) {
            Camera.Size supportedPreviewSize = it.next();
            int realWidth = supportedPreviewSize.width;
            int realHeight = supportedPreviewSize.height;
            if (realWidth * realHeight < MIN_PREVIEW_PIXELS) {
                it.remove();
                continue;
            }
            boolean isCandidatePortrait = realWidth < realHeight;
            int maybeFlippedWidth = isCandidatePortrait ? realHeight : realWidth;
            int maybeFlippedHeight = isCandidatePortrait ? realWidth : realHeight;
            double aspectRatio = (double) maybeFlippedWidth / (double) maybeFlippedHeight;
            double distortion = Math.abs(aspectRatio - screenAspectRatio);
            if (distortion > MAX_ASPECT_DISTORTION) {
                it.remove();
                continue;
            }
            if (maybeFlippedWidth == screenResolution.x && maybeFlippedHeight == screenResolution.y) {
                Point exactPoint = new Point(realWidth, realHeight);
                Log.i(TAG, "Found preview size exactly matching screen size: " + exactPoint);
                return exactPoint;
            }
        }

        // If no exact match, use the largest preview size. This was not a great idea on older
        // devices because of the additional computation needed. We're likely to get here on
        // newer Android 4+ devices, where the CPU is much more powerful.
        if (!supportedPreviewSizes.isEmpty()) {
            Camera.Size largestPreview = supportedPreviewSizes.get(0);
            Point largestSize = new Point(largestPreview.width, largestPreview.height);
            Log.i(TAG, "Using largest suitable preview size: " + largestSize);
            return largestSize;
        }

        // If there is nothing at all suitable, return the current preview size
        Camera.Size defaultPreview = parameters.getPreviewSize();
        Point defaultSize = new Point(defaultPreview.width, defaultPreview.height);
        Log.i(TAG, "No suitable preview sizes, using default: " + defaultSize);
        return defaultSize;
    }
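
    The article notes that the camera can be opened inside surfaceCreated(). As a rough, hedged
    illustration of that wiring only: the class name and the direct Camera.open() call below are
    assumptions; the article's own open(int) helper and parameter setup appear in the next sections.

    import java.io.IOException;

    import android.hardware.Camera;
    import android.util.Log;
    import android.view.SurfaceHolder;

    // Sketch only: PreviewSurfaceCallback is a hypothetical name.
    public class PreviewSurfaceCallback implements SurfaceHolder.Callback {

        private static final String TAG = "PreviewSurfaceCallback";

        private Camera camera;

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            try {
                // The surface exists now, so the camera can be opened and bound to it
                camera = Camera.open();            // back-facing camera by default
                camera.setDisplayOrientation(90);  // portrait UI, matching the width/height swap above
                camera.setPreviewDisplay(holder);
                camera.startPreview();
            } catch (IOException | RuntimeException e) {
                Log.w(TAG, "Failed to open camera or start preview", e);
            }
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            // The scan preview keeps a fixed size, so nothing is needed here
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            // Release the camera as soon as the surface goes away
            if (camera != null) {
                camera.stopPreview();
                camera.release();
                camera = null;
            }
        }
    }
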
  • Opening the camera (a short glue-code sketch follows the code below)
    /**
     * Opens the requested camera with {@link Camera#open(int)}, if one exists.
     *
     * @param cameraId
     *            camera ID of the camera to use. A negative value means
     *            "no preference"
     * @return handle to the {@link Camera} that was opened
     */
    public static Camera open(int cameraId) {
        int numCameras = Camera.getNumberOfCameras();
        if (numCameras == 0) {
            Log.w(TAG, "No cameras!");
            return null;
        }
        boolean explicitRequest = cameraId >= 0;
        if (!explicitRequest) {
            // Select a camera if no explicit camera requested
            int index = 0;
            while (index < numCameras) {
                Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                Camera.getCameraInfo(index, cameraInfo);
                if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                    break;
                }
                index++;
            }
            cameraId = index;
        }
        Camera camera;
        if (cameraId < numCameras) {
            Log.i(TAG, "Opening camera #" + cameraId);
            camera = Camera.open(cameraId);
        } else {
            if (explicitRequest) {
                Log.w(TAG, "Requested camera does not exist: " + cameraId);
                camera = null;
            } else {
                Log.i(TAG, "No camera facing back; returning camera #0");
                camera = Camera.open(0);
            }
        }
        return camera;
    }
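
    After open(int) returns a Camera, the preview size chosen by findBestPreviewSizeValue() still has
    to be written back into the camera parameters before the preview starts. A minimal sketch of that
    glue code, assuming a hypothetical PreviewStarter helper; only the android.hardware.Camera calls
    themselves are standard framework APIs.

    import java.io.IOException;

    import android.graphics.Point;
    import android.hardware.Camera;
    import android.view.SurfaceHolder;

    // Hypothetical helper; names are assumptions, the Camera calls are standard APIs.
    final class PreviewStarter {

        static void configureAndStartPreview(Camera camera, SurfaceHolder holder, Point cameraResolution)
                throws IOException {
            Camera.Parameters parameters = camera.getParameters();
            // Apply the preview size chosen by findBestPreviewSizeValue()
            parameters.setPreviewSize(cameraResolution.x, cameraResolution.y);
            camera.setParameters(parameters);
            // Rotate the preview for the portrait UI, matching the width/height swap earlier
            camera.setDisplayOrientation(90);
            camera.setPreviewDisplay(holder);
            camera.startPreview();
        }

        private PreviewStarter() {}
    }
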
  • A task that keeps re-triggering auto focus (a usage sketch follows the code below)
    public class AutoFocusManager implements Camera.AutoFocusCallback {

        private static final String TAG = in.haojin.nearbymerchant.zxing.camera.AutoFocusManager.class.getSimpleName();
        private static final long AUTO_FOCUS_INTERVAL_MS = 2000L;
        private static final Collection<String> FOCUS_MODES_CALLING_AF;

        static {
            FOCUS_MODES_CALLING_AF = new ArrayList<String>(2);
            FOCUS_MODES_CALLING_AF.add(Camera.Parameters.FOCUS_MODE_AUTO);
            FOCUS_MODES_CALLING_AF.add(Camera.Parameters.FOCUS_MODE_MACRO);
        }

        private boolean stopped;
        private boolean focusing;
        private final boolean useAutoFocus;
        private final Camera camera;
        private AsyncTask<?, ?, ?> outstandingTask;

        public AutoFocusManager(Context context, Camera camera) {
            this.camera = camera;
            String currentFocusMode = camera.getParameters().getFocusMode();
            useAutoFocus = FOCUS_MODES_CALLING_AF.contains(currentFocusMode);
            Log.i(TAG, "Current focus mode '" + currentFocusMode + "'; use auto focus? " + useAutoFocus);
            start();
        }

        @Override
        public synchronized void onAutoFocus(boolean success, Camera theCamera) {
            focusing = false;
            autoFocusAgainLater();
        }

        @SuppressLint("NewApi")
        private synchronized void autoFocusAgainLater() {
            if (!stopped && outstandingTask == null) {
                AutoFocusTask newTask = new AutoFocusTask();
                try {
                    if (Build.VERSION.SDK_INT >= 11) {
                        newTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
                    } else {
                        newTask.execute();
                    }
                    outstandingTask = newTask;
                } catch (RejectedExecutionException ree) {
                    Log.w(TAG, "Could not request auto focus", ree);
                }
            }
        }

        public synchronized void start() {
            if (useAutoFocus) {
                outstandingTask = null;
                if (!stopped && !focusing) {
                    try {
                        camera.autoFocus(this);
                        focusing = true;
                    } catch (RuntimeException re) {
                        // Have heard RuntimeException reported in Android 4.0.x+; continue?
                        Log.w(TAG, "Unexpected exception while focusing", re);
                        // Try again later to keep cycle going
                        autoFocusAgainLater();
                    }
                }
            }
        }

        private synchronized void cancelOutstandingTask() {
            if (outstandingTask != null) {
                if (outstandingTask.getStatus() != AsyncTask.Status.FINISHED) {
                    outstandingTask.cancel(true);
                }
                outstandingTask = null;
            }
        }

        public synchronized void stop() {
            stopped = true;
            if (useAutoFocus) {
                cancelOutstandingTask();
                // Doesn't hurt to call this even if not focusing
                try {
                    camera.cancelAutoFocus();
                } catch (RuntimeException re) {
                    // Have heard RuntimeException reported in Android 4.0.x+; continue?
                    Log.w(TAG, "Unexpected exception while cancelling focusing", re);
                }
            }
        }

        private final class AutoFocusTask extends AsyncTask<Object, Object, Object> {
            @Override
            protected Object doInBackground(Object... voids) {
                try {
                    Thread.sleep(AUTO_FOCUS_INTERVAL_MS);
                } catch (InterruptedException e) {
                    // continue
                }
                start();
                return null;
            }
        }
    }
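
    The AutoFocusManager above re-arms itself every AUTO_FOCUS_INTERVAL_MS via onAutoFocus(), so the
    caller only creates it once the preview is running and stops it before releasing the camera. A
    usage sketch; the startScanning()/stopScanning() method names and fields are assumptions.

    // Sketch only: fragment of a hypothetical camera-controller class.
    private AutoFocusManager autoFocusManager;

    private void startScanning(Context context, Camera camera) {
        camera.startPreview();
        // Begin the two-second focus cycle; the constructor calls start() itself
        autoFocusManager = new AutoFocusManager(context, camera);
    }

    private void stopScanning(Camera camera) {
        // Break the focus cycle first, so no auto-focus callback fires on a released camera
        if (autoFocusManager != null) {
            autoFocusManager.stop();
            autoFocusManager = null;
        }
        camera.stopPreview();
        camera.release();
    }
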
  • Requesting a preview frame and handling the returned data (a call-site sketch follows the code below)
    /**
     * A single preview frame will be returned to the handler supplied. The data
     * will arrive as byte[] in the message.obj field, with width and height
     * encoded as message.arg1 and message.arg2, respectively.
     *
     * @param handler
     *            The handler to send the message to.
     * @param message
     *            The what field of the message to be sent.
     */
    public synchronized void requestPreviewFrame(Handler handler, int message) {
        Camera theCamera = camera;
        if (theCamera != null && previewing) {
            previewCallback.setHandler(handler, message);
            theCamera.setOneShotPreviewCallback(previewCallback);
        }
    }

    // The preview callback receives the captured frame data and forwards it to the handler
    public class PreviewCallback implements Camera.PreviewCallback {

        private static final String TAG = PreviewCallback.class.getSimpleName();

        private final CameraConfigurationManager configManager;
        private Handler previewHandler;
        private int previewMessage;

        public PreviewCallback(CameraConfigurationManager configManager) {
            this.configManager = configManager;
        }

        public void setHandler(Handler previewHandler, int previewMessage) {
            this.previewHandler = previewHandler;
            this.previewMessage = previewMessage;
        }

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            Point cameraResolution = configManager.getCameraResolution();
            Handler thePreviewHandler = previewHandler;
            if (cameraResolution != null && thePreviewHandler != null) {
                Message message = thePreviewHandler.obtainMessage(previewMessage, cameraResolution.x,
                        cameraResolution.y, data);
                message.sendToTarget();
                previewHandler = null;
            } else {
                Log.d(TAG, "Got preview callback, but no handler or resolution available");
            }
        }
    }
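
    Because setOneShotPreviewCallback() delivers exactly one frame, each scan cycle starts with another
    requestPreviewFrame() call. A hedged call-site sketch: cameraManager and decodeHandler are assumed
    names for the camera wrapper that owns requestPreviewFrame() and for the DecodeHandlerV2 instance
    shown below; ConstV2.decode is the message id that decode handler expects.

    // Sketch only: one call produces one ConstV2.decode message,
    // with obj = byte[] frame and arg1/arg2 = width/height.
    cameraManager.requestPreviewFrame(decodeHandler, ConstV2.decode);
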
  • Running the decode (a hedged sketch of the result-handling side follows the code below)
    public class DecodeHandlerV2 extends Handler {

        // private final CaptureActivity activity;
        private final MultiFormatReader multiFormatReader;
        private boolean running = true;
        private CaptureActivityHandlerV2 captureActivityHandler;
        private Rect cropRect;
        private Context context;

        public DecodeHandlerV2(Context context, Map<DecodeHintType, Object> hints,
                               CaptureActivityHandlerV2 captureActivityHandler, Rect cropRect) {
            multiFormatReader = new MultiFormatReader();
            multiFormatReader.setHints(hints);
            this.context = context;
            this.captureActivityHandler = captureActivityHandler;
            this.cropRect = cropRect;
        }

        @Override
        public void handleMessage(Message message) {
            if (!running) {
                return;
            }
            switch (message.what) {
                case in.haojin.nearbymerchant.zxing.utils.ConstV2.decode:
                    decode((byte[]) message.obj, message.arg1, message.arg2);
                    break;
                case in.haojin.nearbymerchant.zxing.utils.ConstV2.quit:
                    running = false;
                    Looper.myLooper().quit();
                    break;
            }
        }

        /**
         * Decode the data within the viewfinder rectangle, and time how long it
         * took. For efficiency, reuse the same reader objects from one decode to
         * the next.
         *
         * @param data   The YUV preview frame.
         * @param width  The width of the preview frame.
         * @param height The height of the preview frame.
         */
        private void decode(byte[] data, int width, int height) {
            Size size = CameraManager.getInstance(context).getPreviewSize();
            if (size == null) {
                return;
            }
            // Rotate the frame data, because the camera delivers it in landscape orientation by default
            byte[] rotatedData = new byte[data.length];
            for (int y = 0; y < size.height; y++) {
                for (int x = 0; x < size.width; x++)
                    rotatedData[x * size.height + size.height - y - 1] = data[x + y * size.width];
            }
            // Swap width and height accordingly
            int tmp = size.width;
            size.width = size.height;
            size.height = tmp;
            Result rawResult = null;
            PlanarYUVLuminanceSource source = buildLuminanceSource(rotatedData, size.width, size.height);
            if (source != null) {
                BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
                try {
                    rawResult = multiFormatReader.decodeWithState(bitmap);
                } catch (ReaderException re) {
                    // continue
                } finally {
                    multiFormatReader.reset();
                }
            }
            Handler handler = captureActivityHandler;
            if (rawResult != null) {
                // Don't log the barcode contents for security.
                if (handler != null) {
                    Message message = Message.obtain(handler, CaptureActivityHandlerV2.MSG_DECODE_SUCCEEDED, rawResult);
                    Bundle bundle = new Bundle();
                    bundleThumbnail(source, bundle);
                    message.setData(bundle);
                    message.sendToTarget();
                }
            } else {
                if (handler != null) {
                    Message message = Message.obtain(handler, CaptureActivityHandlerV2.MSG_DECODE_FAILED);
                    message.sendToTarget();
                }
            }
        }

        private static void bundleThumbnail(PlanarYUVLuminanceSource source, Bundle bundle) {
            int[] pixels = source.renderThumbnail();
            int width = source.getThumbnailWidth();
            int height = source.getThumbnailHeight();
            Bitmap bitmap = Bitmap.createBitmap(pixels, 0, width, width, height, Bitmap.Config.ARGB_8888);
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            bitmap.compress(Bitmap.CompressFormat.JPEG, 50, out);
            bundle.putByteArray(DecodeThreadV2.BARCODE_BITMAP, out.toByteArray());
        }

        /**
         * A factory method to build the appropriate LuminanceSource object based on
         * the format of the preview buffers, as described by Camera.Parameters.
         *
         * @param data   A preview frame.
         * @param width  The width of the image.
         * @param height The height of the image.
         * @return A PlanarYUVLuminanceSource instance.
         */
        public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
            Rect rect = cropRect;
            if (rect == null) {
                return null;
            }
            // Go ahead and assume it's YUV rather than die.
            return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
                    rect.width(), rect.height(), false);
        }
    }
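
    DecodeHandlerV2 reports back with MSG_DECODE_SUCCEEDED or MSG_DECODE_FAILED, so the activity-side
    handler is what closes the scan loop: on failure it simply requests the next frame, on success it
    hands the Result to the UI. The article does not show CaptureActivityHandlerV2 itself, so everything
    in this sketch beyond the two message names (the fields, constructor, placeholder id values and the
    onDecodeSuccess() hook) is an assumption; it also assumes the requestPreviewFrame() method shown
    earlier belongs to the CameraManager singleton used in the decode code.

    import android.content.Context;
    import android.os.Bundle;
    import android.os.Handler;
    import android.os.Message;

    import com.google.zxing.Result;

    // Hedged sketch: only MSG_DECODE_SUCCEEDED / MSG_DECODE_FAILED come from the article;
    // CameraManager and ConstV2 are the project classes referenced above.
    public class CaptureActivityHandlerV2 extends Handler {

        public static final int MSG_DECODE_SUCCEEDED = 1; // placeholder value
        public static final int MSG_DECODE_FAILED = 2;    // placeholder value

        private final Context context;
        private final Handler decodeHandler; // the DecodeHandlerV2 running on the decode thread

        public CaptureActivityHandlerV2(Context context, Handler decodeHandler) {
            this.context = context;
            this.decodeHandler = decodeHandler;
            // Kick off the first scan cycle (ConstV2.decode is the id handled by DecodeHandlerV2)
            CameraManager.getInstance(context).requestPreviewFrame(decodeHandler, ConstV2.decode);
        }

        @Override
        public void handleMessage(Message message) {
            switch (message.what) {
                case MSG_DECODE_SUCCEEDED:
                    // A code was found: pass the Result (and the optional thumbnail bundle) to the UI
                    onDecodeSuccess((Result) message.obj, message.getData());
                    break;
                case MSG_DECODE_FAILED:
                    // Nothing in this frame: request the next one immediately so scanning continues
                    CameraManager.getInstance(context).requestPreviewFrame(decodeHandler, ConstV2.decode);
                    break;
            }
        }

        private void onDecodeSuccess(Result result, Bundle thumbnail) {
            // Hypothetical hook: stop the preview, beep/vibrate, and display result.getText()
        }
    }
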