Android N Graphics之BitmapFactory

来源:互联网 发布:园林景观设计制图软件 编辑:程序博客网 时间:2024/06/03 20:47

首先我们来看BitmapFactory提供给应用层的接口,一层一层往下分析:

一. 应用在加载图像的时候可以调用decodeFile来加载指定文件:

        /**     * Decode a file path into a bitmap. If the specified file name is null,     * or cannot be decoded into a bitmap, the function returns null.     *     * @param pathName complete path name for the file to be decoded.     * @return the resulting decoded bitmap, or null if it could not be decoded.     */    public static Bitmap decodeFile(String pathName) {        return decodeFile(pathName, null);    }    public static Bitmap decodeFile(String pathName, Options opts) {        Bitmap bm = null;        InputStream stream = null;        try {            stream = new FileInputStream(pathName);            bm = decodeStream(stream, null, opts);        } catch (Exception e) {            /*  do nothing.                If the exception happened on open, bm will be null.            */            Log.e("BitmapFactory", "Unable to decode stream: " + e);        } finally {            if (stream != null) {                try {                    stream.close();                } catch (IOException e) {                    // do nothing here                }            }        }        return bm;    }

由代码逻辑可知,在decodeFile中,首先会创建一个Bitmap和InputStream,分别用来接收解码后的图像以及指定文件的数据流,最终的解码实现是由decodeStream完成的,该接口负责处理指定的输入流;

    /**     * Decode an input stream into a bitmap. If the input stream is null, or     * cannot be used to decode a bitmap, the function returns null.     * The stream's position will be where ever it was after the encoded data     * was read.     *     * @param is The input stream that holds the raw data to be decoded into a     *           bitmap.     * @param outPadding If not null, return the padding rect for the bitmap if     *                   it exists, otherwise set padding to [-1,-1,-1,-1]. If     *                   no bitmap is returned (null) then padding is     *                   unchanged.     * @param opts null-ok; Options that control downsampling and whether the     *             image should be completely decoded, or just is size returned.     * @return The decoded bitmap, or null if the image data could not be     *         decoded, or, if opts is non-null, if opts requested only the     *         size be returned (in opts.outWidth and opts.outHeight)     *     * <p class="note">Prior to {@link android.os.Build.VERSION_CODES#KITKAT},     * if {@link InputStream#markSupported is.markSupported()} returns true,     * <code>is.mark(1024)</code> would be called. As of     * {@link android.os.Build.VERSION_CODES#KITKAT}, this is no longer the case.</p>     */    public static Bitmap decodeStream(InputStream is, Rect outPadding, Options opts) {        // we don't throw in this case, thus allowing the caller to only check        // the cache, and not force the image to be decoded.        
if (is == null) {            return null;        }        Bitmap bm = null;        Trace.traceBegin(Trace.TRACE_TAG_GRAPHICS, "decodeBitmap");        try {            if (is instanceof AssetManager.AssetInputStream) {                final long asset = ((AssetManager.AssetInputStream) is).getNativeAsset();                bm = nativeDecodeAsset(asset, outPadding, opts);            } else {                bm = decodeStreamInternal(is, outPadding, opts);            }            if (bm == null && opts != null && opts.inBitmap != null) {                throw new IllegalArgumentException("Problem decoding into existing bitmap");            }            setDensityFromOptions(bm, opts);        } finally {            Trace.traceEnd(Trace.TRACE_TAG_GRAPHICS);        }        return bm;    }    /**     * Private helper function for decoding an InputStream natively. Buffers the input enough to     * do a rewind as needed, and supplies temporary storage if necessary. is MUST NOT be null.     */    private static Bitmap decodeStreamInternal(InputStream is, Rect outPadding, Options opts) {        // ASSERT(is != null);        byte [] tempStorage = null;        if (opts != null) tempStorage = opts.inTempStorage;        if (tempStorage == null) tempStorage = new byte[DECODE_BUFFER_SIZE];        return nativeDecodeStream(is, tempStorage, outPadding, opts);    }

由注释其实就可以得知,decodeStream的主要功能是将输入的数据流解码成一个Bitmap对象,而这一步的操作主要是由nativeDecodeAsset和nativeDecodeStream完成的;

2.从文件描述符开始加载

    /**
     * Decode a bitmap from the file descriptor. If the bitmap cannot be decoded
     * return null. The position within the descriptor will not be changed when
     * this returns, so the descriptor can be used again as is.
     *
     * @param fd The file descriptor containing the bitmap data to decode
     * @return the decoded bitmap, or null
     */
    public static Bitmap decodeFileDescriptor(FileDescriptor fd) {
        return decodeFileDescriptor(fd, null, null);
    }

    /**
     * Decode a bitmap from the file descriptor, honoring padding and options.
     *
     * @param fd The file descriptor containing the bitmap data to decode
     * @param outPadding If not null, receives the padding rect (see decodeStream).
     * @param opts null-ok; decode options.
     * @return the decoded bitmap, or null
     */
    public static Bitmap decodeFileDescriptor(FileDescriptor fd, Rect outPadding, Options opts) {
        Bitmap decoded;
        Trace.traceBegin(Trace.TRACE_TAG_GRAPHICS, "decodeFileDescriptor");
        try {
            if (nativeIsSeekable(fd)) {
                // Seekable descriptors can be handed straight to native code.
                decoded = nativeDecodeFileDescriptor(fd, outPadding, opts);
            } else {
                // Non-seekable descriptors go through the buffered stream path.
                FileInputStream fileStream = new FileInputStream(fd);
                try {
                    decoded = decodeStreamInternal(fileStream, outPadding, opts);
                } finally {
                    try {
                        fileStream.close();
                    } catch (Throwable ignored) {
                        // best-effort close; the descriptor still belongs to the caller
                    }
                }
            }

            if (decoded == null && opts != null && opts.inBitmap != null) {
                throw new IllegalArgumentException("Problem decoding into existing bitmap");
            }
            setDensityFromOptions(decoded, opts);
        } finally {
            Trace.traceEnd(Trace.TRACE_TAG_GRAPHICS);
        }
        return decoded;
    }

同理,其也是通过nativeDecodeFileDescriptor或nativeDecodeStream来解码数据的;

3.从资源文件中加载图像

    /**     * Synonym for {@link #decodeResource(Resources, int, android.graphics.BitmapFactory.Options)}     * with null Options.     *     * @param res The resources object containing the image data     * @param id The resource id of the image data     * @return The decoded bitmap, or null if the image could not be decoded.     */    public static Bitmap decodeResource(Resources res, int id) {        return decodeResource(res, id, null);    }    public static Bitmap decodeResource(Resources res, int id, Options opts) {        Bitmap bm = null;        InputStream is = null;         try {            final TypedValue value = new TypedValue();            is = res.openRawResource(id, value);            bm = decodeResourceStream(res, value, is, null, opts);        } catch (Exception e) {            /*  do nothing.                If the exception happened on open, bm will be null.                If it happened on close, bm is still valid.            */        } finally {            try {                if (is != null) is.close();            } catch (IOException e) {                // Ignore            }        }        if (bm == null && opts != null && opts.inBitmap != null) {            throw new IllegalArgumentException("Problem decoding into existing bitmap");        }        return bm;    }    public static Bitmap decodeResourceStream(Resources res, TypedValue value,            InputStream is, Rect pad, Options opts) {        if (opts == null) {            opts = new Options();        }        if (opts.inDensity == 0 && value != null) {            final int density = value.density;            if (density == TypedValue.DENSITY_DEFAULT) {                opts.inDensity = DisplayMetrics.DENSITY_DEFAULT;            } else if (density != TypedValue.DENSITY_NONE) {                opts.inDensity = density;            }        }        if (opts.inTargetDensity == 0 && res != null) {            opts.inTargetDensity = res.getDisplayMetrics().densityDpi;        }        return 
decodeStream(is, pad, opts);    }

可见从资源文件中加载图像,最终也是通过decodeStream去加载的,decodeStream的流程如前面介绍的一样;

4.从文件流中加载

    /**
     * Decode an input stream into a bitmap, using default (null) options.
     * Convenience overload of {@link #decodeStream(InputStream, Rect, Options)};
     * returns null if the stream is null or cannot be decoded.
     *
     * @param is the input stream holding the raw data to be decoded.
     * @return the decoded bitmap, or null if it could not be decoded.
     */
    public static Bitmap decodeStream(InputStream is) {
        return decodeStream(is, null, null);
    }

这里就不继续介绍了,和上面是一样的;

5.从二进制文件中加载

   /**     * Decode an immutable bitmap from the specified byte array.     *     * @param data byte array of compressed image data     * @param offset offset into imageData for where the decoder should begin     *               parsing.     * @param length the number of bytes, beginning at offset, to parse     * @return The decoded bitmap, or null if the image could not be decoded.     */    public static Bitmap decodeByteArray(byte[] data, int offset, int length) {        return decodeByteArray(data, offset, length, null);    }    public static Bitmap decodeByteArray(byte[] data, int offset, int length, Options opts) {        if ((offset | length) < 0 || data.length < offset + length) {            throw new ArrayIndexOutOfBoundsException();        }        Bitmap bm;        Trace.traceBegin(Trace.TRACE_TAG_GRAPHICS, "decodeBitmap");        try {            bm = nativeDecodeByteArray(data, offset, length, opts);            if (bm == null && opts != null && opts.inBitmap != null) {                throw new IllegalArgumentException("Problem decoding into existing bitmap");            }            setDensityFromOptions(bm, opts);        } finally {            Trace.traceEnd(Trace.TRACE_TAG_GRAPHICS);        }        return bm;    }

过程和其他都是一样的,最终是通过nativeDecodeByteArray来实现解码的。

如上,对不同的输入会有不同的解码方式,其最终实现主要为:

nativeDecodeStream
nativeDecodeFileDescriptor
nativeDecodeAsset
nativeDecodeByteArray
接下来我们继续去看BitmapFactory.cpp中其实现;

// JNI entry point backing BitmapFactory.nativeDecodeStream().
// Wraps the Java InputStream in an SkStream adaptor, front-buffers enough
// bytes for the codec to sniff the image format and rewind, then hands the
// stream to doDecode(). Returns the decoded Java Bitmap, or NULL when the
// adaptor could not be created or the decode failed.
static jobject nativeDecodeStream(JNIEnv* env, jobject clazz, jobject is, jbyteArray storage,
        jobject padding, jobject options) {
    jobject bitmap = NULL;
    // Adaptor that reads from the Java InputStream using |storage| as the
    // temporary transfer buffer.
    std::unique_ptr<SkStream> stream(CreateJavaInputStreamAdaptor(env, is, storage));

    if (stream.get()) {
        // Buffer the minimum number of bytes the codec needs so the
        // (non-seekable) stream can be rewound during format detection.
        // release() transfers ownership of |stream| into the buffered stream.
        std::unique_ptr<SkStreamRewindable> bufferedStream(
                SkFrontBufferedStream::Create(stream.release(), SkCodec::MinBufferedBytesNeeded()));
        SkASSERT(bufferedStream.get() != NULL);
        // doDecode() takes ownership of the released buffered stream.
        bitmap = doDecode(env, bufferedStream.release(), padding, options);
    }
    return bitmap;
}

本文将主要以此接口来分析,因为其他几个native函数的主要实现和这个接口的实现是类似的。最终都是调用doDecode来实现的。
首先来分析一下nativeDecodeStream的实现:可见在nativeDecodeStream中首先会声明一个bitmap对象(初始为NULL),接着创建一个JavaInputStreamAdaptor——这是一个输入流的适配器,然后通过doDecode来完成解码。doDecode这个接口的实现非常长,不管怎么说,先贴上源码,下面来一点点分析:

// Core native decode routine shared by every BitmapFactory entry point.
// Takes ownership of |stream|; decodes it via SkAndroidCodec (optionally
// reusing the pixels of opts.inBitmap), applies density-based scaling,
// extracts nine-patch chunks/insets, and returns a Java Bitmap (or null /
// a thrown exception on failure).
static jobject doDecode(JNIEnv* env, SkStreamRewindable* stream, jobject padding, jobject options) {
    // The three Java-supplied parameters are the stream object, the padding
    // rect, and the BitmapFactory.Options object.
    // This function takes ownership of the input stream.  Since the SkAndroidCodec
    // will take ownership of the stream, we don't necessarily need to take ownership
    // here.  This is a precaution - if we were to return before creating the codec,
    // we need to make sure that we delete the stream.
    std::unique_ptr<SkStreamRewindable> streamDeleter(stream);

    // Set default values for the options parameters.
    // (Parameter initialization: these defaults apply when options == NULL.)
    int sampleSize = 1;
    bool onlyDecodeSize = false;
    SkColorType prefColorType = kN32_SkColorType;
    bool isMutable = false;
    float scale = 1.0f;
    bool requireUnpremultiplied = false;
    jobject javaBitmap = NULL;

    // Update with options supplied by the client.
    if (options != NULL) {
        sampleSize = env->GetIntField(options, gOptions_sampleSizeFieldID);
        // Correct a non-positive sampleSize.  sampleSize defaults to zero within the
        // options object, which is strange.
        if (sampleSize <= 0) {
            sampleSize = 1;
        }

        if (env->GetBooleanField(options, gOptions_justBoundsFieldID)) {
            onlyDecodeSize = true;
        }

        // initialize these, in case we fail later on
        env->SetIntField(options, gOptions_widthFieldID, -1);
        env->SetIntField(options, gOptions_heightFieldID, -1);
        env->SetObjectField(options, gOptions_mimeFieldID, 0);

        // Pull the preferred config, mutability, premultiply and reuse-bitmap
        // settings out of the Java Options object.
        jobject jconfig = env->GetObjectField(options, gOptions_configFieldID);
        prefColorType = GraphicsJNI::getNativeBitmapColorType(env, jconfig);
        isMutable = env->GetBooleanField(options, gOptions_mutableFieldID);
        requireUnpremultiplied = !env->GetBooleanField(options, gOptions_premultipliedFieldID);
        javaBitmap = env->GetObjectField(options, gOptions_bitmapFieldID);

        // Density scaling: only applies when inScaled is set and the source
        // density differs from the screen density.
        if (env->GetBooleanField(options, gOptions_scaledFieldID)) {
            const int density = env->GetIntField(options, gOptions_densityFieldID);
            const int targetDensity = env->GetIntField(options, gOptions_targetDensityFieldID);
            const int screenDensity = env->GetIntField(options, gOptions_screenDensityFieldID);
            if (density != 0 && targetDensity != 0 && density != screenDensity) {
                scale = (float) targetDensity / density;
            }
        }
    }

    // Create the codec.
    NinePatchPeeker peeker;
    std::unique_ptr<SkAndroidCodec> codec(SkAndroidCodec::NewFromStream(streamDeleter.release(),
            &peeker));
    if (!codec.get()) {
        return nullObjectReturn("SkAndroidCodec::NewFromStream returned null");
    }

    // Do not allow ninepatch decodes to 565.  In the past, decodes to 565
    // would dither, and we do not want to pre-dither ninepatches, since we
    // know that they will be stretched.  We no longer dither 565 decodes,
    // but we continue to prevent ninepatches from decoding to 565, in order
    // to maintain the old behavior.
    if (peeker.mPatch && kRGB_565_SkColorType == prefColorType) {
        prefColorType = kN32_SkColorType;
    }

    // Determine the output size.
    SkISize size = codec->getSampledDimensions(sampleSize);
    int scaledWidth = size.width();
    int scaledHeight = size.height();
    bool willScale = false;

    // Apply a fine scaling step if necessary.
    if (needsFineScale(codec->getInfo().dimensions(), size, sampleSize)) {
        willScale = true;
        scaledWidth = codec->getInfo().width() / sampleSize;
        scaledHeight = codec->getInfo().height() / sampleSize;
    }

    // Set the options and return if the client only wants the size.
    if (options != NULL) {
        jstring mimeType = encodedFormatToString(env, codec->getEncodedFormat());
        if (env->ExceptionCheck()) {
            return nullObjectReturn("OOM in encodedFormatToString()");
        }
        env->SetIntField(options, gOptions_widthFieldID, scaledWidth);
        env->SetIntField(options, gOptions_heightFieldID, scaledHeight);
        env->SetObjectField(options, gOptions_mimeFieldID, mimeType);

        // inJustDecodeBounds: dimensions have been reported, no pixels needed.
        if (onlyDecodeSize) {
            return nullptr;
        }
    }

    // Scale is necessary due to density differences.
    if (scale != 1.0f) {
        willScale = true;
        // +0.5f rounds to nearest when truncating to int.
        scaledWidth = static_cast<int>(scaledWidth * scale + 0.5f);
        scaledHeight = static_cast<int>(scaledHeight * scale + 0.5f);
    }

    // If the caller supplied a bitmap to reuse (opts.inBitmap), validate it:
    // immutable bitmaps cannot be decode targets.
    android::Bitmap* reuseBitmap = nullptr;
    unsigned int existingBufferSize = 0;
    if (javaBitmap != NULL) {
        reuseBitmap = GraphicsJNI::getBitmap(env, javaBitmap);
        if (reuseBitmap->peekAtPixelRef()->isImmutable()) {
            ALOGW("Unable to reuse an immutable bitmap as an image decoder target.");
            javaBitmap = NULL;
            reuseBitmap = nullptr;
        } else {
            existingBufferSize = GraphicsJNI::getBitmapAllocationByteCount(env, javaBitmap);
        }
    }

    // Choose the pixel allocator for the initial decode based on whether we
    // will scale afterwards and whether a reusable bitmap is available.
    JavaPixelAllocator javaAllocator(env);
    RecyclingPixelAllocator recyclingAllocator(reuseBitmap, existingBufferSize);
    ScaleCheckingAllocator scaleCheckingAllocator(scale, existingBufferSize);
    SkBitmap::HeapAllocator heapAllocator;
    SkBitmap::Allocator* decodeAllocator;
    if (javaBitmap != nullptr && willScale) {
        // This will allocate pixels using a HeapAllocator, since there will be an extra
        // scaling step that copies these pixels into Java memory.  This allocator
        // also checks that the recycled javaBitmap is large enough.
        decodeAllocator = &scaleCheckingAllocator;
    } else if (javaBitmap != nullptr) {
        decodeAllocator = &recyclingAllocator;
    } else if (willScale) {
        // This will allocate pixels using a HeapAllocator, since there will be an extra
        // scaling step that copies these pixels into Java memory.
        decodeAllocator = &heapAllocator;
    } else {
        decodeAllocator = &javaAllocator;
    }

    // Set the decode colorType.  This is necessary because we can't always support
    // the requested colorType.
    SkColorType decodeColorType = codec->computeOutputColorType(prefColorType);

    // Construct a color table for the decode if necessary
    SkAutoTUnref<SkColorTable> colorTable(nullptr);
    SkPMColor* colorPtr = nullptr;
    int* colorCount = nullptr;
    int maxColors = 256;
    SkPMColor colors[256];
    if (kIndex_8_SkColorType == decodeColorType) {
        colorTable.reset(new SkColorTable(colors, maxColors));

        // SkColorTable expects us to initialize all of the colors before creating an
        // SkColorTable.  However, we are using SkBitmap with an Allocator to allocate
        // memory for the decode, so we need to create the SkColorTable before decoding.
        // It is safe for SkAndroidCodec to modify the colors because this SkBitmap is
        // not being used elsewhere.
        colorPtr = const_cast<SkPMColor*>(colorTable->readColors());
        colorCount = &maxColors;
    }

    // Set the alpha type for the decode.
    SkAlphaType alphaType = codec->computeOutputAlphaType(requireUnpremultiplied);

    const SkImageInfo decodeInfo = SkImageInfo::Make(size.width(), size.height(), decodeColorType,
            alphaType);
    SkImageInfo bitmapInfo = decodeInfo;
    if (decodeColorType == kGray_8_SkColorType) {
        // The legacy implementation of BitmapFactory used kAlpha8 for
        // grayscale images (before kGray8 existed).  While the codec
        // recognizes kGray8, we need to decode into a kAlpha8 bitmap
        // in order to avoid a behavior change.
        bitmapInfo = SkImageInfo::MakeA8(size.width(), size.height());
    }
    SkBitmap decodingBitmap;
    if (!decodingBitmap.setInfo(bitmapInfo) ||
            !decodingBitmap.tryAllocPixels(decodeAllocator, colorTable)) {
        // SkAndroidCodec should recommend a valid SkImageInfo, so setInfo()
        // should only only fail if the calculated value for rowBytes is too
        // large.
        // tryAllocPixels() can fail due to OOM on the Java heap, OOM on the
        // native heap, or the recycled javaBitmap being too small to reuse.
        return nullptr;
    }

    // Use SkAndroidCodec to perform the decode.
    SkAndroidCodec::AndroidOptions codecOptions;
    codecOptions.fZeroInitialized = (decodeAllocator == &javaAllocator) ?
            SkCodec::kYes_ZeroInitialized : SkCodec::kNo_ZeroInitialized;
    codecOptions.fColorPtr = colorPtr;
    codecOptions.fColorCount = colorCount;
    codecOptions.fSampleSize = sampleSize;
    SkCodec::Result result = codec->getAndroidPixels(decodeInfo, decodingBitmap.getPixels(),
            decodingBitmap.rowBytes(), &codecOptions);
    switch (result) {
        case SkCodec::kSuccess:
        case SkCodec::kIncompleteInput:
            // A partially-decoded image is still returned to the caller.
            break;
        default:
            return nullObjectReturn("codec->getAndroidPixels() failed.");
    }

    // Serialize the nine-patch chunk (if any) into a Java byte[] so it can be
    // attached to the returned bitmap.
    jbyteArray ninePatchChunk = NULL;
    if (peeker.mPatch != NULL) {
        if (willScale) {
            scaleNinePatchChunk(peeker.mPatch, scale, scaledWidth, scaledHeight);
        }

        size_t ninePatchArraySize = peeker.mPatch->serializedSize();
        ninePatchChunk = env->NewByteArray(ninePatchArraySize);
        if (ninePatchChunk == NULL) {
            return nullObjectReturn("ninePatchChunk == null");
        }

        jbyte* array = (jbyte*) env->GetPrimitiveArrayCritical(ninePatchChunk, NULL);
        if (array == NULL) {
            return nullObjectReturn("primitive array == null");
        }

        memcpy(array, peeker.mPatch, peeker.mPatchSize);
        env->ReleasePrimitiveArrayCritical(ninePatchChunk, array, 0);
    }

    // Build the optical/outline insets object for nine-patches that carry them.
    jobject ninePatchInsets = NULL;
    if (peeker.mHasInsets) {
        ninePatchInsets = env->NewObject(gInsetStruct_class, gInsetStruct_constructorMethodID,
                peeker.mOpticalInsets[0], peeker.mOpticalInsets[1], peeker.mOpticalInsets[2], peeker.mOpticalInsets[3],
                peeker.mOutlineInsets[0], peeker.mOutlineInsets[1], peeker.mOutlineInsets[2], peeker.mOutlineInsets[3],
                peeker.mOutlineRadius, peeker.mOutlineAlpha, scale);
        if (ninePatchInsets == NULL) {
            return nullObjectReturn("nine patch insets == null");
        }
        if (javaBitmap != NULL) {
            env->SetObjectField(javaBitmap, gBitmap_ninePatchInsetsFieldID, ninePatchInsets);
        }
    }

    SkBitmap outputBitmap;
    if (willScale) {
        // This is weird so let me explain: we could use the scale parameter
        // directly, but for historical reasons this is how the corresponding
        // Dalvik code has always behaved. We simply recreate the behavior here.
        // The result is slightly different from simply using scale because of
        // the 0.5f rounding bias applied when computing the target image size
        const float sx = scaledWidth / float(decodingBitmap.width());
        const float sy = scaledHeight / float(decodingBitmap.height());

        // Set the allocator for the outputBitmap.
        SkBitmap::Allocator* outputAllocator;
        if (javaBitmap != nullptr) {
            outputAllocator = &recyclingAllocator;
        } else {
            outputAllocator = &javaAllocator;
        }

        SkColorType scaledColorType = colorTypeForScaledOutput(decodingBitmap.colorType());
        // FIXME: If the alphaType is kUnpremul and the image has alpha, the
        // colors may not be correct, since Skia does not yet support drawing
        // to/from unpremultiplied bitmaps.
        outputBitmap.setInfo(SkImageInfo::Make(scaledWidth, scaledHeight,
                scaledColorType, decodingBitmap.alphaType()));
        if (!outputBitmap.tryAllocPixels(outputAllocator, NULL)) {
            // This should only fail on OOM.  The recyclingAllocator should have
            // enough memory since we check this before decoding using the
            // scaleCheckingAllocator.
            return nullObjectReturn("allocation failed for scaled bitmap");
        }

        SkPaint paint;
        // kSrc_Mode instructs us to overwrite the unininitialized pixels in
        // outputBitmap.  Otherwise we would blend by default, which is not
        // what we want.
        paint.setXfermodeMode(SkXfermode::kSrc_Mode);
        paint.setFilterQuality(kLow_SkFilterQuality);

        SkCanvas canvas(outputBitmap);
        canvas.scale(sx, sy);
        canvas.drawBitmap(decodingBitmap, 0.0f, 0.0f, &paint);
    } else {
        outputBitmap.swap(decodingBitmap);
    }

    // Report the nine-patch padding (or sentinel [-1,-1,-1,-1]) back to Java.
    if (padding) {
        if (peeker.mPatch != NULL) {
            GraphicsJNI::set_jrect(env, padding,
                    peeker.mPatch->paddingLeft, peeker.mPatch->paddingTop,
                    peeker.mPatch->paddingRight, peeker.mPatch->paddingBottom);
        } else {
            GraphicsJNI::set_jrect(env, padding, -1, -1, -1, -1);
        }
    }

    // If we get here, the outputBitmap should have an installed pixelref.
    if (outputBitmap.pixelRef() == NULL) {
        return nullObjectReturn("Got null SkPixelRef");
    }

    if (!isMutable && javaBitmap == NULL) {
        // promise we will never change our pixels (great for sharing and pictures)
        outputBitmap.setImmutable();
    }

    bool isPremultiplied = !requireUnpremultiplied;
    if (javaBitmap != nullptr) {
        GraphicsJNI::reinitBitmap(env, javaBitmap, outputBitmap.info(), isPremultiplied);
        outputBitmap.notifyPixelsChanged();
        // If a java bitmap was passed in for reuse, pass it back
        return javaBitmap;
    }

    int bitmapCreateFlags = 0x0;
    if (isMutable) bitmapCreateFlags |= GraphicsJNI::kBitmapCreateFlag_Mutable;
    if (isPremultiplied) bitmapCreateFlags |= GraphicsJNI::kBitmapCreateFlag_Premultiplied;

    // now create the java bitmap
    return GraphicsJNI::createBitmap(env, javaAllocator.getStorageObjAndReset(),
            bitmapCreateFlags, ninePatchChunk, ninePatchInsets, -1);
}

待修改

0 0
原创粉丝点击