From 0d21c7651f5b24d6030a99009e87b8ea0ce707cd Mon Sep 17 00:00:00 2001 From: Rahul Behera Date: Wed, 18 Mar 2026 17:27:00 -0500 Subject: [PATCH 1/2] android: Add hardware-backed Bitmap decode API MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds new methods to AvifDecoder that decode AVIF images directly into AHardwareBuffer and return hardware-backed Bitmaps (Bitmap.Config.HARDWARE). Hardware Bitmaps are GPU-resident, require no CPU→GPU upload, and are directly compatible with Canvas, ImageView, and Drawable. New API (requires API 26): AvifDecoder.decodeHardwareBitmap(ByteBuffer, int, int[, boolean]) AvifDecoder.nextFrameHardwareBitmap([boolean]) AvifDecoder.nthFrameHardwareBitmap(int[, boolean]) AvifDecoder.isHighBitDepthDisplaySupported(Display) The allowHdr parameter controls format selection: when true and the image has depth > 8, an R16G16B16A16_FLOAT AHardwareBuffer is tried first to preserve HDR precision (PQ/HLG color spaces tagged accordingly). Falls back to R8G8B8A8_UNORM if FP16 is unsupported or allowHdr is false. avifImageYUVToRGB decodes directly into the locked AHardwareBuffer memory with no intermediate copy. ApplyCrop is introduced as a shared helper used by both the software Bitmap and hardware Bitmap decode paths. Uses __ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__ (NDK 25) so AHardwareBuffer symbols are weak-linked and the library remains loadable on API < 26. 
--- .../aomedia/avif/android/AvifDecoderTest.java | 72 ++++ .../org/aomedia/avif/android/AvifDecoder.java | 212 ++++++++++ .../src/main/jni/CMakeLists.txt | 5 +- .../src/main/jni/libavif_jni.cc | 381 ++++++++++++++++++ 4 files changed, 669 insertions(+), 1 deletion(-) diff --git a/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AvifDecoderTest.java b/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AvifDecoderTest.java index e20b35986f..37e8180177 100644 --- a/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AvifDecoderTest.java +++ b/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AvifDecoderTest.java @@ -5,6 +5,8 @@ import android.content.Context; import android.graphics.Bitmap; import android.graphics.Bitmap.Config; +import android.hardware.HardwareBuffer; +import android.os.Build; import androidx.test.platform.app.InstrumentationRegistry; import java.io.IOException; import java.io.InputStream; @@ -256,6 +258,76 @@ public void testDecodeRegularClass() throws IOException { decoder.release(); } + // Tests hardware-bitmap decode for still images. Runs only once per image (skips when + // config != ARGB_8888 to avoid redundant iterations over the same image). + @Test + public void testDecodeHardwareBitmap() throws IOException { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) { + return; + } + if (image.isAnimated || config != Config.ARGB_8888) { + return; + } + ByteBuffer buffer = image.getBuffer(); + assertThat(buffer).isNotNull(); + // Test SDR path (R8G8B8A8). + Bitmap bitmap = AvifDecoder.decodeHardwareBitmap(buffer, buffer.remaining()); + assertThat(bitmap).isNotNull(); + assertThat(bitmap.getConfig()).isEqualTo(Config.HARDWARE); + assertThat(bitmap.getWidth()).isEqualTo(image.width); + assertThat(bitmap.getHeight()).isEqualTo(image.height); + // For >8-bit images, also test the HDR path (FP16). 
+ if (image.depth > 8) { + buffer.rewind(); + Bitmap hdrBitmap = AvifDecoder.decodeHardwareBitmap(buffer, buffer.remaining(), 1, + /* allowHdr= */ true); + assertThat(hdrBitmap).isNotNull(); + assertThat(hdrBitmap.getConfig()).isEqualTo(Config.HARDWARE); + } + } + + // Tests hardware-bitmap decode for animated images. + @Test + public void testDecodeAnimatedHardwareBitmap() throws IOException { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) { + return; + } + if (!image.isAnimated || config != Config.ARGB_8888) { + return; + } + ByteBuffer buffer = image.getBuffer(); + AvifDecoder decoder = AvifDecoder.create(buffer, image.threads); + assertThat(decoder).isNotNull(); + // Test with allowHdr=true to exercise the FP16 path for >8-bit animated images. + boolean allowHdr = image.depth > 8; + for (int i = 0; i < image.frameCount; i++) { + Bitmap bitmap = decoder.nextFrameHardwareBitmap(allowHdr); + assertThat(bitmap).isNotNull(); + assertThat(bitmap.getConfig()).isEqualTo(Config.HARDWARE); + assertThat(bitmap.getWidth()).isEqualTo(image.width); + assertThat(bitmap.getHeight()).isEqualTo(image.height); + } + // Test nthFrameHardwareBitmap. + Bitmap bitmap = decoder.nthFrameHardwareBitmap(0, allowHdr); + assertThat(bitmap).isNotNull(); + assertThat(bitmap.getConfig()).isEqualTo(Config.HARDWARE); + + // Test buffer-reuse path: allocate once, decode all frames into the same buffer. 
+ HardwareBuffer hwb = decoder.createHardwareBuffer(allowHdr); + assertThat(hwb).isNotNull(); + Bitmap reuseBitmap = Bitmap.wrapHardwareBuffer(hwb, null); + assertThat(reuseBitmap).isNotNull(); + assertThat(reuseBitmap.getConfig()).isEqualTo(Config.HARDWARE); + for (int i = 0; i < image.frameCount; i++) { + Bitmap result = decoder.nextFrameHardwareBitmap(allowHdr, hwb); + assertThat(result).isNotNull(); + assertThat(result.getConfig()).isEqualTo(Config.HARDWARE); + } + assertThat(decoder.nthFrameHardwareBitmap(0, allowHdr, hwb)).isNotNull(); + hwb.close(); + decoder.release(); + } + @Test public void testUtilityFunctions() throws IOException { // Test the avifResult value whose value and string representations are least likely to change. diff --git a/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java b/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java index ee8c07e843..983291552b 100644 --- a/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java +++ b/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java @@ -4,7 +4,12 @@ package org.aomedia.avif.android; import android.graphics.Bitmap; +import android.hardware.HardwareBuffer; +import android.hardware.display.DisplayManager; +import android.os.Build; +import android.view.Display; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import java.nio.ByteBuffer; /** @@ -253,6 +258,213 @@ public int nthFrame(int n, Bitmap bitmap) { */ public static native String versionString(); + /** + * Returns true if {@code display} supports high bit-depth (HDR) rendering. + * + *

Pass the result to {@link #decodeHardwareBitmap(ByteBuffer, int, int, boolean)} as + * {@code allowHdr}: on SDR displays this avoids allocating an FP16 buffer; on HDR displays it + * preserves the full color range of >8-bit AVIF images. + * + *

Requires API 24; always returns false on older devices. + * + * @param display The display to query (typically {@code WindowManager.getDefaultDisplay()} or + * a display obtained from {@link DisplayManager}). + * @return true if the display can render HDR content. + */ + @RequiresApi(24) + public static boolean isHighBitDepthDisplaySupported(Display display) { + if (Build.VERSION.SDK_INT < 24) return false; + if (Build.VERSION.SDK_INT >= 26 && display.isWideColorGamut()) return true; + Display.HdrCapabilities caps = display.getHdrCapabilities(); + return caps != null && caps.getSupportedHdrTypes().length > 0; + } + + /** + * Decodes a still AVIF image and returns a hardware-backed {@link Bitmap} (Config.HARDWARE). + * + *

The returned Bitmap is GPU-resident and cannot be modified. Returns null if the device does + * not support AHardwareBuffer (API < 26) or if the decode fails. + * + * @param encoded The encoded AVIF image. encoded.position() must be 0. + * @param length Length of the encoded buffer. + * @param threads Number of decode threads (0 = library default, negative = CPU core count). + * @param allowHdr When true and the image has depth > 8, an R16G16B16A16_FLOAT (FP16) buffer is + * used to preserve HDR precision. When false, R8G8B8A8_UNORM is always used (SDR). Use + * {@link #isHighBitDepthDisplaySupported} to determine the right value. + * @return A hardware-backed Bitmap on success, null on failure. + */ + @RequiresApi(26) + @Nullable + public static Bitmap decodeHardwareBitmap(ByteBuffer encoded, int length, int threads, + boolean allowHdr) { + if (Build.VERSION.SDK_INT < 26) { + return null; + } + return (Bitmap) nativeDecodeHardwareBitmap(encoded, length, threads, allowHdr); + } + + /** + * Decodes a still AVIF image and returns a hardware-backed {@link Bitmap} (Config.HARDWARE). + * + *

The returned Bitmap is GPU-resident and cannot be modified. Returns null if the device does + * not support AHardwareBuffer (API < 26) or if the decode fails. + * + *

Always uses R8G8B8A8_UNORM (SDR). For HDR-aware decoding, use + * {@link #decodeHardwareBitmap(ByteBuffer, int, int, boolean)}. + * + * @param encoded The encoded AVIF image. encoded.position() must be 0. + * @param length Length of the encoded buffer. + * @param threads Number of decode threads (0 = library default, negative = CPU core count). + * @return A hardware-backed Bitmap on success, null on failure. + */ + @RequiresApi(26) + @Nullable + public static Bitmap decodeHardwareBitmap(ByteBuffer encoded, int length, int threads) { + return decodeHardwareBitmap(encoded, length, threads, /* allowHdr= */ false); + } + + /** + * Decodes a still AVIF image and returns a hardware-backed {@link Bitmap} (Config.HARDWARE). + * + *

Uses a single decode thread and R8G8B8A8_UNORM (SDR). Returns null on failure. + * + * @param encoded The encoded AVIF image. encoded.position() must be 0. + * @param length Length of the encoded buffer. + * @return A hardware-backed Bitmap on success, null on failure. + */ + @RequiresApi(26) + @Nullable + public static Bitmap decodeHardwareBitmap(ByteBuffer encoded, int length) { + return decodeHardwareBitmap(encoded, length, /* threads= */ 1, /* allowHdr= */ false); + } + + /** + * Allocates a {@link HardwareBuffer} compatible with this decoder's image for use with + * {@link #nextFrameHardwareBitmap(boolean, HardwareBuffer)} across animation frames. + * + *

Reuse the same buffer each frame: a {@link Bitmap} wrapping it via + * {@link Bitmap#wrapHardwareBuffer} reflects new content without re-allocation. + * The caller is responsible for closing the buffer when done. + * + * @param allowHdr When true, prefer R16G16B16A16_FLOAT for >8-bit images (falls back to + * R8G8B8A8_UNORM if unsupported). When false, always uses R8G8B8A8_UNORM. + * @return A new HardwareBuffer, or null on failure. + */ + @RequiresApi(26) + @Nullable + public HardwareBuffer createHardwareBuffer(boolean allowHdr) { + if (Build.VERSION.SDK_INT < 26) return null; + return (HardwareBuffer) nativeCreateHardwareBuffer(width, height, depth, allowHdr); + } + + /** + * Decodes the next frame of an animated AVIF and returns a hardware-backed {@link Bitmap}. + * + *

If {@code dest} is non-null, decodes into that buffer and wraps it as a Bitmap — the + * same Bitmap created via {@link Bitmap#wrapHardwareBuffer} reflects the new content without + * re-allocation. If {@code dest} is null, a new {@link HardwareBuffer} is allocated internally. + * + * @param allowHdr When true and the image has depth > 8, FP16 is used. See + * {@link #decodeHardwareBitmap(ByteBuffer, int, int, boolean)}. + * @param dest Optional pre-allocated buffer to decode into. Must match image dimensions. + * Use {@link #createHardwareBuffer} to allocate a compatible buffer. + * @return A hardware-backed Bitmap on success, null on failure. + */ + @RequiresApi(26) + @Nullable + public Bitmap nextFrameHardwareBitmap(boolean allowHdr, @Nullable HardwareBuffer dest) { + if (Build.VERSION.SDK_INT < 26) return null; + return (Bitmap) nativeNextFrameHardwareBitmap(decoder, allowHdr, dest); + } + + /** + * Decodes the next frame of an animated AVIF and returns a hardware-backed {@link Bitmap}. + * + *

Allocates a new {@link HardwareBuffer} internally on each call. For zero-copy frame + * reuse, use {@link #nextFrameHardwareBitmap(boolean, HardwareBuffer)} instead. + * + * @param allowHdr When true and the image has depth > 8, FP16 is used. + * @return A hardware-backed Bitmap on success, null on failure. + */ + @RequiresApi(26) + @Nullable + public Bitmap nextFrameHardwareBitmap(boolean allowHdr) { + return nextFrameHardwareBitmap(allowHdr, /* dest= */ null); + } + + /** + * Decodes the next frame of an animated AVIF and returns a hardware-backed {@link Bitmap}. + * + *

Uses R8G8B8A8_UNORM (SDR). Returns null on failure. + * + * @return A hardware-backed Bitmap on success, null on failure. + */ + @RequiresApi(26) + @Nullable + public Bitmap nextFrameHardwareBitmap() { + return nextFrameHardwareBitmap(/* allowHdr= */ false, /* dest= */ null); + } + + /** + * Decodes the nth frame of an animated AVIF and returns a hardware-backed {@link Bitmap}. + * + *

If {@code dest} is non-null, decodes into that buffer and wraps it as a Bitmap. If + * {@code dest} is null, a new {@link HardwareBuffer} is allocated internally. + * + * @param n The zero-based index of the frame to decode. + * @param allowHdr When true and the image has depth > 8, FP16 is used. + * @param dest Optional pre-allocated buffer to decode into. Must match image dimensions. + * @return A hardware-backed Bitmap on success, null on failure. + */ + @RequiresApi(26) + @Nullable + public Bitmap nthFrameHardwareBitmap(int n, boolean allowHdr, @Nullable HardwareBuffer dest) { + if (Build.VERSION.SDK_INT < 26) return null; + return (Bitmap) nativeNthFrameHardwareBitmap(decoder, n, allowHdr, dest); + } + + /** + * Decodes the nth frame of an animated AVIF and returns a hardware-backed {@link Bitmap}. + * + *

Allocates a new {@link HardwareBuffer} internally. For zero-copy reuse, use + * {@link #nthFrameHardwareBitmap(int, boolean, HardwareBuffer)} instead. + * + * @param n The zero-based index of the frame to decode. + * @param allowHdr When true and the image has depth > 8, FP16 is used. + * @return A hardware-backed Bitmap on success, null on failure. + */ + @RequiresApi(26) + @Nullable + public Bitmap nthFrameHardwareBitmap(int n, boolean allowHdr) { + return nthFrameHardwareBitmap(n, allowHdr, /* dest= */ null); + } + + /** + * Decodes the nth frame of an animated AVIF and returns a hardware-backed {@link Bitmap}. + * + *

Uses R8G8B8A8_UNORM (SDR). Returns null on failure. + * + * @param n The zero-based index of the frame to decode. + * @return A hardware-backed Bitmap on success, null on failure. + */ + @RequiresApi(26) + @Nullable + public Bitmap nthFrameHardwareBitmap(int n) { + return nthFrameHardwareBitmap(n, /* allowHdr= */ false, /* dest= */ null); + } + + private static native Object nativeDecodeHardwareBitmap( + ByteBuffer encoded, int length, int threads, boolean allowHdr); + + private native Object nativeNextFrameHardwareBitmap( + long decoder, boolean allowHdr, Object dest); + + private native Object nativeNthFrameHardwareBitmap( + long decoder, int n, boolean allowHdr, Object dest); + + private native Object nativeCreateHardwareBuffer( + int width, int height, int depth, boolean allowHdr); + private native long createDecoder(ByteBuffer encoded, int length, int threads); private native void destroyDecoder(long decoder); diff --git a/android_jni/avifandroidjni/src/main/jni/CMakeLists.txt b/android_jni/avifandroidjni/src/main/jni/CMakeLists.txt index da944b3638..778b4bddd0 100644 --- a/android_jni/avifandroidjni/src/main/jni/CMakeLists.txt +++ b/android_jni/avifandroidjni/src/main/jni/CMakeLists.txt @@ -40,4 +40,7 @@ include_directories(${CPU_FEATURES_DIR}) add_library(cpufeatures STATIC "${CPU_FEATURES_DIR}/cpu-features.c") target_link_options(avif_android PRIVATE "-Wl,-z,max-page-size=16384") -target_link_libraries(avif_android jnigraphics avif log cpufeatures) +# Make AHardwareBuffer symbols weak so they are absent (null) on API < 26 rather +# than causing a dlopen failure at load time. 
+target_compile_definitions(avif_android PRIVATE __ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__) +target_link_libraries(avif_android android jnigraphics avif log cpufeatures) diff --git a/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc b/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc index 83eb3e3d93..c7d21bb207 100644 --- a/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc +++ b/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc @@ -1,7 +1,10 @@ // Copyright 2022 Google LLC // SPDX-License-Identifier: BSD-2-Clause +#include #include +#include +#include #include #include #include @@ -102,6 +105,29 @@ bool CreateDecoderAndParse(AvifDecoderWrapper* const decoder, return true; } +avifImage* ApplyCrop( + AvifDecoderWrapper* const decoder, + std::unique_ptr& cropped_image) { + if (decoder->decoder->image->width == decoder->crop.width && + decoder->decoder->image->height == decoder->crop.height && + decoder->crop.x == 0 && decoder->crop.y == 0) { + return decoder->decoder->image; + } + cropped_image.reset(avifImageCreateEmpty()); + if (cropped_image == nullptr) { + LOGE("Failed to allocate cropped image."); + return nullptr; + } + avifResult res = avifImageSetViewRect(cropped_image.get(), + decoder->decoder->image, + &decoder->crop); + if (res != AVIF_RESULT_OK) { + LOGE("Failed to set crop rectangle. Status: %d", res); + return nullptr; + } + return cropped_image.get(); +} + avifResult AvifImageToBitmap(JNIEnv* const env, AvifDecoderWrapper* const decoder, jobject bitmap) { @@ -243,6 +269,309 @@ bool JniExceptionCheck(JNIEnv* env) { return true; } +AHardwareBuffer* TryAllocateHardwareBuffer(uint32_t width, uint32_t height, + uint32_t format) { + AHardwareBuffer_Desc desc = {}; + desc.width = width; + desc.height = height; + desc.layers = 1; + desc.format = format; + desc.usage = AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN | + AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE; + // On API 29+, check if the format is supported before allocating. 
+ if (android_get_device_api_level() >= 29) { + if (!AHardwareBuffer_isSupported(&desc)) { + return nullptr; + } + } + AHardwareBuffer* buffer = nullptr; + if (AHardwareBuffer_allocate(&desc, &buffer) != 0) { + return nullptr; + } + return buffer; +} + +AHardwareBuffer* TryDirectDecode(avifImage* image, uint32_t ahb_format, + avifRGBFormat rgb_format, int rgb_depth, + avifBool is_float, int bytes_per_pixel) { + AHardwareBuffer* hwb = + TryAllocateHardwareBuffer(image->width, image->height, ahb_format); + if (hwb == nullptr) return nullptr; + + AHardwareBuffer_Desc desc; + AHardwareBuffer_describe(hwb, &desc); + + void* pixels = nullptr; + if (AHardwareBuffer_lock(hwb, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, + nullptr, &pixels) != 0 || + pixels == nullptr) { + AHardwareBuffer_release(hwb); + return nullptr; + } + + avifRGBImage rgb; + avifRGBImageSetDefaults(&rgb, image); + rgb.format = rgb_format; + rgb.depth = rgb_depth; + rgb.isFloat = is_float; + rgb.pixels = static_cast(pixels); + // AHardwareBuffer_Desc.stride is in pixels, not bytes. 
+ rgb.rowBytes = desc.stride * bytes_per_pixel; + rgb.alphaPremultiplied = AVIF_TRUE; + + avifResult res = avifImageYUVToRGB(image, &rgb); + AHardwareBuffer_unlock(hwb, nullptr); + if (res != AVIF_RESULT_OK) { + LOGE("avifImageYUVToRGB failed: %d", res); + AHardwareBuffer_release(hwb); + return nullptr; + } + return hwb; +} + +AHardwareBuffer* AvifImageToHardwareBuffer(avifImage* image, bool allow_hdr, + uint32_t* out_format) { + if (allow_hdr && image->depth > 8) { + AHardwareBuffer* hwb = + TryDirectDecode(image, AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT, + AVIF_RGB_FORMAT_RGBA, 16, AVIF_TRUE, 8); + if (hwb != nullptr) { + *out_format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT; + return hwb; + } + } + AHardwareBuffer* hwb = + TryDirectDecode(image, AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM, + AVIF_RGB_FORMAT_RGBA, 8, AVIF_FALSE, 4); + if (hwb != nullptr) { + *out_format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM; + } + return hwb; +} + +// avifImageYUVToRGB preserves the source transfer function and does not +// tone-map, so PQ/HLG images must be tagged with the matching HDR color space. +jobject GetColorSpace(JNIEnv* env, const avifImage* image, + uint32_t ahb_format) { + if (ahb_format != AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT) { + return nullptr; + } + const int api = android_get_device_api_level(); + const bool is_bt2020 = + (image->colorPrimaries == AVIF_COLOR_PRIMARIES_BT2020); + // Look up ColorSpace.get(ColorSpace.Named.). 
+ auto get_named_cs = [&](const char* name) -> jobject { + jclass cs_named = env->FindClass("android/graphics/ColorSpace$Named"); + if (cs_named == nullptr) { + if (env->ExceptionCheck()) env->ExceptionClear(); + return nullptr; + } + jfieldID fid = env->GetStaticFieldID(cs_named, name, + "Landroid/graphics/ColorSpace$Named;"); + if (fid == nullptr) { + if (env->ExceptionCheck()) env->ExceptionClear(); + return nullptr; + } + jobject named_val = env->GetStaticObjectField(cs_named, fid); + if (named_val == nullptr) return nullptr; + jclass cs = env->FindClass("android/graphics/ColorSpace"); + if (cs == nullptr) { + if (env->ExceptionCheck()) env->ExceptionClear(); + return nullptr; + } + jmethodID get = env->GetStaticMethodID( + cs, "get", + "(Landroid/graphics/ColorSpace$Named;)Landroid/graphics/ColorSpace;"); + if (get == nullptr) { + if (env->ExceptionCheck()) env->ExceptionClear(); + return nullptr; + } + jobject result = env->CallStaticObjectMethod(cs, get, named_val); + if (env->ExceptionCheck()) { + env->ExceptionClear(); + return nullptr; + } + return result; + }; + if (is_bt2020 && + image->transferCharacteristics == AVIF_TRANSFER_CHARACTERISTICS_PQ && + api >= 33) { + jobject cs = get_named_cs("BT2020_PQ"); + if (cs != nullptr) return cs; + } + if (is_bt2020 && + image->transferCharacteristics == AVIF_TRANSFER_CHARACTERISTICS_HLG && + api >= 34) { + jobject cs = get_named_cs("BT2020_HLG"); + if (cs != nullptr) return cs; + } + // FP16 with non-HDR transfer: gamma-encoded SDR content. Tag as SRGB, not + // LINEAR_EXTENDED_SRGB. + return get_named_cs("SRGB"); +} + +bool AvifImageToExistingHardwareBuffer(JNIEnv* env, + AvifDecoderWrapper* decoder, + jobject dest) { + if (android_get_device_api_level() < 26) return false; + + // AHardwareBuffer_fromHardwareBuffer returns a borrowed pointer; the Java + // HardwareBuffer retains ownership. Do not call AHardwareBuffer_release on + // the returned pointer. 
+ AHardwareBuffer* ahb = AHardwareBuffer_fromHardwareBuffer(env, dest); + if (ahb == nullptr) return false; + + std::unique_ptr cropped_image( + nullptr, avifImageDestroy); + avifImage* image = ApplyCrop(decoder, cropped_image); + if (image == nullptr) return false; + + AHardwareBuffer_Desc desc; + AHardwareBuffer_describe(ahb, &desc); + + if (desc.width != image->width || desc.height != image->height) { + LOGE("AvifImageToExistingHardwareBuffer: buffer %ux%u != image %ux%u", + desc.width, desc.height, image->width, image->height); + return false; + } + + void* pixels = nullptr; + if (AHardwareBuffer_lock(ahb, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, + nullptr, &pixels) != 0 || + pixels == nullptr) { + return false; + } + + avifRGBImage rgb; + avifRGBImageSetDefaults(&rgb, image); + rgb.alphaPremultiplied = AVIF_TRUE; + rgb.pixels = static_cast(pixels); + + bool ok = false; + switch (desc.format) { + case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM: + rgb.format = AVIF_RGB_FORMAT_RGBA; + rgb.depth = 8; + rgb.isFloat = AVIF_FALSE; + rgb.rowBytes = desc.stride * 4; + ok = avifImageYUVToRGB(image, &rgb) == AVIF_RESULT_OK; + break; + case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT: + rgb.format = AVIF_RGB_FORMAT_RGBA; + rgb.depth = 16; + rgb.isFloat = AVIF_TRUE; + rgb.rowBytes = desc.stride * 8; + ok = avifImageYUVToRGB(image, &rgb) == AVIF_RESULT_OK; + break; + default: + LOGE("AvifImageToExistingHardwareBuffer: unsupported format 0x%x", + desc.format); + break; + } + + AHardwareBuffer_unlock(ahb, nullptr); + return ok; +} + +jobject WrapHardwareBufferAsBitmap(JNIEnv* env, jobject java_hwb, + jobject color_space) { + jclass bitmap_class = env->FindClass("android/graphics/Bitmap"); + if (bitmap_class == nullptr) { + if (env->ExceptionCheck()) env->ExceptionClear(); + return nullptr; + } + jmethodID wrap_method = env->GetStaticMethodID( + bitmap_class, "wrapHardwareBuffer", + "(Landroid/hardware/HardwareBuffer;Landroid/graphics/ColorSpace;)" + "Landroid/graphics/Bitmap;"); + 
if (wrap_method == nullptr) { + if (env->ExceptionCheck()) env->ExceptionClear(); + return nullptr; + } + jobject bitmap = env->CallStaticObjectMethod(bitmap_class, wrap_method, + java_hwb, color_space); + if (env->ExceptionCheck()) { + env->ExceptionClear(); + return nullptr; + } + return bitmap; +} + +// Decodes the current image into a hardware-backed Bitmap. +// If dest is non-null, decodes into that caller-provided HardwareBuffer and +// wraps it as a Bitmap (null color space — caller chose the format). +// If dest is null, allocates a new AHardwareBuffer, selects the color space +// from the image's CICP metadata, wraps as a Bitmap, and closes the +// intermediate Java HardwareBuffer (wrapHardwareBuffer holds its own ref). +jobject AvifImageToHardwareBitmap(JNIEnv* env, AvifDecoderWrapper* decoder, + bool allow_hdr, jobject dest) { + if (android_get_device_api_level() < 26) return nullptr; + + if (dest != nullptr) { + if (!AvifImageToExistingHardwareBuffer(env, decoder, dest)) return nullptr; + return WrapHardwareBufferAsBitmap(env, dest, /*color_space=*/nullptr); + } + + std::unique_ptr cropped_image( + nullptr, avifImageDestroy); + avifImage* image = ApplyCrop(decoder, cropped_image); + if (image == nullptr) return nullptr; + uint32_t ahb_format = 0; + AHardwareBuffer* hwb = + AvifImageToHardwareBuffer(image, allow_hdr, &ahb_format); + if (hwb == nullptr) return nullptr; + jobject java_hwb = AHardwareBuffer_toHardwareBuffer(env, hwb); + // toHardwareBuffer increments the refcount; release the native reference now. + AHardwareBuffer_release(hwb); + if (java_hwb == nullptr) return nullptr; + jobject color_space = GetColorSpace(env, image, ahb_format); + jobject bitmap = WrapHardwareBufferAsBitmap(env, java_hwb, color_space); + // Close the Java HardwareBuffer — wrapHardwareBuffer() holds its own ref. 
+ jclass hwb_class = env->FindClass("android/hardware/HardwareBuffer"); + if (hwb_class != nullptr) { + jmethodID close = env->GetMethodID(hwb_class, "close", "()V"); + if (close != nullptr) env->CallVoidMethod(java_hwb, close); + if (env->ExceptionCheck()) env->ExceptionClear(); + } + return bitmap; +} + +jobject CreateHardwareBufferForImage(JNIEnv* env, int width, int height, + int depth, bool allow_hdr) { + if (android_get_device_api_level() < 26) return nullptr; + + const uint64_t usage = AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN | + AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE; + + auto try_alloc = [&](uint32_t format) -> AHardwareBuffer* { + AHardwareBuffer_Desc desc = {}; + desc.width = static_cast(width); + desc.height = static_cast(height); + desc.layers = 1; + desc.format = format; + desc.usage = usage; + if (android_get_device_api_level() >= 29 && + !AHardwareBuffer_isSupported(&desc)) { + return nullptr; + } + AHardwareBuffer* hwb = nullptr; + return (AHardwareBuffer_allocate(&desc, &hwb) == 0) ? 
hwb : nullptr; + }; + + AHardwareBuffer* hwb = nullptr; + if (allow_hdr && depth > 8) { + hwb = try_alloc(AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT); + } + if (hwb == nullptr) { + hwb = try_alloc(AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM); + } + if (hwb == nullptr) return nullptr; + + jobject java_hwb = AHardwareBuffer_toHardwareBuffer(env, hwb); + AHardwareBuffer_release(hwb); + return java_hwb; +} + } // namespace jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) { @@ -429,3 +758,55 @@ FUNC(void, destroyDecoder, jlong jdecoder) { reinterpret_cast(jdecoder); delete decoder; } + +FUNC(jobject, nativeDecodeHardwareBitmap, jobject encoded, jint length, + jint threads, jboolean allow_hdr) { + IGNORE_UNUSED_JNI_PARAMETERS; + const uint8_t* const buffer = + static_cast(env->GetDirectBufferAddress(encoded)); + AvifDecoderWrapper decoder; + if (!CreateDecoderAndParse(&decoder, buffer, length, + getThreadCount(threads))) { + return nullptr; + } + avifResult res = avifDecoderNextImage(decoder.decoder); + if (res != AVIF_RESULT_OK) { + LOGE("Failed to decode AVIF image. Status: %d", res); + return nullptr; + } + return AvifImageToHardwareBitmap(env, &decoder, allow_hdr, /*dest=*/nullptr); +} + +FUNC(jobject, nativeNextFrameHardwareBitmap, jlong jdecoder, jboolean allow_hdr, + jobject dest) { + IGNORE_UNUSED_JNI_PARAMETERS; + AvifDecoderWrapper* const decoder = + reinterpret_cast(jdecoder); + avifResult res = avifDecoderNextImage(decoder->decoder); + if (res != AVIF_RESULT_OK) { + LOGE("Failed to decode AVIF image. Status: %d", res); + return nullptr; + } + return AvifImageToHardwareBitmap(env, decoder, allow_hdr, dest); +} + +FUNC(jobject, nativeNthFrameHardwareBitmap, jlong jdecoder, jint n, + jboolean allow_hdr, jobject dest) { + IGNORE_UNUSED_JNI_PARAMETERS; + AvifDecoderWrapper* const decoder = + reinterpret_cast(jdecoder); + avifResult res = avifDecoderNthImage(decoder->decoder, n); + if (res != AVIF_RESULT_OK) { + LOGE("Failed to decode AVIF image. 
Status: %d", res); + return nullptr; + } + return AvifImageToHardwareBitmap(env, decoder, allow_hdr, dest); +} + +FUNC(jobject, nativeCreateHardwareBuffer, jint width, jint height, jint depth, + jboolean allow_hdr) { + IGNORE_UNUSED_JNI_PARAMETERS; + return CreateHardwareBufferForImage(env, width, height, depth, allow_hdr); +} + + From e60e863de88c3cccff7f768dc64606215b70c0ee Mon Sep 17 00:00:00 2001 From: Rahul Behera Date: Thu, 19 Mar 2026 10:24:15 -0500 Subject: [PATCH 2/2] android: Fix off-by-one in testDecodeAnimatedHardwareBitmap After nthFrameHardwareBitmap(0), the decoder is positioned at frame 0, so nextFrameHardwareBitmap advances to frame 1 on the first call. Looping frameCount times overshoots by one, hitting AVIF_RESULT_NO_IMAGES_REMAINING on the last iteration. Switch the buffer-reuse loop to nthFrameHardwareBitmap(i, ...) which iterates frames 0 through frameCount-1 cleanly, and add a separate assertion to verify nextFrameHardwareBitmap with a dest buffer. --- .../java/org/aomedia/avif/android/AvifDecoderTest.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AvifDecoderTest.java b/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AvifDecoderTest.java index 37e8180177..9bdead0c56 100644 --- a/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AvifDecoderTest.java +++ b/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AvifDecoderTest.java @@ -319,11 +319,13 @@ public void testDecodeAnimatedHardwareBitmap() throws IOException { assertThat(reuseBitmap).isNotNull(); assertThat(reuseBitmap.getConfig()).isEqualTo(Config.HARDWARE); for (int i = 0; i < image.frameCount; i++) { - Bitmap result = decoder.nextFrameHardwareBitmap(allowHdr, hwb); + Bitmap result = decoder.nthFrameHardwareBitmap(i, allowHdr, hwb); assertThat(result).isNotNull(); 
assertThat(result.getConfig()).isEqualTo(Config.HARDWARE); } - assertThat(decoder.nthFrameHardwareBitmap(0, allowHdr, hwb)).isNotNull(); + // Also verify nextFrameHardwareBitmap with dest: seek to frame 0, advance to frame 1. + decoder.nthFrameHardwareBitmap(0, allowHdr, hwb); + assertThat(decoder.nextFrameHardwareBitmap(allowHdr, hwb)).isNotNull(); hwb.close(); decoder.release(); }