android_jni: Return avifResult from decoding functions

Instead of merely returning a boolean, return avifResult from
nextFrame() and nthFrame(). In the Java layer, these functions
now have a return type of int. This avoids having to copy the
avifResult enum into the Java layer and keep it in sync.

GOOGLE_INTERNAL_CL: 525827147
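A minimal caller sketch (not part of this change), assuming the
AvifDecoder and Bitmap are created elsewhere; the helper class, TAG
constant, and error handling are illustrative only. It shows callers
comparing the returned int against 0 (AVIF_RESULT_OK) instead of
checking a boolean:

    import android.graphics.Bitmap;
    import android.util.Log;
    import org.aomedia.avif.android.AvifDecoder;

    final class AvifFrameHelper {
      private static final String TAG = "AvifFrameHelper";
      // 0 corresponds to AVIF_RESULT_OK in libavif's avifResult enum.
      private static final int AVIF_RESULT_OK = 0;

      private AvifFrameHelper() {}

      /** Decodes the next frame into bitmap; returns true on success. */
      static boolean decodeNextFrameInto(AvifDecoder decoder, Bitmap bitmap) {
        int result = decoder.nextFrame(bitmap);
        if (result != AVIF_RESULT_OK) {
          // The int maps to libavif's avifResult; log the raw code.
          Log.e(TAG, "nextFrame failed with avifResult " + result);
          return false;
        }
        return true;
      }
    }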
diff --git a/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AnimatedImageTest.java b/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AnimatedImageTest.java
index 3d87107..8fac08a 100644
--- a/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AnimatedImageTest.java
+++ b/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AnimatedImageTest.java
@@ -23,6 +23,8 @@
 @RunWith(Parameterized.class)
 public class AnimatedImageTest {
 
+  private static final int AVIF_RESULT_OK = 0;
+
   private static class Image {
     public final String filename;
     public final int width;
@@ -106,25 +108,25 @@
     assertThat(bitmap).isNotNull();
     for (int i = 0; i < image.frameCount; i++) {
       assertThat(decoder.nextFrameIndex()).isEqualTo(i);
-      assertThat(decoder.nextFrame(bitmap)).isTrue();
+      assertThat(decoder.nextFrame(bitmap)).isEqualTo(AVIF_RESULT_OK);
       assertThat(frameDurations[i]).isWithin(1.0e-2).of(image.frameDuration);
     }
     assertThat(decoder.nextFrameIndex()).isEqualTo(image.frameCount);
     // Fetch the first frame again.
-    assertThat(decoder.nthFrame(0, bitmap)).isTrue();
+    assertThat(decoder.nthFrame(0, bitmap)).isEqualTo(AVIF_RESULT_OK);
     // Now nextFrame will return the second frame.
     assertThat(decoder.nextFrameIndex()).isEqualTo(1);
-    assertThat(decoder.nextFrame(bitmap)).isTrue();
+    assertThat(decoder.nextFrame(bitmap)).isEqualTo(AVIF_RESULT_OK);
     // Fetch the (frameCount/2)th frame.
-    assertThat(decoder.nthFrame(image.frameCount / 2, bitmap)).isTrue();
+    assertThat(decoder.nthFrame(image.frameCount / 2, bitmap)).isEqualTo(AVIF_RESULT_OK);
     // Fetch the last frame.
-    assertThat(decoder.nthFrame(image.frameCount - 1, bitmap)).isTrue();
+    assertThat(decoder.nthFrame(image.frameCount - 1, bitmap)).isEqualTo(AVIF_RESULT_OK);
-    // Now nextFrame should return false.
+    // Now nextFrame should not return AVIF_RESULT_OK.
     assertThat(decoder.nextFrameIndex()).isEqualTo(image.frameCount);
-    assertThat(decoder.nextFrame(bitmap)).isFalse();
+    assertThat(decoder.nextFrame(bitmap)).isNotEqualTo(AVIF_RESULT_OK);
     // Passing out of bound values for n should fail.
-    assertThat(decoder.nthFrame(-1, bitmap)).isFalse();
-    assertThat(decoder.nthFrame(image.frameCount, bitmap)).isFalse();
+    assertThat(decoder.nthFrame(-1, bitmap)).isNotEqualTo(AVIF_RESULT_OK);
+    assertThat(decoder.nthFrame(image.frameCount, bitmap)).isNotEqualTo(AVIF_RESULT_OK);
     decoder.release();
   }
 
diff --git a/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java b/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java
index 696c2d5..db8eee5 100644
--- a/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java
+++ b/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java
@@ -195,15 +195,14 @@
    * Decodes the next frame of the animated AVIF into the bitmap.
    *
    * @param bitmap The decoded pixels will be copied into the bitmap.
-   * @return true on success and false on failure. A few possible reasons for failure are: 1) Input
-   *     was not valid AVIF. 2) Bitmap was not large enough to store the decoded image.
+   * @return 0 (AVIF_RESULT_OK) on success and some other avifResult on failure. For a list of all
+   *     possible result codes, see the avifResult enum in avif.h in libavif's C source code.
    */
-  public boolean nextFrame(Bitmap bitmap) {
-    // TODO(vigneshv): Consider returning an avifResult here instead of just a boolean.
+  public int nextFrame(Bitmap bitmap) {
     return nextFrame(decoder, bitmap);
   }
 
-  private native boolean nextFrame(long decoder, Bitmap bitmap);
+  private native int nextFrame(long decoder, Bitmap bitmap);
 
   /**
    * Get the 0-based index of the frame that will be returned by the next call to {@link nextFrame}.
@@ -224,13 +223,14 @@
    *
    * @param bitmap The decoded pixels will be copied into the bitmap.
    * @param n The zero-based index of the frame to be decoded.
-   * @return true on success and false on failure.
+   * @return 0 (AVIF_RESULT_OK) on success and some other avifResult on failure. For a list of all
+   *     possible result codes, see the avifResult enum in avif.h in libavif's C source code.
    */
-  public boolean nthFrame(int n, Bitmap bitmap) {
+  public int nthFrame(int n, Bitmap bitmap) {
     return nthFrame(decoder, n, bitmap);
   }
 
-  private native boolean nthFrame(long decoder, int n, Bitmap bitmap);
+  private native int nthFrame(long decoder, int n, Bitmap bitmap);
 
   private native long createDecoder(ByteBuffer encoded, int length, int threads);
 
diff --git a/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc b/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc
index f6038de..f6901b2 100644
--- a/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc
+++ b/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc
@@ -76,12 +76,13 @@
   return true;
 }
 
-bool AvifImageToBitmap(JNIEnv* const env, AvifDecoderWrapper* const decoder,
-                       jobject bitmap) {
+avifResult AvifImageToBitmap(JNIEnv* const env,
+                             AvifDecoderWrapper* const decoder,
+                             jobject bitmap) {
   AndroidBitmapInfo bitmap_info;
   if (AndroidBitmap_getInfo(env, bitmap, &bitmap_info) < 0) {
     LOGE("AndroidBitmap_getInfo failed.");
-    return false;
+    return AVIF_RESULT_UNKNOWN_ERROR;
   }
   // Ensure that the bitmap is large enough to store the decoded image.
   if (bitmap_info.width < decoder->decoder->image->width ||
@@ -91,20 +92,20 @@
         "%dx%d.",
         bitmap_info.width, bitmap_info.height, decoder->decoder->image->width,
         decoder->decoder->image->height);
-    return false;
+    return AVIF_RESULT_UNKNOWN_ERROR;
   }
   // Ensure that the bitmap format is RGBA_8888, RGB_565 or RGBA_F16.
   if (bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_8888 &&
       bitmap_info.format != ANDROID_BITMAP_FORMAT_RGB_565 &&
       bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_F16) {
     LOGE("Bitmap format (%d) is not supported.", bitmap_info.format);
-    return false;
+    return AVIF_RESULT_UNKNOWN_ERROR;
   }
   void* bitmap_pixels = nullptr;
   if (AndroidBitmap_lockPixels(env, bitmap, &bitmap_pixels) !=
       ANDROID_BITMAP_RESULT_SUCCESS) {
     LOGE("Failed to lock Bitmap.");
-    return false;
+    return AVIF_RESULT_UNKNOWN_ERROR;
   }
   avifRGBImage rgb_image;
   avifRGBImageSetDefaults(&rgb_image, decoder->decoder->image);
@@ -127,27 +128,27 @@
   AndroidBitmap_unlockPixels(env, bitmap);
   if (res != AVIF_RESULT_OK) {
     LOGE("Failed to convert YUV Pixels to RGB. Status: %d", res);
-    return false;
+    return res;
   }
-  return true;
+  return AVIF_RESULT_OK;
 }
 
-bool DecodeNextImage(JNIEnv* const env, AvifDecoderWrapper* const decoder,
-                     jobject bitmap) {
+avifResult DecodeNextImage(JNIEnv* const env, AvifDecoderWrapper* const decoder,
+                           jobject bitmap) {
   avifResult res = avifDecoderNextImage(decoder->decoder);
   if (res != AVIF_RESULT_OK) {
     LOGE("Failed to decode AVIF image. Status: %d", res);
-    return false;
+    return res;
   }
   return AvifImageToBitmap(env, decoder, bitmap);
 }
 
-bool DecodeNthImage(JNIEnv* const env, AvifDecoderWrapper* const decoder,
-                    uint32_t n, jobject bitmap) {
+avifResult DecodeNthImage(JNIEnv* const env, AvifDecoderWrapper* const decoder,
+                          uint32_t n, jobject bitmap) {
   avifResult res = avifDecoderNthImage(decoder->decoder, n);
   if (res != AVIF_RESULT_OK) {
     LOGE("Failed to decode AVIF image. Status: %d", res);
-    return false;
+    return res;
   }
   return AvifImageToBitmap(env, decoder, bitmap);
 }
@@ -207,7 +208,7 @@
                              getThreadCount(threads))) {
     return false;
   }
-  return DecodeNextImage(env, &decoder, bitmap);
+  return DecodeNextImage(env, &decoder, bitmap) == AVIF_RESULT_OK;
 }
 
 FUNC(jlong, createDecoder, jobject encoded, jint length, jint threads) {
@@ -272,7 +273,7 @@
   return reinterpret_cast<jlong>(decoder.release());
 }
 
-FUNC(jboolean, nextFrame, jlong jdecoder, jobject bitmap) {
+FUNC(jint, nextFrame, jlong jdecoder, jobject bitmap) {
   AvifDecoderWrapper* const decoder =
       reinterpret_cast<AvifDecoderWrapper*>(jdecoder);
   return DecodeNextImage(env, decoder, bitmap);
@@ -284,7 +285,7 @@
   return decoder->decoder->imageIndex + 1;
 }
 
-FUNC(jboolean, nthFrame, jlong jdecoder, jint n, jobject bitmap) {
+FUNC(jint, nthFrame, jlong jdecoder, jint n, jobject bitmap) {
   AvifDecoderWrapper* const decoder =
       reinterpret_cast<AvifDecoderWrapper*>(jdecoder);
   return DecodeNthImage(env, decoder, n, bitmap);