android_jni: Add JNI API for decoding Animated AVIF

GOOGLE_INTERNAL_CL: 523471241
diff --git a/android_jni/avifandroidjni/build.gradle b/android_jni/avifandroidjni/build.gradle
index 956528c..525b395 100644
--- a/android_jni/avifandroidjni/build.gradle
+++ b/android_jni/avifandroidjni/build.gradle
@@ -34,6 +34,7 @@
 }
 
 dependencies {
+    implementation "androidx.annotation:annotation:1.6.0"
     androidTestImplementation 'junit:junit:4.+'
     androidTestImplementation 'androidx.test.ext:junit:1.1.5'
     androidTestImplementation 'com.google.truth:truth:1.1.3'
diff --git a/android_jni/avifandroidjni/src/androidTest/assets/README b/android_jni/avifandroidjni/src/androidTest/assets/README
index 2307913..95262d5 100644
--- a/android_jni/avifandroidjni/src/androidTest/assets/README
+++ b/android_jni/avifandroidjni/src/androidTest/assets/README
@@ -5,3 +5,10 @@
   Source: https://github.com/AOMediaCodec/av1-avif/tree/master/testFiles/Link-U
   Filter: Only a subset of fox* files.
   Git Hash: 77bd20d
+
+* Sub-Directory: animated_avif
+  Description: Images used for testing animated AVIF decoding.
+  Source:
+  https://github.com/AOMediaCodec/av1-avif/tree/master/testFiles/Netflix/avis
+  Filter: None.
+  Git Hash: 77bd20d
diff --git a/android_jni/avifandroidjni/src/androidTest/assets/animated_avif/Chimera-AV1-10bit-480x270.avif b/android_jni/avifandroidjni/src/androidTest/assets/animated_avif/Chimera-AV1-10bit-480x270.avif
new file mode 100644
index 0000000..280af28
--- /dev/null
+++ b/android_jni/avifandroidjni/src/androidTest/assets/animated_avif/Chimera-AV1-10bit-480x270.avif
Binary files differ
diff --git a/android_jni/avifandroidjni/src/androidTest/assets/animated_avif/alpha_video.avif b/android_jni/avifandroidjni/src/androidTest/assets/animated_avif/alpha_video.avif
new file mode 100644
index 0000000..d88c9ae
--- /dev/null
+++ b/android_jni/avifandroidjni/src/androidTest/assets/animated_avif/alpha_video.avif
Binary files differ
diff --git a/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AnimatedImageTest.java b/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AnimatedImageTest.java
new file mode 100644
index 0000000..2ad879d
--- /dev/null
+++ b/android_jni/avifandroidjni/src/androidTest/java/org/aomedia/avif/android/AnimatedImageTest.java
@@ -0,0 +1,102 @@
+package org.aomedia.avif.android;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.Bitmap.Config;
+import androidx.test.platform.app.InstrumentationRegistry;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
+
+/** Instrumentation tests for the libavif JNI API, which will execute on an Android device. */
+@RunWith(Parameterized.class)
+public class AnimatedImageTest {
+
+  private static class Image {
+    public final String filename;
+    public final int width;
+    public final int height;
+    public final int depth;
+    public final int frameCount;
+    public final int repetitionCount;
+
+    public Image(
+        String filename, int width, int height, int depth, int frameCount, int repetitionCount) {
+      this.filename = filename;
+      this.width = width;
+      this.height = height;
+      this.depth = depth;
+      this.frameCount = frameCount;
+      this.repetitionCount = repetitionCount;
+    }
+  }
+
+  private static final Image[] IMAGES = {
+    // Parameter ordering: filename, width, height, depth, frameCount, repetitionCount.
+    new Image("alpha_video.avif", 640, 480, 8, 48, -2),
+    new Image("Chimera-AV1-10bit-480x270.avif", 480, 270, 10, 95, -2),
+  };
+
+  private static final String ASSET_DIRECTORY = "animated_avif";
+
+  private static final Bitmap.Config[] BITMAP_CONFIGS = {
+    Config.ARGB_8888, Config.RGBA_F16, Config.RGB_565,
+  };
+
+  @Parameters
+  public static List<Object[]> data() throws IOException {
+    ArrayList<Object[]> list = new ArrayList<>();
+    for (Bitmap.Config config : BITMAP_CONFIGS) {
+      for (Image image : IMAGES) {
+        list.add(new Object[] {config, image});
+      }
+    }
+    return list;
+  }
+
+  @Parameter(0)
+  public Bitmap.Config config;
+
+  @Parameter(1)
+  public Image image;
+
+  @Test
+  public void testAnimatedAvifDecode() throws IOException {
+    ByteBuffer buffer = getBuffer();
+    assertThat(buffer).isNotNull();
+    AvifDecoder decoder = AvifDecoder.create(buffer);
+    assertThat(decoder).isNotNull();
+    assertThat(decoder.getWidth()).isEqualTo(image.width);
+    assertThat(decoder.getHeight()).isEqualTo(image.height);
+    assertThat(decoder.getDepth()).isEqualTo(image.depth);
+    assertThat(decoder.getFrameCount()).isEqualTo(image.frameCount);
+    assertThat(decoder.getRepetitionCount()).isEqualTo(image.repetitionCount);
+    Bitmap bitmap = Bitmap.createBitmap(image.width, image.height, config);
+    assertThat(bitmap).isNotNull();
+    for (int i = 0; i < image.frameCount; i++) {
+      assertThat(decoder.nextFrame(bitmap)).isTrue();
+    }
+    decoder.release();
+  }
+
+  private ByteBuffer getBuffer() throws IOException {
+    Context context = InstrumentationRegistry.getInstrumentation().getTargetContext();
+    String assetPath = Paths.get(ASSET_DIRECTORY, image.filename).toString();
+    InputStream is = context.getAssets().open(assetPath);
+    ByteBuffer buffer = ByteBuffer.allocateDirect(is.available());
+    ReadableByteChannel channel = Channels.newChannel(is);
+    // A single read() call is not guaranteed to fill the buffer, so read until EOF.
+    while (buffer.hasRemaining() && channel.read(buffer) != -1) {}
+    buffer.rewind();
+    return buffer;
+  }
+}
diff --git a/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java b/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java
index 216bfcd..39285d8 100644
--- a/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java
+++ b/android_jni/avifandroidjni/src/main/java/org/aomedia/avif/android/AvifDecoder.java
@@ -4,9 +4,36 @@
 package org.aomedia.avif.android;
 
 import android.graphics.Bitmap;
+import androidx.annotation.Nullable;
 import java.nio.ByteBuffer;
 
-/** An AVIF Decoder. AVIF Specification: https://aomediacodec.github.io/av1-avif/. */
+/**
+ * An AVIF Decoder. AVIF Specification: https://aomediacodec.github.io/av1-avif/.
+ *
+ * <p>There are two ways to use this class.
+ *
+ * <p>1) As a static utility class.
+ *
+ * <p>This class can be used statically without instantiating an object. This is useful for simply
+ * sniffing and decoding still AVIF images without having to maintain any decoder state. The
+ * following methods can be used this way: {@link #isAvifImage}, {@link #getInfo} and
+ * {@link #decode}. The {@link Info} inner class is only used in this case.
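+ *
+ * <p>For example, a still AVIF image can be decoded into a {@link Bitmap} roughly as follows (a
+ * minimal sketch with no error handling; {@code buffer} is assumed to be a direct ByteBuffer
+ * containing the encoded image, and the pre-existing static method signatures are assumed):
+ *
+ * <pre>{@code
+ * AvifDecoder.Info info = new AvifDecoder.Info();
+ * if (AvifDecoder.getInfo(buffer, buffer.remaining(), info)) {
+ *   Bitmap bitmap = Bitmap.createBitmap(info.width, info.height, Bitmap.Config.ARGB_8888);
+ *   AvifDecoder.decode(buffer, buffer.remaining(), bitmap, 1); // Decode with a single thread.
+ * }
+ * }</pre>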
+ *
+ * <p>2) As an instantiated regular class.
+ *
+ * <p>When used this way, the {@link #create} method must be used to create an instance of this
+ * class with a valid AVIF image. This creates a long-lived underlying decoder object that is used
+ * to decode the image(s). The other public methods of the returned object can then be called to
+ * get information about the image and to obtain the individual decoded frames. When the decoder
+ * object is no longer needed, {@link #release} must be called to free the underlying decoder.
+ *
+ * <p>This is useful for decoding animated AVIF images and obtaining each decoded frame one after
+ * the other.
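+ *
+ * <p>For example, a minimal sketch of decoding every frame of an animated AVIF image (error
+ * handling is elided, and {@code buffer} is assumed to be a direct ByteBuffer containing the
+ * encoded image):
+ *
+ * <pre>{@code
+ * AvifDecoder decoder = AvifDecoder.create(buffer);
+ * Bitmap bitmap =
+ *     Bitmap.createBitmap(decoder.getWidth(), decoder.getHeight(), Bitmap.Config.ARGB_8888);
+ * for (int i = 0; i < decoder.getFrameCount(); i++) {
+ *   decoder.nextFrame(bitmap);
+ *   // Use or display the decoded frame in bitmap.
+ * }
+ * decoder.release();
+ * }</pre>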
+ *
+ * <p>NOTE: The API for using this as an instantiated regular class is still under development and
+ * might change.
+ */
 @SuppressWarnings("CatchAndPrintStackTrace")
 public class AvifDecoder {
   static {
@@ -17,10 +44,18 @@
     }
   }
 
-  // This is a utility class and cannot be instantiated.
-  private AvifDecoder() {}
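+  // Handle to the underlying native decoder (returned by createDecoder()). The image property
+  // fields below are populated by the native createDecoder() implementation.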
+  private long decoder;
+  private int width;
+  private int height;
+  private int depth;
+  private int frameCount;
+  private int repetitionCount;
 
-  /** Contains information about the AVIF Image. */
+  private AvifDecoder(ByteBuffer encoded) {
+    decoder = createDecoder(encoded, encoded.remaining());
+  }
+
+  /** Contains information about the AVIF image. This class is only used by {@link #getInfo}. */
   public static class Info {
     public int width;
     public int height;
@@ -78,4 +113,71 @@
    *     value was passed for the threads parameter.
    */
   public static native boolean decode(ByteBuffer encoded, int length, Bitmap bitmap, int threads);
+
+  /** Get the width of the image. */
+  public int getWidth() {
+    return width;
+  }
+
+  /** Get the height of the image. */
+  public int getHeight() {
+    return height;
+  }
+
+  /** Get the depth (bit depth) of the image. */
+  public int getDepth() {
+    return depth;
+  }
+
+  /** Get the number of frames in the image. */
+  public int getFrameCount() {
+    return frameCount;
+  }
+
+  /**
+   * Get the number of repetitions for an animated image (see repetitionCount in avif.h for
+   * details).
+   */
+  public int getRepetitionCount() {
+    return repetitionCount;
+  }
+
+  /** Releases the underlying decoder object. */
+  public void release() {
+    if (decoder != 0) {
+      destroyDecoder(decoder);
+    }
+    decoder = 0;
+  }
+
+  /**
+   * Create and return an AvifDecoder.
+   *
+   * @param encoded The encoded AVIF image. {@code encoded.position()} must be 0. The memory of
+   *     this ByteBuffer must remain valid until {@link #release} is called.
+   * @return an AvifDecoder object on success, or null on failure.
+   */
+  @Nullable
+  public static AvifDecoder create(ByteBuffer encoded) {
+    AvifDecoder decoder = new AvifDecoder(encoded);
+    return (decoder.decoder == 0) ? null : decoder;
+  }
+
+  /**
+   * Decodes the next frame of the animated AVIF into the bitmap.
+   *
+   * @param bitmap The decoded pixels will be copied into the bitmap.
+   * @return true on success and false on failure. A few possible reasons for failure are: 1) Input
+   *     was not valid AVIF. 2) Bitmap was not large enough to store the decoded image.
+   */
+  public boolean nextFrame(Bitmap bitmap) {
+    // TODO(vigneshv): Consider returning an avifResult here instead of just a boolean.
+    return nextFrame(decoder, bitmap);
+  }
+
+  private native boolean nextFrame(long decoder, Bitmap bitmap);
+
+  private native long createDecoder(ByteBuffer encoded, int length);
+
+  private native void destroyDecoder(long decoder);
 }
diff --git a/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc b/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc
index 0e1bf11..31d6f6f 100644
--- a/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc
+++ b/android_jni/avifandroidjni/src/main/jni/libavif_jni.cc
@@ -6,6 +6,8 @@
 #include <cpu-features.h>
 #include <jni.h>
 
+#include <new>
+
 #include "avif/avif.h"
 
 #define LOG_TAG "avif_jni"
@@ -15,10 +17,10 @@
 #define FUNC(RETURN_TYPE, NAME, ...)                                      \
   extern "C" {                                                            \
   JNIEXPORT RETURN_TYPE Java_org_aomedia_avif_android_AvifDecoder_##NAME( \
-      JNIEnv* env, jobject /*thiz*/, ##__VA_ARGS__);                      \
+      JNIEnv* env, jobject thiz, ##__VA_ARGS__);                          \
   }                                                                       \
   JNIEXPORT RETURN_TYPE Java_org_aomedia_avif_android_AvifDecoder_##NAME( \
-      JNIEnv* env, jobject /*thiz*/, ##__VA_ARGS__)
+      JNIEnv* env, jobject thiz, ##__VA_ARGS__)
 
 namespace {
 
@@ -26,6 +28,11 @@
 jfieldID global_info_height;
 jfieldID global_info_depth;
 jfieldID global_info_alpha_present;
+jfieldID global_width;
+jfieldID global_height;
+jfieldID global_depth;
+jfieldID global_frame_count;
+jfieldID global_repetition_count;
 
 // RAII wrapper class that properly frees the decoder related objects on
 // destruction.
@@ -78,6 +85,67 @@
   return true;
 }
 
+bool DecodeNextImage(JNIEnv* const env, AvifDecoderWrapper* const decoder,
+                     jobject bitmap) {
+  avifResult res = avifDecoderNextImage(decoder->decoder);
+  if (res != AVIF_RESULT_OK) {
+    LOGE("Failed to decode AVIF image. Status: %d", res);
+    return false;
+  }
+  AndroidBitmapInfo bitmap_info;
+  if (AndroidBitmap_getInfo(env, bitmap, &bitmap_info) < 0) {
+    LOGE("AndroidBitmap_getInfo failed.");
+    return false;
+  }
+  // Ensure that the bitmap is large enough to store the decoded image.
+  if (bitmap_info.width < decoder->decoder->image->width ||
+      bitmap_info.height < decoder->decoder->image->height) {
+    LOGE(
+        "Bitmap is not large enough to fit the image. Bitmap %dx%d Image "
+        "%dx%d.",
+        bitmap_info.width, bitmap_info.height, decoder->decoder->image->width,
+        decoder->decoder->image->height);
+    return false;
+  }
+  // Ensure that the bitmap format is RGBA_8888, RGB_565 or RGBA_F16.
+  if (bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_8888 &&
+      bitmap_info.format != ANDROID_BITMAP_FORMAT_RGB_565 &&
+      bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_F16) {
+    LOGE("Bitmap format (%d) is not supported.", bitmap_info.format);
+    return false;
+  }
+  void* bitmap_pixels = nullptr;
+  if (AndroidBitmap_lockPixels(env, bitmap, &bitmap_pixels) !=
+      ANDROID_BITMAP_RESULT_SUCCESS) {
+    LOGE("Failed to lock Bitmap.");
+    return false;
+  }
+  avifRGBImage rgb_image;
+  avifRGBImageSetDefaults(&rgb_image, decoder->decoder->image);
+  if (bitmap_info.format == ANDROID_BITMAP_FORMAT_RGBA_F16) {
+    rgb_image.depth = 16;
+    rgb_image.isFloat = AVIF_TRUE;
+  } else if (bitmap_info.format == ANDROID_BITMAP_FORMAT_RGB_565) {
+    rgb_image.format = AVIF_RGB_FORMAT_RGB_565;
+    rgb_image.depth = 8;
+  } else {
+    rgb_image.depth = 8;
+  }
+  rgb_image.pixels = static_cast<uint8_t*>(bitmap_pixels);
+  rgb_image.rowBytes = bitmap_info.stride;
+  // Android always sees the Bitmaps as premultiplied with alpha when it renders
+  // them:
+  // https://developer.android.com/reference/android/graphics/Bitmap#setPremultiplied(boolean)
+  rgb_image.alphaPremultiplied = AVIF_TRUE;
+  res = avifImageYUVToRGB(decoder->decoder->image, &rgb_image);
+  AndroidBitmap_unlockPixels(env, bitmap);
+  if (res != AVIF_RESULT_OK) {
+    LOGE("Failed to convert YUV Pixels to RGB. Status: %d", res);
+    return false;
+  }
+  return true;
+}
+
 }  // namespace
 
 jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
@@ -91,6 +159,14 @@
   global_info_height = env->GetFieldID(info_class, "height", "I");
   global_info_depth = env->GetFieldID(info_class, "depth", "I");
   global_info_alpha_present = env->GetFieldID(info_class, "alphaPresent", "Z");
+  const jclass avif_decoder_class =
+      env->FindClass("org/aomedia/avif/android/AvifDecoder");
+  global_width = env->GetFieldID(avif_decoder_class, "width", "I");
+  global_height = env->GetFieldID(avif_decoder_class, "height", "I");
+  global_depth = env->GetFieldID(avif_decoder_class, "depth", "I");
+  global_frame_count = env->GetFieldID(avif_decoder_class, "frameCount", "I");
+  global_repetition_count =
+      env->GetFieldID(avif_decoder_class, "repetitionCount", "I");
   return JNI_VERSION_1_6;
 }
 
@@ -105,7 +181,7 @@
   const uint8_t* const buffer =
       static_cast<const uint8_t*>(env->GetDirectBufferAddress(encoded));
   AvifDecoderWrapper decoder;
-  if (!CreateDecoderAndParse(&decoder, buffer, length, /*threads=*/ 1)) {
+  if (!CreateDecoderAndParse(&decoder, buffer, length, /*threads=*/1)) {
     return false;
   }
   env->SetIntField(info, global_info_width, decoder.decoder->image->width);
@@ -125,65 +201,41 @@
       static_cast<const uint8_t*>(env->GetDirectBufferAddress(encoded));
   AvifDecoderWrapper decoder;
   if (!CreateDecoderAndParse(
-      &decoder, buffer, length,
-      (threads == 0) ? android_getCpuCount() : threads)) {
+          &decoder, buffer, length,
+          (threads == 0) ? android_getCpuCount() : threads)) {
     return false;
   }
-  avifResult res = avifDecoderNextImage(decoder.decoder);
-  if (res != AVIF_RESULT_OK) {
-    LOGE("Failed to decode AVIF image. Status: %d", res);
-    return false;
+  return DecodeNextImage(env, &decoder, bitmap);
+}
+
+FUNC(jlong, createDecoder, jobject encoded, int length) {
+  const uint8_t* const buffer =
+      static_cast<const uint8_t*>(env->GetDirectBufferAddress(encoded));
+  AvifDecoderWrapper* decoder = new (std::nothrow) AvifDecoderWrapper();
+  if (decoder == nullptr) {
+    return 0;
   }
-  AndroidBitmapInfo bitmap_info;
-  if (AndroidBitmap_getInfo(env, bitmap, &bitmap_info) < 0) {
-    LOGE("AndroidBitmap_getInfo failed.");
-    return false;
+  // TODO(b/272577342): Make threads configurable.
+  if (!CreateDecoderAndParse(decoder, buffer, length, /*threads=*/1)) {
+    // Avoid leaking the wrapper when parsing fails.
+    delete decoder;
+    return 0;
   }
-  // Ensure that the bitmap is large enough to store the decoded image.
-  if (bitmap_info.width < decoder.decoder->image->width ||
-      bitmap_info.height < decoder.decoder->image->height) {
-    LOGE(
-        "Bitmap is not large enough to fit the image. Bitmap %dx%d Image "
-        "%dx%d.",
-        bitmap_info.width, bitmap_info.height, decoder.decoder->image->width,
-        decoder.decoder->image->height);
-    return false;
-  }
-  // Ensure that the bitmap format is RGBA_8888, RGB_565 or RGBA_F16.
-  if (bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_8888 &&
-      bitmap_info.format != ANDROID_BITMAP_FORMAT_RGB_565 &&
-      bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_F16) {
-    LOGE("Bitmap format (%d) is not supported.", bitmap_info.format);
-    return false;
-  }
-  void* bitmap_pixels = nullptr;
-  if (AndroidBitmap_lockPixels(env, bitmap, &bitmap_pixels) !=
-      ANDROID_BITMAP_RESULT_SUCCESS) {
-    LOGE("Failed to lock Bitmap.");
-    return false;
-  }
-  avifRGBImage rgb_image;
-  avifRGBImageSetDefaults(&rgb_image, decoder.decoder->image);
-  if (bitmap_info.format == ANDROID_BITMAP_FORMAT_RGBA_F16) {
-    rgb_image.depth = 16;
-    rgb_image.isFloat = AVIF_TRUE;
-  } else if (bitmap_info.format == ANDROID_BITMAP_FORMAT_RGB_565) {
-    rgb_image.format = AVIF_RGB_FORMAT_RGB_565;
-    rgb_image.depth = 8;
-  } else {
-    rgb_image.depth = 8;
-  }
-  rgb_image.pixels = static_cast<uint8_t*>(bitmap_pixels);
-  rgb_image.rowBytes = bitmap_info.stride;
-  // Android always sees the Bitmaps as premultiplied with alpha when it renders
-  // them:
-  // https://developer.android.com/reference/android/graphics/Bitmap#setPremultiplied(boolean)
-  rgb_image.alphaPremultiplied = AVIF_TRUE;
-  res = avifImageYUVToRGB(decoder.decoder->image, &rgb_image);
-  AndroidBitmap_unlockPixels(env, bitmap);
-  if (res != AVIF_RESULT_OK) {
-    LOGE("Failed to convert YUV Pixels to RGB. Status: %d", res);
-    return false;
-  }
-  return true;
+  env->SetIntField(thiz, global_width, decoder->decoder->image->width);
+  env->SetIntField(thiz, global_height, decoder->decoder->image->height);
+  env->SetIntField(thiz, global_depth, decoder->decoder->image->depth);
+  env->SetIntField(thiz, global_frame_count, decoder->decoder->imageCount);
+  env->SetIntField(thiz, global_repetition_count,
+                   decoder->decoder->repetitionCount);
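+  // Return the native decoder as an opaque handle. It is owned by the Java
+  // AvifDecoder object and must be freed by destroyDecoder() via release().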
+  return reinterpret_cast<jlong>(decoder);
+}
+
+FUNC(jboolean, nextFrame, jlong jdecoder, jobject bitmap) {
+  AvifDecoderWrapper* const decoder =
+      reinterpret_cast<AvifDecoderWrapper*>(jdecoder);
+  return DecodeNextImage(env, decoder, bitmap);
+}
+
+FUNC(void, destroyDecoder, jlong jdecoder) {
+  AvifDecoderWrapper* const decoder =
+      reinterpret_cast<AvifDecoderWrapper*>(jdecoder);
+  delete decoder;
 }