More codec API tuning to allow multiple frame samples to be emitted during encoding
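
The encodeImage() codec hook now takes an avifRWData out-parameter and
emits the compressed OBU payload for the frame it was handed, while
encodeFinish() flushes the encoder and drains any remaining packet (the
flush no longer happens inside encodeImage()). write.c collects one
sample per received frame into a new per-item avifRWDataArray
(item->samples), sums the sample sizes for ioStats, and hands a lone
sample over to item->content so a single image is still written with
items. Writing multi-frame sequences with tracks is left as a TODO.
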
diff --git a/src/codec_aom.c b/src/codec_aom.c
index b313a12..fb36328 100644
--- a/src/codec_aom.c
+++ b/src/codec_aom.c
@@ -235,7 +235,7 @@
     return fmt;
 }
 
-static avifBool aomCodecEncodeImage(avifCodec * codec, const avifImage * image, avifEncoder * encoder, avifBool alpha)
+static avifBool aomCodecEncodeImage(avifCodec * codec, const avifImage * image, avifEncoder * encoder, avifBool alpha, avifRWData * obu)
 {
     if (!codec->internal->encoderInitialized) {
         // Map encoder speed to AOM usage + CpuUsed:
@@ -392,6 +392,19 @@
     }
 
     aom_codec_encode(&codec->internal->encoder, aomImage, 0, 1, 0);
+
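+    // Fetch the compressed frame packet produced by this encode call and hand it back through the obu out-parameter.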
+    aom_codec_iter_t iter = NULL;
+    for (;;) {
+        const aom_codec_cx_pkt_t * pkt = aom_codec_get_cx_data(&codec->internal->encoder, &iter);
+        if (pkt == NULL) {
+            break;
+        }
+        if (pkt->kind == AOM_CODEC_CX_FRAME_PKT) {
+            avifRWDataSet(obu, pkt->data.frame.buf, pkt->data.frame.sz);
+            break;
+        }
+    }
+
     aom_img_free(aomImage);
     return AVIF_TRUE;
 }
diff --git a/src/codec_rav1e.c b/src/codec_rav1e.c
index 02bccf9..acf987e 100644
--- a/src/codec_rav1e.c
+++ b/src/codec_rav1e.c
@@ -31,7 +31,7 @@
     return AVIF_TRUE;
 }
 
-static avifBool rav1eCodecEncodeImage(avifCodec * codec, const avifImage * image, avifEncoder * encoder, avifBool alpha)
+static avifBool rav1eCodecEncodeImage(avifCodec * codec, const avifImage * image, avifEncoder * encoder, avifBool alpha, avifRWData * obu)
 {
     (void)codec; // unused
 
@@ -151,11 +151,16 @@
     if (encoderStatus != 0) {
         goto cleanup;
     }
-    encoderStatus = rav1e_send_frame(codec->internal->rav1eContext, NULL); // flush
+
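+    // Pull the encoded packet for this frame and emit its payload as the frame's sample data.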
+    RaPacket * pkt = NULL;
+    encoderStatus = rav1e_receive_packet(codec->internal->rav1eContext, &pkt);
     if (encoderStatus != 0) {
         goto cleanup;
+    } else if (pkt && pkt->data && (pkt->len > 0)) {
+        avifRWDataSet(obu, pkt->data, pkt->len);
+        rav1e_packet_unref(pkt);
+        pkt = NULL;
     }
-
     success = AVIF_TRUE;
 cleanup:
     if (rav1eFrame) {
@@ -171,15 +176,22 @@
 
 static avifBool rav1eCodecEncodeFinish(avifCodec * codec, avifRWData * obu)
 {
+    RaEncoderStatus encoderStatus = rav1e_send_frame(codec->internal->rav1eContext, NULL); // flush
+    if (encoderStatus != 0) {
+        return AVIF_FALSE;
+    }
+
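+    // Drain any remaining packet produced by the flush into obu.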
     RaPacket * pkt = NULL;
-    RaEncoderStatus encoderStatus = rav1e_receive_packet(codec->internal->rav1eContext, &pkt);
-    if ((encoderStatus == 0) && pkt && pkt->data && (pkt->len > 0)) {
+    encoderStatus = rav1e_receive_packet(codec->internal->rav1eContext, &pkt);
+    if (encoderStatus != 0) {
+        return AVIF_FALSE;
+    }
+    if (pkt && pkt->data && (pkt->len > 0)) {
         avifRWDataSet(obu, pkt->data, pkt->len);
         rav1e_packet_unref(pkt);
         pkt = NULL;
-        return AVIF_TRUE;
     }
-    return AVIF_FALSE;
+    return AVIF_TRUE;
 }
 
 const char * avifCodecVersionRav1e(void)
diff --git a/src/write.c b/src/write.c
index 434e95d..989dfe0 100644
--- a/src/write.c
+++ b/src/write.c
@@ -37,8 +37,9 @@
 {
     uint16_t id;
     uint8_t type[4];
-    avifCodec * codec;  // only present on type==av01
-    avifRWData content; // OBU data on av01, metadata payload for Exif/XMP
+    avifCodec * codec;       // only present on type==av01
+    avifRWDataArray samples; // AV1 sample data for image sequences
+    avifRWData content;      // OBU data for an item-based (single-image) av01, metadata payload for Exif/XMP
     avifBool alpha;
 
     const char * infeName;
@@ -65,6 +66,7 @@
     avifEncoderItem * alphaItem;
     uint16_t lastItemID;
     uint16_t primaryItemID;
+    uint32_t receivedFrameCount; // incremented on each call to avifEncoderAddImage()
 } avifEncoderData;
 
 static avifEncoderData * avifEncoderDataCreate()
@@ -84,6 +86,7 @@
     memcpy(item->type, type, sizeof(item->type));
     item->infeName = infeName;
     item->infeNameSize = infeNameSize;
+    avifArrayCreate(&item->samples, sizeof(avifRWData), 1);
     return item;
 }
 
@@ -94,6 +97,10 @@
         if (item->codec) {
             avifCodecDestroy(item->codec);
         }
+        for (uint32_t sampleIndex = 0; sampleIndex < item->samples.count; ++sampleIndex) {
+            avifRWDataFree(&item->samples.raw[sampleIndex]);
+        }
+        avifArrayDestroy(&item->samples);
         avifRWDataFree(&item->content);
     }
     avifImageDestroy(data->imageMetadata);
@@ -238,11 +245,18 @@
     for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
         avifEncoderItem * item = &encoder->data->items.item[itemIndex];
         if (item->codec) {
-            if (!item->codec->encodeImage(item->codec, image, encoder, item->alpha)) {
+            avifRWData tmpSampleData = AVIF_DATA_EMPTY;
+            if (!item->codec->encodeImage(item->codec, image, encoder, item->alpha, &tmpSampleData)) {
                 return item->alpha ? AVIF_RESULT_ENCODE_ALPHA_FAILED : AVIF_RESULT_ENCODE_COLOR_FAILED;
             }
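+            // Take ownership of the emitted sample and append it to this item's sample list.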
+            if (tmpSampleData.data && tmpSampleData.size) {
+                avifRWData * sampleData = (avifRWData *)avifArrayPushPtr(&item->samples);
+                memcpy(sampleData, &tmpSampleData, sizeof(avifRWData));
+            }
         }
     }
+
+    ++encoder->data->receivedFrameCount;
     return AVIF_RESULT_OK;
 }
 
@@ -258,18 +272,41 @@
     for (uint32_t itemIndex = 0; itemIndex < encoder->data->items.count; ++itemIndex) {
         avifEncoderItem * item = &encoder->data->items.item[itemIndex];
         if (item->codec) {
-            if (!item->codec->encodeFinish(item->codec, &item->content)) {
+            avifRWData tmpSampleData = AVIF_DATA_EMPTY;
+            if (!item->codec->encodeFinish(item->codec, &tmpSampleData)) {
+                return item->alpha ? AVIF_RESULT_ENCODE_ALPHA_FAILED : AVIF_RESULT_ENCODE_COLOR_FAILED;
+            }
+            if (tmpSampleData.data && tmpSampleData.size) {
+                avifRWData * sampleData = (avifRWData *)avifArrayPushPtr(&item->samples);
+                memcpy(sampleData, &tmpSampleData, sizeof(avifRWData));
+            }
+
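+            // Each call to avifEncoderAddImage() must have produced exactly one sample for this item.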
+            if (item->samples.count != encoder->data->receivedFrameCount) {
                 return item->alpha ? AVIF_RESULT_ENCODE_ALPHA_FAILED : AVIF_RESULT_ENCODE_COLOR_FAILED;
             }
 
+            size_t obuSize = 0;
+            for (uint32_t sampleIndex = 0; sampleIndex < item->samples.count; ++sampleIndex) {
+                obuSize += item->samples.raw[sampleIndex].size;
+            }
             if (item->alpha) {
-                encoder->ioStats.alphaOBUSize = item->content.size;
+                encoder->ioStats.alphaOBUSize = obuSize;
             } else {
-                encoder->ioStats.colorOBUSize = item->content.size;
+                encoder->ioStats.colorOBUSize = obuSize;
+            }
+
+            if (item->samples.count == 1) {
+                // Detected a single image (non-sequence). Hand over the only sample to item->content
+                // so that the image is encoded with items instead of tracks.
+                memcpy(&item->content, &item->samples.raw[0], sizeof(avifRWData));
+                memset(&item->samples.raw[0], 0, sizeof(avifRWData));
+                item->samples.count = 0;
             }
         }
     }
 
+    // TODO: If encoder->data->receivedFrameCount > 1, encode with tracks instead of items
+
     // -----------------------------------------------------------------------
     // Begin write stream