avifIO reader API

* Add avifIO, which offers a reader API for libavif
* Add a few new avifIO-related avifResult enums
* Refactor high-level calls in read.c to leverage avifIO
* avifDecoder can now ignoreExif and ignoreXMP during Parse
* Simplified codec API to account for new avifIO reader flow
* Switch avifdec to using avifIO
* Added "streaming" testing to aviftest
diff --git a/src/avif.c b/src/avif.c
index cf6ddc4..7864433 100644
--- a/src/avif.c
+++ b/src/avif.c
@@ -87,6 +87,10 @@
         case AVIF_RESULT_INVALID_EXIF_PAYLOAD:          return "Invalid Exif payload";
         case AVIF_RESULT_INVALID_IMAGE_GRID:            return "Invalid image grid";
         case AVIF_RESULT_INVALID_CODEC_SPECIFIC_OPTION: return "Invalid codec-specific option";
+        case AVIF_RESULT_TRUNCATED_DATA:                return "Truncated data";
+        case AVIF_RESULT_NO_IO:                         return "No IO";
+        case AVIF_RESULT_IO_ERROR:                      return "IO Error";
+        case AVIF_RESULT_WAITING_ON_IO:                 return "Waiting on IO";
         case AVIF_RESULT_UNKNOWN_ERROR:
         default:
             break;
diff --git a/src/codec_aom.c b/src/codec_aom.c
index cc99b3d..6bae52b 100644
--- a/src/codec_aom.c
+++ b/src/codec_aom.c
@@ -34,7 +34,6 @@
     avifBool decoderInitialized;
     aom_codec_ctx_t decoder;
     aom_codec_iter_t iter;
-    uint32_t inputSampleIndex;
     aom_image_t * image;
 
     avifBool encoderInitialized;
@@ -55,7 +54,7 @@
     avifFree(codec->internal);
 }
 
-static avifBool aomCodecOpen(struct avifCodec * codec, uint32_t firstSampleIndex)
+static avifBool aomCodecOpen(struct avifCodec * codec)
 {
     aom_codec_iface_t * decoder_interface = aom_codec_av1_dx();
     if (aom_codec_dec_init(&codec->internal->decoder, decoder_interface, NULL, 0)) {
@@ -70,12 +69,11 @@
         return AVIF_FALSE;
     }
 
-    codec->internal->inputSampleIndex = firstSampleIndex;
     codec->internal->iter = NULL;
     return AVIF_TRUE;
 }
 
-static avifBool aomCodecGetNextImage(avifCodec * codec, avifImage * image)
+static avifBool aomCodecGetNextImage(struct avifCodec * codec, avifDecodeSample * sample, avifBool alpha, avifImage * image)
 {
     aom_image_t * nextFrame = NULL;
     for (;;) {
@@ -83,10 +81,8 @@
         if (nextFrame) {
             // Got an image!
             break;
-        } else if (codec->internal->inputSampleIndex < codec->decodeInput->samples.count) {
+        } else if (sample) {
             // Feed another sample
-            avifDecodeSample * sample = &codec->decodeInput->samples.sample[codec->internal->inputSampleIndex];
-            ++codec->internal->inputSampleIndex;
             codec->internal->iter = NULL;
             if (aom_codec_decode(&codec->internal->decoder, sample->data.data, sample->data.size, NULL)) {
                 return AVIF_FALSE;
@@ -100,14 +96,14 @@
     if (nextFrame) {
         codec->internal->image = nextFrame;
     } else {
-        if (codec->decodeInput->alpha && codec->internal->image) {
+        if (alpha && codec->internal->image) {
             // Special case: reuse last alpha frame
         } else {
             return AVIF_FALSE;
         }
     }
 
-    avifBool isColor = !codec->decodeInput->alpha;
+    avifBool isColor = !alpha;
     if (isColor) {
         // Color (YUV) planes - set image to correct size / format, fill color
 
diff --git a/src/codec_dav1d.c b/src/codec_dav1d.c
index 6c30d12..fcd4af8 100644
--- a/src/codec_dav1d.c
+++ b/src/codec_dav1d.c
@@ -30,7 +30,6 @@
     avifBool hasPicture;
     avifRange colorRange;
     Dav1dData dav1dData;
-    uint32_t inputSampleIndex;
 };
 
 static void avifDav1dFreeCallback(const uint8_t * buf, void * cookie)
@@ -55,13 +54,10 @@
 }
 
 // returns AVIF_FALSE if there's nothing left to feed, or feeding fatally fails (say that five times fast)
-static avifBool dav1dFeedData(avifCodec * codec)
+static avifBool dav1dFeedData(avifCodec * codec, avifDecodeSample * sample)
 {
     if (!codec->internal->dav1dData.sz) {
-        if (codec->internal->inputSampleIndex < codec->decodeInput->samples.count) {
-            avifDecodeSample * sample = &codec->decodeInput->samples.sample[codec->internal->inputSampleIndex];
-            ++codec->internal->inputSampleIndex;
-
+        if (sample) {
             if (dav1d_data_wrap(&codec->internal->dav1dData, sample->data.data, sample->data.size, avifDav1dFreeCallback, NULL) != 0) {
                 return AVIF_FALSE;
             }
@@ -78,26 +74,24 @@
     return AVIF_TRUE;
 }
 
-static avifBool dav1dCodecOpen(avifCodec * codec, uint32_t firstSampleIndex)
+static avifBool dav1dCodecOpen(avifCodec * codec)
 {
     if (codec->internal->dav1dContext == NULL) {
         if (dav1d_open(&codec->internal->dav1dContext, &codec->internal->dav1dSettings) != 0) {
             return AVIF_FALSE;
         }
     }
-
-    codec->internal->inputSampleIndex = firstSampleIndex;
     return AVIF_TRUE;
 }
 
-static avifBool dav1dCodecGetNextImage(avifCodec * codec, avifImage * image)
+static avifBool dav1dCodecGetNextImage(struct avifCodec * codec, avifDecodeSample * sample, avifBool alpha, avifImage * image)
 {
     avifBool gotPicture = AVIF_FALSE;
     Dav1dPicture nextFrame;
     memset(&nextFrame, 0, sizeof(Dav1dPicture));
 
     for (;;) {
-        avifBool sentData = dav1dFeedData(codec);
+        avifBool sentData = dav1dFeedData(codec, sample);
         int res = dav1d_get_picture(codec->internal->dav1dContext, &nextFrame);
         if ((res == DAV1D_ERR(EAGAIN)) && sentData) {
             // send more data
@@ -118,7 +112,7 @@
         codec->internal->colorRange = codec->internal->dav1dPicture.seq_hdr->color_range ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED;
         codec->internal->hasPicture = AVIF_TRUE;
     } else {
-        if (codec->decodeInput->alpha && codec->internal->hasPicture) {
+        if (alpha && codec->internal->hasPicture) {
             // Special case: reuse last alpha frame
         } else {
             return AVIF_FALSE;
@@ -126,7 +120,7 @@
     }
 
     Dav1dPicture * dav1dImage = &codec->internal->dav1dPicture;
-    avifBool isColor = !codec->decodeInput->alpha;
+    avifBool isColor = !alpha;
     if (isColor) {
         // Color (YUV) planes - set image to correct size / format, fill color
 
diff --git a/src/codec_libgav1.c b/src/codec_libgav1.c
index cb020ca..75de100 100644
--- a/src/codec_libgav1.c
+++ b/src/codec_libgav1.c
@@ -13,7 +13,6 @@
     Libgav1Decoder * gav1Decoder;
     const Libgav1DecoderBuffer * gav1Image;
     avifRange colorRange;
-    uint32_t inputSampleIndex;
 };
 
 static void gav1CodecDestroyInternal(avifCodec * codec)
@@ -24,26 +23,22 @@
     avifFree(codec->internal);
 }
 
-static avifBool gav1CodecOpen(avifCodec * codec, uint32_t firstSampleIndex)
+static avifBool gav1CodecOpen(avifCodec * codec)
 {
     if (codec->internal->gav1Decoder == NULL) {
         if (Libgav1DecoderCreate(&codec->internal->gav1Settings, &codec->internal->gav1Decoder) != kLibgav1StatusOk) {
             return AVIF_FALSE;
         }
     }
-
-    codec->internal->inputSampleIndex = firstSampleIndex;
     return AVIF_TRUE;
 }
 
-static avifBool gav1CodecGetNextImage(avifCodec * codec, avifImage * image)
+static avifBool gav1CodecGetNextImage(struct avifCodec * codec, avifDecodeSample * sample, avifBool alpha, avifImage * image)
 {
     const Libgav1DecoderBuffer * nextFrame = NULL;
     // Check if there are more samples to feed
-    if (codec->internal->inputSampleIndex < codec->decodeInput->samples.count) {
+    if (sample) {
         // Feed another sample
-        avifDecodeSample * sample = &codec->decodeInput->samples.sample[codec->internal->inputSampleIndex];
-        ++codec->internal->inputSampleIndex;
         if (Libgav1DecoderEnqueueFrame(codec->internal->gav1Decoder,
                                        sample->data.data,
                                        sample->data.size,
@@ -65,7 +60,7 @@
         codec->internal->gav1Image = nextFrame;
         codec->internal->colorRange = (nextFrame->color_range == kLibgav1ColorRangeStudio) ? AVIF_RANGE_LIMITED : AVIF_RANGE_FULL;
     } else {
-        if (codec->decodeInput->alpha && codec->internal->gav1Image) {
+        if (alpha && codec->internal->gav1Image) {
             // Special case: reuse last alpha frame
         } else {
             return AVIF_FALSE;
@@ -73,7 +68,7 @@
     }
 
     const Libgav1DecoderBuffer * gav1Image = codec->internal->gav1Image;
-    avifBool isColor = !codec->decodeInput->alpha;
+    avifBool isColor = !alpha;
     if (isColor) {
         // Color (YUV) planes - set image to correct size / format, fill color
 
diff --git a/src/codec_rav1e.c b/src/codec_rav1e.c
index f7605ef..c0d18c6 100644
--- a/src/codec_rav1e.c
+++ b/src/codec_rav1e.c
@@ -23,10 +23,8 @@
     avifFree(codec->internal);
 }
 
-static avifBool rav1eCodecOpen(struct avifCodec * codec, uint32_t firstSampleIndex)
+static avifBool rav1eCodecOpen(struct avifCodec * codec)
 {
-    (void)firstSampleIndex; // Codec is encode-only, this isn't used
-
     codec->internal->rav1eContext = NULL;
     return AVIF_TRUE;
 }
diff --git a/src/io.c b/src/io.c
new file mode 100644
index 0000000..07b2c8f
--- /dev/null
+++ b/src/io.c
@@ -0,0 +1,154 @@
+// Copyright 2020 Joe Drago. All rights reserved.
+// SPDX-License-Identifier: BSD-2-Clause
+
+#include "avif/internal.h"
+
+#include <stdio.h>
+#include <string.h>
+
+void avifIODestroy(avifIO * io)
+{
+    if (io && io->destroy) {
+        io->destroy(io);
+    }
+}
+
+// --------------------------------------------------------------------------------------
+// avifIOMemoryReader
+
+typedef struct avifIOMemoryReader
+{
+    avifIO io; // this must be the first member for easy casting to avifIO*
+    avifROData rodata;
+} avifIOMemoryReader;
+
+static avifResult avifIOMemoryReaderRead(struct avifIO * io, uint32_t readFlags, uint64_t offset, uint64_t size, avifROData * out)
+{
+    // printf("avifIOMemoryReaderRead offset %zu size %zu\n", offset, size);
+
+    (void)readFlags;
+
+    avifIOMemoryReader * reader = (avifIOMemoryReader *)io;
+
+    // Sanitize/clamp incoming request
+    if (offset >= reader->rodata.size) {
+        offset = 0;
+        size = 0;
+    }
+    if ((offset + size) > reader->rodata.size) {
+        size = reader->rodata.size - offset;
+    }
+
+    out->data = reader->rodata.data + offset;
+    out->size = size;
+    return AVIF_RESULT_OK;
+}
+
+static void avifIOMemoryReaderDestroy(struct avifIO * io)
+{
+    avifFree(io);
+}
+
+avifIO * avifIOCreateMemoryReader(const uint8_t * data, size_t size)
+{
+    avifIOMemoryReader * reader = avifAlloc(sizeof(avifIOMemoryReader));
+    memset(reader, 0, sizeof(avifIOMemoryReader));
+    reader->io.destroy = avifIOMemoryReaderDestroy;
+    reader->io.read = avifIOMemoryReaderRead;
+    reader->io.sizeLimit = size;
+    reader->io.persistent = AVIF_TRUE;
+    reader->rodata.data = data;
+    reader->rodata.size = size;
+    return (avifIO *)reader;
+}
+
+// --------------------------------------------------------------------------------------
+// avifIOFileReader
+
+typedef struct avifIOFileReader
+{
+    avifIO io; // this must be the first member for easy casting to avifIO*
+    avifRWData buffer;
+    FILE * f;
+    avifBool closeOnDestroy;
+} avifIOFileReader;
+
+static avifResult avifIOFileReaderRead(struct avifIO * io, uint32_t readFlags, uint64_t offset, uint64_t size, avifROData * out)
+{
+    // printf("avifIOFileReaderRead offset %zu size %zu\n", offset, size);
+
+    (void)readFlags;
+
+    avifIOFileReader * reader = (avifIOFileReader *)io;
+    if (!reader->f) {
+        return AVIF_RESULT_IO_ERROR;
+    }
+
+    if (offset < reader->io.sizeLimit) {
+        uint64_t availableSize = reader->io.sizeLimit - offset;
+        if (size > availableSize) {
+            size = availableSize;
+        }
+    } else {
+        // The offset is past the EOF.
+        size = 0;
+    }
+
+    if (size > 0) {
+        if (reader->buffer.size < size) {
+            avifRWDataRealloc(&reader->buffer, size);
+        }
+        if (fseek(reader->f, (long)offset, SEEK_SET) != 0) {
+            return AVIF_RESULT_IO_ERROR;
+        }
+        size_t bytesRead = fread(reader->buffer.data, 1, size, reader->f);
+        if (size != bytesRead) {
+            size = bytesRead;
+        }
+    }
+
+    out->data = reader->buffer.data;
+    out->size = size;
+    return AVIF_RESULT_OK;
+}
+
+static void avifIOFileReaderDestroy(struct avifIO * io)
+{
+    avifIOFileReader * reader = (avifIOFileReader *)io;
+    if (reader->closeOnDestroy && reader->f) {
+        fclose(reader->f);
+    }
+    avifRWDataFree(&reader->buffer);
+    avifFree(io);
+}
+
+avifIO * avifIOCreateFilePtrReader(FILE * f, avifBool closeOnDestroy)
+{
+    if (!f) {
+        return NULL;
+    }
+
+    fseek(f, 0, SEEK_END);
+    size_t fileSize = (size_t)ftell(f);
+    fseek(f, 0, SEEK_SET);
+
+    avifIOFileReader * reader = avifAlloc(sizeof(avifIOFileReader));
+    memset(reader, 0, sizeof(avifIOFileReader));
+    reader->f = f;
+    reader->closeOnDestroy = closeOnDestroy;
+    reader->io.destroy = avifIOFileReaderDestroy;
+    reader->io.read = avifIOFileReaderRead;
+    reader->io.sizeLimit = fileSize;
+    reader->io.persistent = AVIF_FALSE;
+    avifRWDataRealloc(&reader->buffer, 1024);
+    return (avifIO *)reader;
+}
+
+avifIO * avifIOCreateFileReader(const char * filename)
+{
+    FILE * f = fopen(filename, "rb");
+    if (!f) {
+        return NULL;
+    }
+    return avifIOCreateFilePtrReader(f, AVIF_TRUE);
+}
diff --git a/src/read.c b/src/read.c
index 2916712..e279ab1 100644
--- a/src/read.c
+++ b/src/read.c
@@ -136,11 +136,13 @@
     avifContentType contentType;
     avifPropertyArray properties;
     avifDecoderItemExtentArray extents; // All extent offsets/sizes
-    avifRWData mergedExtents; // if populated, is a single contiguous block of this item's extents (unused when extents.count == 1)
-    uint32_t thumbnailForID;  // if non-zero, this item is a thumbnail for Item #{thumbnailForID}
-    uint32_t auxForID;        // if non-zero, this item is an auxC plane for Item #{auxForID}
-    uint32_t descForID;       // if non-zero, this item is a content description for Item #{descForID}
-    uint32_t dimgForID;       // if non-zero, this item is a derived image for Item #{dimgForID}
+    avifRWData mergedExtents;      // if set, is a single contiguous block of this item's extents (unused when extents.count == 1)
+    avifBool ownsMergedExtents;    // if true, mergedExtents must be freed when this item is destroyed
+    avifBool partialMergedExtents; // If true, mergedExtents doesn't have all of the item data yet
+    uint32_t thumbnailForID;       // if non-zero, this item is a thumbnail for Item #{thumbnailForID}
+    uint32_t auxForID;             // if non-zero, this item is an auxC plane for Item #{auxForID}
+    uint32_t descForID;            // if non-zero, this item is a content description for Item #{descForID}
+    uint32_t dimgForID;            // if non-zero, this item is a derived image for Item #{dimgForID}
     avifBool hasUnsupportedEssentialProperty; // If true, this item cites a property flagged as 'essential' that libavif doesn't support (yet). Ignore the item, if so.
 } avifDecoderItem;
 AVIF_ARRAY_DECLARE(avifDecoderItemArray, avifDecoderItem, item);
@@ -149,7 +151,7 @@
 typedef struct avifDecoderItemData
 {
     uint32_t id;
-    avifROData data;
+    avifRWData data;
 } avifDecoderItemData;
 AVIF_ARRAY_DECLARE(avifDecoderItemDataArray, avifDecoderItemData, idat);
 
@@ -317,11 +319,17 @@
 
 void avifCodecDecodeInputDestroy(avifCodecDecodeInput * decodeInput)
 {
+    for (uint32_t sampleIndex = 0; sampleIndex < decodeInput->samples.count; ++sampleIndex) {
+        avifDecodeSample * sample = &decodeInput->samples.sample[sampleIndex];
+        if (sample->ownsData) {
+            avifRWDataFree((avifRWData *)&sample->data);
+        }
+    }
     avifArrayDestroy(&decodeInput->samples);
     avifFree(decodeInput);
 }
 
-static avifBool avifCodecDecodeInputGetSamples(avifCodecDecodeInput * decodeInput, avifSampleTable * sampleTable, avifROData * rawInput)
+static avifBool avifCodecDecodeInputGetSamples(avifCodecDecodeInput * decodeInput, avifSampleTable * sampleTable, size_t sizeLimit)
 {
     uint32_t sampleSizeIndex = 0;
     for (uint32_t chunkIndex = 0; chunkIndex < sampleTable->chunks.count; ++chunkIndex) {
@@ -354,11 +362,11 @@
             }
 
             avifDecodeSample * sample = (avifDecodeSample *)avifArrayPushPtr(&decodeInput->samples);
-            sample->data.data = rawInput->data + sampleOffset;
-            sample->data.size = sampleSize;
+            sample->offset = sampleOffset;
+            sample->size = sampleSize;
             sample->sync = AVIF_FALSE; // to potentially be set to true following the outer loop
 
-            if ((sampleOffset + sampleSize) > (uint64_t)rawInput->size) {
+            if (sizeLimit && ((sampleOffset + sampleSize) > (uint64_t)sizeLimit)) {
                 return AVIF_FALSE;
             }
 
@@ -479,10 +487,16 @@
         avifDecoderItem * item = &meta->items.item[i];
         avifArrayDestroy(&item->properties);
         avifArrayDestroy(&item->extents);
-        avifRWDataFree(&item->mergedExtents);
+        if (item->ownsMergedExtents) {
+            avifRWDataFree(&item->mergedExtents);
+        }
     }
     avifArrayDestroy(&meta->items);
     avifArrayDestroy(&meta->properties);
+    for (uint32_t i = 0; i < meta->idats.count; ++i) {
+        avifDecoderItemData * idat = &meta->idats.idat[i];
+        avifRWDataFree(&idat->data);
+    }
     avifArrayDestroy(&meta->idats);
     avifFree(meta);
 }
@@ -510,9 +524,9 @@
 typedef struct avifDecoderData
 {
     avifFileType ftyp;
-    avifMeta * meta; // The root-level meta box
+    avifRWData ftypData; // cloned from avifIO
+    avifMeta * meta;     // The root-level meta box
     avifTrackArray tracks;
-    avifROData rawInput;
     avifTileArray tiles;
     unsigned int colorTileCount;
     unsigned int alphaTileCount;
@@ -591,6 +605,7 @@
 
 static void avifDecoderDataDestroy(avifDecoderData * data)
 {
+    avifRWDataFree(&data->ftypData);
     avifMetaDestroy(data->meta);
     for (uint32_t i = 0; i < data->tracks.count; ++i) {
         avifTrack * track = &data->tracks.track[i];
@@ -607,85 +622,114 @@
     avifFree(data);
 }
 
-static const uint8_t * avifDecoderDataCalcItemPtr(avifDecoderData * data, avifDecoderItem * item)
+static avifResult avifDecoderReadItem(avifDecoder * decoder, avifDecoderItem * item, avifROData * outData, size_t partialByteCount)
 {
-    if (item->mergedExtents.data) {
+    if (item->mergedExtents.data && !item->partialMergedExtents) {
         // Multiple extents have already been concatenated for this item, just return it
-        return item->mergedExtents.data;
+        memcpy(outData, &item->mergedExtents.data, sizeof(avifROData));
+        return AVIF_RESULT_OK;
+    }
+
+    if (item->extents.count == 0) {
+        return AVIF_RESULT_TRUNCATED_DATA;
     }
 
     // Find this item's source of all extents' data, based on the construction method
-    avifROData * offsetBuffer = NULL;
-    if (item->idatID == 0) {
-        // construction_method: file(0)
-
-        offsetBuffer = &data->rawInput;
-    } else {
+    avifRWData * idatBuffer = NULL;
+    if (item->idatID == 1) {
         // construction_method: idat(1)
 
         // Find associated idat block
         for (uint32_t i = 0; i < item->meta->idats.count; ++i) {
             if (item->meta->idats.idat[i].id == item->idatID) {
-                offsetBuffer = &item->meta->idats.idat[i].data;
+                idatBuffer = &item->meta->idats.idat[i].data;
                 break;
             }
         }
 
-        if (offsetBuffer == NULL) {
+        if (idatBuffer == NULL) {
             // no idat box was found in this meta box, bail out
-            return NULL;
+            return AVIF_RESULT_NO_CONTENT;
         }
     }
 
-    if (item->extents.count == 0) {
-        return NULL;
-    } else if (item->extents.count == 1) {
-        // A single extent; just use the pre-existing buffer
-
-        avifDecoderItemExtent * extent = &item->extents.extent[0];
-        if (extent->offset > offsetBuffer->size) {
-            return NULL;
-        }
-        uint64_t offsetSize = (uint64_t)extent->offset + (uint64_t)extent->size;
-        if (offsetSize > (uint64_t)offsetBuffer->size) {
-            return NULL;
-        }
-        return offsetBuffer->data + extent->offset;
-    }
-
-    // Multiple extents; merge them into a single contiguous buffer
-    if (item->size > offsetBuffer->size) {
+    // Merge extents into a single contiguous buffer
+    if ((decoder->io->sizeLimit > 0) && (item->size > decoder->io->sizeLimit)) {
         // Sanity check: somehow the sum of extents for this item exceeds the entire file or idat
         // size!
-        return NULL;
+        return AVIF_RESULT_TRUNCATED_DATA;
     }
-    avifRWDataRealloc(&item->mergedExtents, item->size);
+
+    // If there is a single extent for this item and the source of the read buffer is going to be
+    // persistent for the lifetime of the avifDecoder (whether it comes from its own internal
+    // idatBuffer or from a known-persistent IO), we can avoid buffer duplication and just use the
+    // preexisting buffer.
+    avifBool singlePersistentBuffer = ((item->extents.count == 1) && (idatBuffer || decoder->io->persistent));
+    if (!singlePersistentBuffer) {
+        avifRWDataRealloc(&item->mergedExtents, item->size);
+        item->ownsMergedExtents = AVIF_TRUE;
+    }
+
+    // Set this until we manage to fill the entire mergedExtents buffer
+    item->partialMergedExtents = AVIF_TRUE;
 
     uint8_t * front = item->mergedExtents.data;
-    size_t remainingBytes = item->mergedExtents.size;
+    size_t totalBytesToRead = item->size;
+    if (partialByteCount && (totalBytesToRead > partialByteCount)) {
+        totalBytesToRead = partialByteCount;
+    }
+    size_t remainingBytes = totalBytesToRead;
     for (uint32_t extentIter = 0; extentIter < item->extents.count; ++extentIter) {
         avifDecoderItemExtent * extent = &item->extents.extent[extentIter];
-        if (extent->offset > offsetBuffer->size) {
-            return NULL;
-        }
-        uint64_t offsetSize = (uint64_t)extent->offset + (uint64_t)extent->size;
-        if (offsetSize > (uint64_t)offsetBuffer->size) {
-            return NULL;
-        }
-        if ((size_t)extent->size > remainingBytes) {
-            // This should be impossible?
-            return NULL;
+
+        size_t bytesToRead = extent->size;
+        if (bytesToRead > remainingBytes) {
+            bytesToRead = remainingBytes;
         }
 
-        memcpy(front, offsetBuffer->data + extent->offset, extent->size);
-        front += extent->size;
-        remainingBytes -= extent->size;
+        avifROData offsetBuffer;
+        if (idatBuffer) {
+            offsetBuffer.data = idatBuffer->data;
+            offsetBuffer.size = idatBuffer->size;
+        } else {
+            // construction_method: file(0)
+
+            avifResult readResult = decoder->io->read(decoder->io, 0, extent->offset, bytesToRead, &offsetBuffer);
+            if (readResult != AVIF_RESULT_OK) {
+                return readResult;
+            }
+            if (bytesToRead != offsetBuffer.size) {
+                return AVIF_RESULT_TRUNCATED_DATA;
+            }
+        }
+
+        if (bytesToRead > offsetBuffer.size) {
+            return AVIF_RESULT_TRUNCATED_DATA;
+        }
+
+        if (singlePersistentBuffer) {
+            memcpy(&item->mergedExtents, &offsetBuffer, sizeof(avifRWData));
+            item->mergedExtents.size = bytesToRead;
+        } else {
+            memcpy(front, offsetBuffer.data, bytesToRead);
+            front += bytesToRead;
+        }
+
+        remainingBytes -= bytesToRead;
+        if (remainingBytes == 0) {
+            // This happens when partialByteCount is set
+            break;
+        }
     }
     if (remainingBytes != 0) {
         // This should be impossible?
-        return NULL;
+        return AVIF_RESULT_TRUNCATED_DATA;
     }
-    return item->mergedExtents.data;
+
+    outData->data = item->mergedExtents.data;
+    outData->size = totalBytesToRead;
+    item->partialMergedExtents = (item->size != totalBytesToRead);
+    return AVIF_RESULT_OK;
 }
 
 static avifBool avifDecoderDataGenerateImageGridTiles(avifDecoderData * data, avifImageGrid * grid, avifDecoderItem * gridItem, avifBool alpha)
@@ -724,8 +768,9 @@
 
             avifTile * tile = avifDecoderDataCreateTile(data);
             avifDecodeSample * sample = (avifDecodeSample *)avifArrayPushPtr(&tile->input->samples);
-            sample->data.data = avifDecoderDataCalcItemPtr(data, item);
-            sample->data.size = item->size;
+            sample->itemID = item->id;
+            sample->offset = 0;
+            sample->size = item->size;
             sample->sync = AVIF_TRUE;
             tile->input->alpha = alpha;
 
@@ -909,10 +954,12 @@
 // If colorId == 0 (a sentinel value as item IDs must be nonzero), accept any found EXIF/XMP metadata. Passing in 0
 // is used when finding metadata in a meta box embedded in a trak box, as any items inside of a meta box that is
 // inside of a trak box are implicitly associated to the track.
-static avifBool avifDecoderDataFindMetadata(avifDecoderData * data, avifMeta * meta, avifImage * image, uint32_t colorId)
+static avifResult avifDecoderFindMetadata(avifDecoder * decoder, avifMeta * meta, avifImage * image, uint32_t colorId)
 {
-    avifROData exifData = AVIF_DATA_EMPTY;
-    avifROData xmpData = AVIF_DATA_EMPTY;
+    if (decoder->ignoreExif && decoder->ignoreXMP) {
+        // Nothing to do!
+        return AVIF_RESULT_OK;
+    }
 
     for (uint32_t itemIndex = 0; itemIndex < meta->items.count; ++itemIndex) {
         avifDecoderItem * item = &meta->items.item[itemIndex];
@@ -929,28 +976,31 @@
             continue;
         }
 
-        if (!memcmp(item->type, "Exif", 4)) {
+        if (!decoder->ignoreExif && !memcmp(item->type, "Exif", 4)) {
+            avifROData exifContents;
+            avifResult readResult = avifDecoderReadItem(decoder, item, &exifContents, 0);
+            if (readResult != AVIF_RESULT_OK) {
+                return readResult;
+            }
+
             // Advance past Annex A.2.1's header
-            const uint8_t * boxPtr = avifDecoderDataCalcItemPtr(data, item);
-            BEGIN_STREAM(exifBoxStream, boxPtr, item->size);
+            BEGIN_STREAM(exifBoxStream, exifContents.data, exifContents.size);
             uint32_t exifTiffHeaderOffset;
             CHECK(avifROStreamReadU32(&exifBoxStream, &exifTiffHeaderOffset)); // unsigned int(32) exif_tiff_header_offset;
 
-            exifData.data = avifROStreamCurrent(&exifBoxStream);
-            exifData.size = avifROStreamRemainingBytes(&exifBoxStream);
-        } else if (!memcmp(item->type, "mime", 4) && !memcmp(item->contentType.contentType, xmpContentType, xmpContentTypeSize)) {
-            xmpData.data = avifDecoderDataCalcItemPtr(data, item);
-            xmpData.size = item->size;
+            avifImageSetMetadataExif(image, avifROStreamCurrent(&exifBoxStream), avifROStreamRemainingBytes(&exifBoxStream));
+        } else if (!decoder->ignoreXMP && !memcmp(item->type, "mime", 4) &&
+                   !memcmp(item->contentType.contentType, xmpContentType, xmpContentTypeSize)) {
+            avifROData xmpContents;
+            avifResult readResult = avifDecoderReadItem(decoder, item, &xmpContents, 0);
+            if (readResult != AVIF_RESULT_OK) {
+                return readResult;
+            }
+
+            avifImageSetMetadataXMP(image, xmpContents.data, xmpContents.size);
         }
     }
-
-    if (exifData.data && exifData.size) {
-        avifImageSetMetadataExif(image, exifData.data, exifData.size);
-    }
-    if (xmpData.data && xmpData.size) {
-        avifImageSetMetadataXMP(image, xmpData.data, xmpData.size);
-    }
-    return AVIF_TRUE;
+    return AVIF_RESULT_OK;
 }
 
 // ---------------------------------------------------------------------------
@@ -1414,8 +1464,7 @@
     int index = avifArrayPushIndex(&meta->idats);
     avifDecoderItemData * idat = &meta->idats.idat[index];
     idat->id = meta->idatID;
-    idat->data.data = raw;
-    idat->data.size = rawLen;
+    avifRWDataSet(&idat->data, raw, rawLen);
     return AVIF_TRUE;
 }
 
@@ -1977,25 +2026,58 @@
     return AVIF_TRUE;
 }
 
-static avifBool avifParse(avifDecoderData * data, const uint8_t * raw, size_t rawLen)
+static avifResult avifParse(avifDecoder * decoder)
 {
-    BEGIN_STREAM(s, raw, rawLen);
+    avifResult readResult;
+    size_t parseOffset = 0;
+    avifDecoderData * data = decoder->data;
 
-    while (avifROStreamHasBytesLeft(&s, 1)) {
-        avifBoxHeader header;
-        CHECK(avifROStreamReadBoxHeader(&s, &header));
-
-        if (!memcmp(header.type, "ftyp", 4)) {
-            CHECK(avifParseFileTypeBox(&data->ftyp, avifROStreamCurrent(&s), header.size));
-        } else if (!memcmp(header.type, "meta", 4)) {
-            CHECK(avifParseMetaBox(data->meta, avifROStreamCurrent(&s), header.size));
-        } else if (!memcmp(header.type, "moov", 4)) {
-            CHECK(avifParseMoovBox(data, avifROStreamCurrent(&s), header.size));
+    for (;;) {
+        // Read just enough to get the next box header (a max of 32 bytes)
+        avifROData headerContents;
+        readResult = decoder->io->read(decoder->io, 0, parseOffset, 32, &headerContents);
+        if (readResult != AVIF_RESULT_OK) {
+            return readResult;
+        }
+        if (!headerContents.size) {
+            // If we got AVIF_RESULT_OK from the reader but received 0 bytes,
+            // then we've reached the end of the file with no errors. Hooray!
+            break;
         }
 
-        CHECK(avifROStreamSkip(&s, header.size));
+        // Parse the header, and find out how many bytes it actually was
+        BEGIN_STREAM(headerStream, headerContents.data, headerContents.size);
+        avifBoxHeader header;
+        CHECK(avifROStreamReadBoxHeader(&headerStream, &header));
+        parseOffset += headerStream.offset;
+
+        // Try to get the remainder of the box, if necessary
+        avifROData boxContents = AVIF_DATA_EMPTY;
+
+        // TODO: reorg this code to only do these memcmps once each
+        if (!memcmp(header.type, "ftyp", 4) || (!memcmp(header.type, "meta", 4) || !memcmp(header.type, "moov", 4))) {
+            readResult = decoder->io->read(decoder->io, 0, parseOffset, header.size, &boxContents);
+            if (readResult != AVIF_RESULT_OK) {
+                return readResult;
+            }
+            if (boxContents.size != header.size) {
+                // A truncated box, bail out
+                return AVIF_RESULT_BMFF_PARSE_FAILED;
+            }
+        }
+
+        if (!memcmp(header.type, "ftyp", 4)) {
+            avifRWDataSet(&data->ftypData, boxContents.data, boxContents.size);
+            CHECK(avifParseFileTypeBox(&data->ftyp, data->ftypData.data, data->ftypData.size));
+        } else if (!memcmp(header.type, "meta", 4)) {
+            CHECK(avifParseMetaBox(data->meta, boxContents.data, boxContents.size));
+        } else if (!memcmp(header.type, "moov", 4)) {
+            CHECK(avifParseMoovBox(data, boxContents.data, boxContents.size));
+        }
+
+        parseOffset += header.size;
     }
-    return AVIF_TRUE;
+    return AVIF_RESULT_OK;
 }
 
 // ---------------------------------------------------------------------------
@@ -2059,6 +2141,7 @@
 void avifDecoderDestroy(avifDecoder * decoder)
 {
     avifDecoderCleanup(decoder);
+    avifIODestroy(decoder->io);
     avifFree(decoder);
 }
 
@@ -2068,8 +2151,83 @@
     return avifDecoderReset(decoder);
 }
 
-avifResult avifDecoderParse(avifDecoder * decoder, const avifROData * rawInput)
+avifResult avifDecoderSetIO(avifDecoder * decoder, avifIO * io)
 {
+    avifIODestroy(decoder->io);
+    decoder->io = io;
+    return AVIF_RESULT_OK;
+}
+
+avifResult avifDecoderSetIOMemory(avifDecoder * decoder, const avifROData * rawInput)
+{
+    return avifDecoderSetIO(decoder, avifIOCreateMemoryReader(rawInput->data, rawInput->size));
+}
+
+avifResult avifDecoderSetIOFilePtr(avifDecoder * decoder, FILE * f, avifBool closeOnDestroy)
+{
+    return avifDecoderSetIO(decoder, avifIOCreateFilePtrReader(f, closeOnDestroy));
+}
+
+avifResult avifDecoderSetIOFile(avifDecoder * decoder, const char * filename)
+{
+    return avifDecoderSetIO(decoder, avifIOCreateFileReader(filename));
+}
+
+static avifResult avifDecoderPrepareSample(avifDecoder * decoder, avifDecodeSample * sample, size_t partialByteCount)
+{
+    if (!sample->data.size || sample->partialData) {
+        // This sample hasn't been read from IO or had its extents fully merged yet.
+
+        if (sample->itemID) {
+            // The data comes from an item. Let avifDecoderReadItem() do the heavy lifting.
+
+            avifDecoderItem * item = avifMetaFindItem(decoder->data->meta, sample->itemID);
+            avifROData itemContents;
+            avifResult readResult = avifDecoderReadItem(decoder, item, &itemContents, partialByteCount);
+            if (readResult != AVIF_RESULT_OK) {
+                return readResult;
+            }
+
+            // The data from avifDecoderReadItem is guaranteed to already be persisted by either the
+            // underlying IO or by mergedExtents; just reuse the buffer here.
+            memcpy(&sample->data, &itemContents, sizeof(avifROData));
+            sample->ownsData = AVIF_FALSE;
+            sample->partialData = item->partialMergedExtents;
+        } else {
+            // The data likely comes from a sample table. Pull the sample and make a copy if necessary.
+
+            size_t bytesToRead = sample->size;
+            if (partialByteCount && (bytesToRead > partialByteCount)) {
+                bytesToRead = partialByteCount;
+            }
+
+            avifROData sampleContents;
+            avifResult readResult = decoder->io->read(decoder->io, 0, sample->offset, bytesToRead, &sampleContents);
+            if (readResult != AVIF_RESULT_OK) {
+                return readResult;
+            }
+            if (sampleContents.size != bytesToRead) {
+                return AVIF_RESULT_TRUNCATED_DATA;
+            }
+
+            sample->ownsData = !decoder->io->persistent;
+            sample->partialData = (bytesToRead != sample->size);
+            if (decoder->io->persistent) {
+                memcpy(&sample->data, &sampleContents, sizeof(avifROData));
+            } else {
+                avifRWDataSet((avifRWData *)&sample->data, sampleContents.data, sampleContents.size);
+            }
+        }
+    }
+    return AVIF_RESULT_OK;
+}
+
+avifResult avifDecoderParse(avifDecoder * decoder)
+{
+    if (!decoder->io || !decoder->io->read) {
+        return AVIF_RESULT_NO_IO;
+    }
+
     // Cleanup anything lingering in the decoder
     avifDecoderCleanup(decoder);
 
@@ -2078,11 +2236,9 @@
 
     decoder->data = avifDecoderDataCreate();
 
-    // Shallow copy, on purpose
-    memcpy(&decoder->data->rawInput, rawInput, sizeof(avifROData));
-
-    if (!avifParse(decoder->data, decoder->data->rawInput.data, decoder->data->rawInput.size)) {
-        return AVIF_RESULT_BMFF_PARSE_FAILED;
+    avifResult parseResult = avifParse(decoder);
+    if (parseResult != AVIF_RESULT_OK) {
+        return parseResult;
     }
 
     avifBool avifCompatible = avifFileTypeIsCompatible(&decoder->data->ftyp);
@@ -2097,36 +2253,13 @@
             // An essential property isn't supported by libavif; ignore the item.
             continue;
         }
-        const uint8_t * p = avifDecoderDataCalcItemPtr(decoder->data, item);
-        if (p == NULL) {
-            return AVIF_RESULT_BMFF_PARSE_FAILED;
-        }
-    }
-
-    // Sanity check tracks
-    for (uint32_t trackIndex = 0; trackIndex < decoder->data->tracks.count; ++trackIndex) {
-        avifTrack * track = &decoder->data->tracks.track[trackIndex];
-        if (!track->sampleTable) {
-            continue;
-        }
-
-        for (uint32_t chunkIndex = 0; chunkIndex < track->sampleTable->chunks.count; ++chunkIndex) {
-            avifSampleTableChunk * chunk = &track->sampleTable->chunks.chunk[chunkIndex];
-            if (chunk->offset > decoder->data->rawInput.size) {
-                return AVIF_RESULT_BMFF_PARSE_FAILED;
-            }
-        }
     }
     return avifDecoderReset(decoder);
 }
 
-static avifCodec * avifCodecCreateInternal(avifCodecChoice choice, avifCodecDecodeInput * decodeInput)
+static avifCodec * avifCodecCreateInternal(avifCodecChoice choice)
 {
-    avifCodec * codec = avifCodecCreate(choice, AVIF_CODEC_FLAG_CAN_DECODE);
-    if (codec) {
-        codec->decodeInput = decodeInput;
-    }
-    return codec;
+    return avifCodecCreate(choice, AVIF_CODEC_FLAG_CAN_DECODE);
 }
 
 static avifResult avifDecoderFlush(avifDecoder * decoder)
@@ -2135,11 +2268,11 @@
 
     for (unsigned int i = 0; i < decoder->data->tiles.count; ++i) {
         avifTile * tile = &decoder->data->tiles.tile[i];
-        tile->codec = avifCodecCreateInternal(decoder->codecChoice, tile->input);
+        tile->codec = avifCodecCreateInternal(decoder->codecChoice);
         if (!tile->codec) {
             return AVIF_RESULT_NO_CODEC_AVAILABLE;
         }
-        if (!tile->codec->open(tile->codec, decoder->imageIndex + 1)) {
+        if (!tile->codec->open(tile->codec)) {
             return AVIF_RESULT_DECODE_COLOR_FAILED;
         }
     }
@@ -2221,9 +2354,10 @@
 
         // Find Exif and/or XMP metadata, if any
         if (colorTrack->meta) {
-            // See the comment above avifDecoderDataFindMetadata() for the explanation of using 0 here
-            if (!avifDecoderDataFindMetadata(data, colorTrack->meta, decoder->image, 0)) {
-                return AVIF_RESULT_BMFF_PARSE_FAILED;
+            // See the comment above avifDecoderFindMetadata() for the explanation of using 0 here
+            avifResult findResult = avifDecoderFindMetadata(decoder, colorTrack->meta, decoder->image, 0);
+            if (findResult != AVIF_RESULT_OK) {
+                return findResult;
             }
         }
 
@@ -2252,14 +2386,14 @@
         }
 
         avifTile * colorTile = avifDecoderDataCreateTile(data);
-        if (!avifCodecDecodeInputGetSamples(colorTile->input, colorTrack->sampleTable, &data->rawInput)) {
+        if (!avifCodecDecodeInputGetSamples(colorTile->input, colorTrack->sampleTable, decoder->io->sizeLimit)) {
             return AVIF_RESULT_BMFF_PARSE_FAILED;
         }
         data->colorTileCount = 1;
 
         if (alphaTrack) {
             avifTile * alphaTile = avifDecoderDataCreateTile(data);
-            if (!avifCodecDecodeInputGetSamples(alphaTile->input, alphaTrack->sampleTable, &data->rawInput)) {
+            if (!avifCodecDecodeInputGetSamples(alphaTile->input, alphaTrack->sampleTable, decoder->io->sizeLimit)) {
                 return AVIF_RESULT_BMFF_PARSE_FAILED;
             }
             alphaTile->input->alpha = AVIF_TRUE;
@@ -2287,10 +2421,8 @@
     } else {
         // Create from items
 
-        avifROData colorOBU = AVIF_DATA_EMPTY;
-        avifROData alphaOBU = AVIF_DATA_EMPTY;
-        avifDecoderItem * colorOBUItem = NULL;
-        avifDecoderItem * alphaOBUItem = NULL;
+        avifDecoderItem * colorItem = NULL;
+        avifDecoderItem * alphaItem = NULL;
 
         // Find the colorOBU (primary) item
         for (uint32_t itemIndex = 0; itemIndex < data->meta->items.count; ++itemIndex) {
@@ -2320,26 +2452,28 @@
                 if (decoder->disableGridImages) {
                     return AVIF_RESULT_BMFF_PARSE_FAILED;
                 }
-                const uint8_t * itemPtr = avifDecoderDataCalcItemPtr(data, item);
-                if (itemPtr == NULL) {
+
+                avifROData readData;
+                avifResult readResult = avifDecoderReadItem(decoder, item, &readData, 0);
+                if (readResult != AVIF_RESULT_OK) {
+                    return readResult;
+                }
+                if ((readData.data == NULL) || (readData.size != item->size)) {
                     return AVIF_RESULT_BMFF_PARSE_FAILED;
                 }
-                if (!avifParseImageGridBox(&data->colorGrid, itemPtr, item->size)) {
+                if (!avifParseImageGridBox(&data->colorGrid, readData.data, item->size)) {
                     return AVIF_RESULT_INVALID_IMAGE_GRID;
                 }
-            } else {
-                colorOBU.data = avifDecoderDataCalcItemPtr(data, item);
-                colorOBU.size = item->size;
             }
 
-            colorOBUItem = item;
+            colorItem = item;
             break;
         }
 
-        if (!colorOBUItem) {
+        if (!colorItem) {
             return AVIF_RESULT_NO_AV1_ITEMS_FOUND;
         }
-        colorProperties = &colorOBUItem->properties;
+        colorProperties = &colorItem->properties;
 
         // Find the alphaOBU item, if any
         for (uint32_t itemIndex = 0; itemIndex < data->meta->items.count; ++itemIndex) {
@@ -2357,66 +2491,73 @@
                 continue;
             }
 
-            // Is this an alpha auxiliary item of whatever we chose for colorOBUItem?
+            // Is this an alpha auxiliary item of whatever we chose for colorItem?
             const avifProperty * auxCProp = avifPropertyArrayFind(&item->properties, "auxC");
-            if (auxCProp && isAlphaURN(auxCProp->u.auxC.auxType) && (item->auxForID == colorOBUItem->id)) {
+            if (auxCProp && isAlphaURN(auxCProp->u.auxC.auxType) && (item->auxForID == colorItem->id)) {
                 if (isGrid) {
                     if (decoder->disableGridImages) {
                         return AVIF_RESULT_BMFF_PARSE_FAILED;
                     }
-                    const uint8_t * itemPtr = avifDecoderDataCalcItemPtr(data, item);
-                    if (itemPtr == NULL) {
+
+                    avifROData readData;
+                    avifResult readResult = avifDecoderReadItem(decoder, item, &readData, 0);
+                    if (readResult != AVIF_RESULT_OK) {
+                        return readResult;
+                    }
+                    if ((readData.data == NULL) || (readData.size != item->size)) {
                         return AVIF_RESULT_BMFF_PARSE_FAILED;
                     }
-                    if (!avifParseImageGridBox(&data->alphaGrid, itemPtr, item->size)) {
+                    if (!avifParseImageGridBox(&data->alphaGrid, readData.data, item->size)) {
                         return AVIF_RESULT_INVALID_IMAGE_GRID;
                     }
-                } else {
-                    alphaOBU.data = avifDecoderDataCalcItemPtr(data, item);
-                    alphaOBU.size = item->size;
                 }
 
-                alphaOBUItem = item;
+                alphaItem = item;
                 break;
             }
         }
 
         // Find Exif and/or XMP metadata, if any
-        if (!avifDecoderDataFindMetadata(data, data->meta, decoder->image, colorOBUItem->id)) {
-            return AVIF_RESULT_BMFF_PARSE_FAILED;
+        avifResult findResult = avifDecoderFindMetadata(decoder, data->meta, decoder->image, colorItem->id);
+        if (findResult != AVIF_RESULT_OK) {
+            return findResult;
         }
 
         if ((data->colorGrid.rows > 0) && (data->colorGrid.columns > 0)) {
-            if (!avifDecoderDataGenerateImageGridTiles(data, &data->colorGrid, colorOBUItem, AVIF_FALSE)) {
+            if (!avifDecoderDataGenerateImageGridTiles(data, &data->colorGrid, colorItem, AVIF_FALSE)) {
                 return AVIF_RESULT_INVALID_IMAGE_GRID;
             }
             data->colorTileCount = data->tiles.count;
         } else {
-            if (colorOBU.size == 0) {
+            if (colorItem->size == 0) {
                 return AVIF_RESULT_NO_AV1_ITEMS_FOUND;
             }
 
             avifTile * colorTile = avifDecoderDataCreateTile(data);
             avifDecodeSample * colorSample = (avifDecodeSample *)avifArrayPushPtr(&colorTile->input->samples);
-            memcpy(&colorSample->data, &colorOBU, sizeof(avifROData));
+            colorSample->itemID = colorItem->id;
+            colorSample->offset = 0;
+            colorSample->size = colorItem->size;
             colorSample->sync = AVIF_TRUE;
             data->colorTileCount = 1;
         }
 
-        if (alphaOBUItem) {
+        if (alphaItem) {
             if ((data->alphaGrid.rows > 0) && (data->alphaGrid.columns > 0)) {
-                if (!avifDecoderDataGenerateImageGridTiles(data, &data->alphaGrid, alphaOBUItem, AVIF_TRUE)) {
+                if (!avifDecoderDataGenerateImageGridTiles(data, &data->alphaGrid, alphaItem, AVIF_TRUE)) {
                     return AVIF_RESULT_INVALID_IMAGE_GRID;
                 }
                 data->alphaTileCount = data->tiles.count - data->colorTileCount;
             } else {
-                if (alphaOBU.size == 0) {
+                if (alphaItem->size == 0) {
                     return AVIF_RESULT_NO_AV1_ITEMS_FOUND;
                 }
 
                 avifTile * alphaTile = avifDecoderDataCreateTile(data);
                 avifDecodeSample * alphaSample = (avifDecodeSample *)avifArrayPushPtr(&alphaTile->input->samples);
-                memcpy(&alphaSample->data, &alphaOBU, sizeof(avifROData));
+                alphaSample->itemID = alphaItem->id;
+                alphaSample->offset = 0;
+                alphaSample->size = alphaItem->size;
                 alphaSample->sync = AVIF_TRUE;
                 alphaTile->input->alpha = AVIF_TRUE;
                 data->alphaTileCount = 1;
@@ -2435,8 +2576,8 @@
         decoder->duration = 1;
         decoder->durationInTimescales = 1;
 
-        decoder->ioStats.colorOBUSize = colorOBU.size;
-        decoder->ioStats.alphaOBUSize = alphaOBU.size;
+        decoder->ioStats.colorOBUSize = colorItem->size;
+        decoder->ioStats.alphaOBUSize = alphaItem ? alphaItem->size : 0;
 
         const avifProperty * ispeProp = avifPropertyArrayFind(colorProperties, "ispe");
         if (ispeProp) {
@@ -2446,7 +2587,7 @@
             decoder->image->width = 0;
             decoder->image->height = 0;
         }
-        decoder->alphaPresent = (alphaOBUItem != NULL);
+        decoder->alphaPresent = (alphaItem != NULL);
     }
 
     // Sanity check tiles
@@ -2454,7 +2595,7 @@
         avifTile * tile = &data->tiles.tile[tileIndex];
         for (uint32_t sampleIndex = 0; sampleIndex < tile->input->samples.count; ++sampleIndex) {
             avifDecodeSample * sample = &tile->input->samples.sample[sampleIndex];
-            if (!sample->data.data || !sample->data.size) {
+            if (!sample->size) {
                 // Every sample must have some data
                 return AVIF_RESULT_BMFF_PARSE_FAILED;
             }
@@ -2500,13 +2641,35 @@
         avifTile * firstTile = &data->tiles.tile[0];
         if (firstTile->input->samples.count > 0) {
             avifDecodeSample * sample = &firstTile->input->samples.sample[0];
-            avifSequenceHeader sequenceHeader;
-            if (avifSequenceHeaderParse(&sequenceHeader, &sample->data)) {
-                data->cicpSet = AVIF_TRUE;
-                decoder->image->colorPrimaries = sequenceHeader.colorPrimaries;
-                decoder->image->transferCharacteristics = sequenceHeader.transferCharacteristics;
-                decoder->image->matrixCoefficients = sequenceHeader.matrixCoefficients;
-                decoder->image->yuvRange = sequenceHeader.range;
+
+            // Harvest CICP from the AV1's sequence header, which should be very close to the front
+            // of the first sample. Read in successively larger chunks until we successfully parse the sequence.
+            static const size_t searchSampleChunkIncrement = 64;
+            size_t searchSampleSize = 0;
+            for (;;) {
+                searchSampleSize += searchSampleChunkIncrement;
+                if (searchSampleSize > sample->size) {
+                    searchSampleSize = sample->size;
+                }
+
+                avifResult prepareResult = avifDecoderPrepareSample(decoder, sample, searchSampleSize);
+                if (prepareResult != AVIF_RESULT_OK) {
+                    return prepareResult;
+                }
+
+                avifSequenceHeader sequenceHeader;
+                if (avifSequenceHeaderParse(&sequenceHeader, &sample->data)) {
+                    data->cicpSet = AVIF_TRUE;
+                    decoder->image->colorPrimaries = sequenceHeader.colorPrimaries;
+                    decoder->image->transferCharacteristics = sequenceHeader.transferCharacteristics;
+                    decoder->image->matrixCoefficients = sequenceHeader.matrixCoefficients;
+                    decoder->image->yuvRange = sequenceHeader.range;
+                    break;
+                }
+
+                if (searchSampleSize == sample->size) {
+                    break;
+                }
             }
         }
     }
@@ -2537,10 +2700,34 @@
 
 avifResult avifDecoderNextImage(avifDecoder * decoder)
 {
+    if (!decoder->io || !decoder->io->read) {
+        return AVIF_RESULT_NO_IO;
+    }
+
+    uint32_t nextImageIndex = (uint32_t)(decoder->imageIndex + 1);
+
+    // Acquire all sample data for the current image first, allowing for any read call to bail out
+    // with AVIF_RESULT_WAITING_ON_IO harmlessly / idempotently.
+    for (unsigned int tileIndex = 0; tileIndex < decoder->data->tiles.count; ++tileIndex) {
+        avifTile * tile = &decoder->data->tiles.tile[tileIndex];
+        if (nextImageIndex >= tile->input->samples.count) {
+            return AVIF_RESULT_NO_IMAGES_REMAINING;
+        }
+
+        avifDecodeSample * sample = &tile->input->samples.sample[nextImageIndex];
+        avifResult prepareResult = avifDecoderPrepareSample(decoder, sample, 0);
+        if (prepareResult != AVIF_RESULT_OK) {
+            return prepareResult;
+        }
+    }
+
+    // Decode all frames now that the sample data is ready.
     for (unsigned int tileIndex = 0; tileIndex < decoder->data->tiles.count; ++tileIndex) {
         avifTile * tile = &decoder->data->tiles.tile[tileIndex];
 
-        if (!tile->codec->getNextImage(tile->codec, tile->image)) {
+        avifDecodeSample * sample = &tile->input->samples.sample[nextImageIndex];
+
+        if (!tile->codec->getNextImage(tile->codec, sample, tile->input->alpha, tile->image)) {
             if (tile->input->alpha) {
                 return AVIF_RESULT_DECODE_ALPHA_FAILED;
             } else {
@@ -2620,7 +2807,7 @@
         }
     }
 
-    ++decoder->imageIndex;
+    decoder->imageIndex = nextImageIndex;
     if (decoder->data->sourceSampleTable) {
         // Decoding from a track! Provide timing information.
 
@@ -2722,9 +2909,9 @@
     return frameIndex;
 }
 
-avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image, const avifROData * input)
+avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image)
 {
-    avifResult result = avifDecoderParse(decoder, input);
+    avifResult result = avifDecoderParse(decoder);
     if (result != AVIF_RESULT_OK) {
         return result;
     }
@@ -2735,3 +2922,21 @@
     avifImageCopy(image, decoder->image, AVIF_PLANES_ALL);
     return AVIF_RESULT_OK;
 }
+
+avifResult avifDecoderReadMemory(avifDecoder * decoder, avifImage * image, const avifROData * input)
+{
+    avifResult result = avifDecoderSetIOMemory(decoder, input);
+    if (result != AVIF_RESULT_OK) {
+        return result;
+    }
+    return avifDecoderRead(decoder, image);
+}
+
+avifResult avifDecoderReadFile(avifDecoder * decoder, avifImage * image, const char * filename)
+{
+    avifResult result = avifDecoderSetIOFile(decoder, filename);
+    if (result != AVIF_RESULT_OK) {
+        return result;
+    }
+    return avifDecoderRead(decoder, image);
+}
diff --git a/src/stream.c b/src/stream.c
index 3270061..bc8a7b4 100644
--- a/src/stream.c
+++ b/src/stream.c
@@ -165,11 +165,6 @@
         return AVIF_FALSE;
     }
     header->size = (size_t)(size - bytesRead);
-
-    // Make the assumption here that this box's contents must fit in the remaining portion of the parent stream
-    if (header->size > avifROStreamRemainingBytes(stream)) {
-        return AVIF_FALSE;
-    }
     return AVIF_TRUE;
 }