Palette_delta_encoding: compile fix.

s/uint16/uint16_t/

Issue introduced in https://aomedia-review.googlesource.com/c/11148/
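
For context: plain "uint16" is not declared anywhere in the tree; the
standard fixed-width type is uint16_t from <stdint.h>, hence the compile
error. Below is a minimal standalone sketch of the comparator as it reads
after this fix, with a hypothetical driver added for illustration (the
main() and sample palette values are not part of this patch):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Comparator after the fix. The subtraction is safe: both uint16_t
 * operands promote to int before subtracting, so it cannot overflow
 * and the sign of the result gives the correct ordering for qsort. */
static int uint16_compare(const void *a, const void *b) {
  const uint16_t va = *(const uint16_t *)a;
  const uint16_t vb = *(const uint16_t *)b;
  return va - vb;
}

/* Hypothetical driver, for illustration only. */
int main(void) {
  uint16_t colors[] = { 512, 16, 1023, 128 };
  const size_t n = sizeof(colors) / sizeof(colors[0]);
  qsort(colors, n, sizeof(colors[0]), uint16_compare);
  for (size_t i = 0; i < n; ++i) printf("%u ", (unsigned)colors[i]);
  printf("\n");  /* prints: 16 128 512 1023 */
  return 0;
}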

Change-Id: I77cb5865828adef98976863fafa2ecbb907290a5
diff --git a/av1/decoder/decodemv.c b/av1/decoder/decodemv.c
index 2604ead..af226fa 100644
--- a/av1/decoder/decodemv.c
+++ b/av1/decoder/decodemv.c
@@ -648,8 +648,8 @@
 #if CONFIG_PALETTE_DELTA_ENCODING
 #if CONFIG_HIGHBITDEPTH
 static int uint16_compare(const void *a, const void *b) {
-  const uint16 va = *(const uint16 *)a;
-  const uint16 vb = *(const uint16 *)b;
+  const uint16_t va = *(const uint16_t *)a;
+  const uint16_t vb = *(const uint16_t *)b;
   return va - vb;
 }
 #else
diff --git a/av1/encoder/palette.c b/av1/encoder/palette.c
index ca2a54f..01250bb 100644
--- a/av1/encoder/palette.c
+++ b/av1/encoder/palette.c
@@ -196,7 +196,7 @@
                           const void *colors, int n_colors,
                           uint8_t *cache_color_found, int *out_cache_colors) {
 #if CONFIG_HIGHBITDEPTH
-  const uint16_t *colors_in = (const uint16 *)colors;
+  const uint16_t *colors_in = (const uint16_t *)colors;
 #else
   const uint8_t *colors_in = (const uint8_t *)colors;
 #endif  // CONFIG_HIGHBITDEPTH