/*
 * Copyright (c) 2021, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 3-Clause Clear License
 * and the Alliance for Open Media Patent License 1.0. If the BSD 3-Clause Clear
 * License was not distributed with this source code in the LICENSE file, you
 * can obtain it at aomedia.org/license/software-license/bsd-3-c-c/. If the
 * Alliance for Open Media Patent License 1.0 was not distributed with this
 * source code in the PATENTS file, you can obtain it at
 * aomedia.org/license/patent-license/.
 */

#ifndef AOM_AOM_DSP_SIMD_V256_INTRINSICS_H_
#define AOM_AOM_DSP_SIMD_V256_INTRINSICS_H_

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "aom_dsp/simd/v256_intrinsics_c.h"
#include "aom_dsp/simd/v128_intrinsics.h"
#include "aom_dsp/simd/v64_intrinsics.h"

/* Fallback to plain, unoptimised C. */

typedef c_v256 v256;
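
/* Every v256 operation in this header simply forwards to its c_v256_*
 * counterpart from v256_intrinsics_c.h, so code written against the v256
 * names runs unchanged on this plain C fallback. A minimal usage sketch of
 * the load/compute/store pattern (the function and buffer names here are
 * hypothetical, not part of the API):
 *
 *   static void add_one_32(const uint8_t *src, uint8_t *dst) {
 *     v256 a = v256_load_unaligned(src);               // 32 bytes of input
 *     v256 ones = v256_dup_8(1);                       // broadcast constant 1
 *     v256_store_unaligned(dst, v256_add_8(a, ones));  // per-byte add
 *   }
 */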

SIMD_INLINE uint32_t v256_low_u32(v256 a) { return c_v256_low_u32(a); }
SIMD_INLINE v64 v256_low_v64(v256 a) { return c_v256_low_v64(a); }
SIMD_INLINE uint64_t v256_low_u64(v256 a) { return c_v256_low_u64(a); }
SIMD_INLINE v128 v256_low_v128(v256 a) { return c_v256_low_v128(a); }
SIMD_INLINE v128 v256_high_v128(v256 a) { return c_v256_high_v128(a); }
SIMD_INLINE v256 v256_from_v128(v128 hi, v128 lo) {
  return c_v256_from_v128(hi, lo);
}
SIMD_INLINE v256 v256_from_64(uint64_t a, uint64_t b, uint64_t c, uint64_t d) {
  return c_v256_from_64(a, b, c, d);
}
SIMD_INLINE v256 v256_from_v64(v64 a, v64 b, v64 c, v64 d) {
  return c_v256_from_v64(a, b, c, d);
}

SIMD_INLINE v256 v256_load_unaligned(const void *p) {
  return c_v256_load_unaligned(p);
}
SIMD_INLINE v256 v256_load_aligned(const void *p) {
  return c_v256_load_aligned(p);
}

SIMD_INLINE void v256_store_unaligned(void *p, v256 a) {
  c_v256_store_unaligned(p, a);
}
SIMD_INLINE void v256_store_aligned(void *p, v256 a) {
  c_v256_store_aligned(p, a);
}

SIMD_INLINE v256 v256_align(v256 a, v256 b, unsigned int c) {
  return c_v256_align(a, b, c);
}

SIMD_INLINE v256 v256_zero(void) { return c_v256_zero(); }
SIMD_INLINE v256 v256_dup_8(uint8_t x) { return c_v256_dup_8(x); }
SIMD_INLINE v256 v256_dup_16(uint16_t x) { return c_v256_dup_16(x); }
SIMD_INLINE v256 v256_dup_32(uint32_t x) { return c_v256_dup_32(x); }
SIMD_INLINE v256 v256_dup_64(uint64_t x) { return c_v256_dup_64(x); }

SIMD_INLINE c_sad256_internal v256_sad_u8_init(void) {
  return c_v256_sad_u8_init();
}
SIMD_INLINE c_sad256_internal v256_sad_u8(c_sad256_internal s, v256 a, v256 b) {
  return c_v256_sad_u8(s, a, b);
}
SIMD_INLINE uint32_t v256_sad_u8_sum(c_sad256_internal s) {
  return c_v256_sad_u8_sum(s);
}
SIMD_INLINE c_ssd256_internal v256_ssd_u8_init(void) {
  return c_v256_ssd_u8_init();
}
SIMD_INLINE c_ssd256_internal v256_ssd_u8(c_ssd256_internal s, v256 a, v256 b) {
  return c_v256_ssd_u8(s, a, b);
}
SIMD_INLINE uint32_t v256_ssd_u8_sum(c_ssd256_internal s) {
  return c_v256_ssd_u8_sum(s);
}

SIMD_INLINE c_ssd256_internal_s16 v256_ssd_s16_init(void) {
  return c_v256_ssd_s16_init();
}
SIMD_INLINE c_ssd256_internal_s16 v256_ssd_s16(c_ssd256_internal_s16 s, v256 a,
                                               v256 b) {
  return c_v256_ssd_s16(s, a, b);
}
SIMD_INLINE uint64_t v256_ssd_s16_sum(c_ssd256_internal_s16 s) {
  return c_v256_ssd_s16_sum(s);
}
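
/* The SAD/SSD helpers follow an init/accumulate/sum protocol: create an
 * accumulator, fold in one pair of vectors per call, then extract the scalar
 * total. A minimal sketch of a 32-wide SAD over n rows, assuming row-major
 * buffers with the given strides (the function and parameter names are
 * hypothetical):
 *
 *   static uint32_t sad_32xn(const uint8_t *src, int src_stride,
 *                            const uint8_t *ref, int ref_stride, int n) {
 *     c_sad256_internal s = v256_sad_u8_init();
 *     for (int i = 0; i < n; i++) {
 *       s = v256_sad_u8(s, v256_load_unaligned(src + i * src_stride),
 *                       v256_load_unaligned(ref + i * ref_stride));
 *     }
 *     return v256_sad_u8_sum(s);
 *   }
 */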

SIMD_INLINE int64_t v256_dotp_su8(v256 a, v256 b) {
  return c_v256_dotp_su8(a, b);
}
SIMD_INLINE int64_t v256_dotp_s16(v256 a, v256 b) {
  return c_v256_dotp_s16(a, b);
}
SIMD_INLINE int64_t v256_dotp_s32(v256 a, v256 b) {
  return c_v256_dotp_s32(a, b);
}
SIMD_INLINE uint64_t v256_hadd_u8(v256 a) { return c_v256_hadd_u8(a); }
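
/* v256_dotp_s16() multiplies corresponding 16-bit lanes and returns the sum
 * of the sixteen products as a 64-bit value, so a dot product over int16_t
 * arrays can be accumulated sixteen elements at a time. A minimal sketch,
 * assuming len is a multiple of 16 (the function and parameter names are
 * hypothetical):
 *
 *   static int64_t dot16(const int16_t *x, const int16_t *y, int len) {
 *     int64_t sum = 0;
 *     for (int i = 0; i < len; i += 16) {
 *       sum += v256_dotp_s16(v256_load_unaligned(x + i),
 *                            v256_load_unaligned(y + i));
 *     }
 *     return sum;
 *   }
 */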

SIMD_INLINE v256 v256_or(v256 a, v256 b) { return c_v256_or(a, b); }
SIMD_INLINE v256 v256_xor(v256 a, v256 b) { return c_v256_xor(a, b); }
SIMD_INLINE v256 v256_and(v256 a, v256 b) { return c_v256_and(a, b); }
SIMD_INLINE v256 v256_andn(v256 a, v256 b) { return c_v256_andn(a, b); }

SIMD_INLINE v256 v256_add_8(v256 a, v256 b) { return c_v256_add_8(a, b); }
SIMD_INLINE v256 v256_add_16(v256 a, v256 b) { return c_v256_add_16(a, b); }
SIMD_INLINE v256 v256_sadd_s8(v256 a, v256 b) { return c_v256_sadd_s8(a, b); }
SIMD_INLINE v256 v256_sadd_u8(v256 a, v256 b) { return c_v256_sadd_u8(a, b); }
SIMD_INLINE v256 v256_sadd_s16(v256 a, v256 b) { return c_v256_sadd_s16(a, b); }
SIMD_INLINE v256 v256_add_32(v256 a, v256 b) { return c_v256_add_32(a, b); }
SIMD_INLINE v256 v256_add_64(v256 a, v256 b) { return c_v256_add_64(a, b); }
SIMD_INLINE v256 v256_sub_64(v256 a, v256 b) { return c_v256_sub_64(a, b); }
SIMD_INLINE v256 v256_padd_u8(v256 a) { return c_v256_padd_u8(a); }
SIMD_INLINE v256 v256_padd_s16(v256 a) { return c_v256_padd_s16(a); }
SIMD_INLINE v256 v256_sub_8(v256 a, v256 b) { return c_v256_sub_8(a, b); }
SIMD_INLINE v256 v256_ssub_u8(v256 a, v256 b) { return c_v256_ssub_u8(a, b); }
SIMD_INLINE v256 v256_ssub_s8(v256 a, v256 b) { return c_v256_ssub_s8(a, b); }
SIMD_INLINE v256 v256_sub_16(v256 a, v256 b) { return c_v256_sub_16(a, b); }
SIMD_INLINE v256 v256_ssub_s16(v256 a, v256 b) { return c_v256_ssub_s16(a, b); }
SIMD_INLINE v256 v256_ssub_u16(v256 a, v256 b) { return c_v256_ssub_u16(a, b); }
SIMD_INLINE v256 v256_sub_32(v256 a, v256 b) { return c_v256_sub_32(a, b); }
SIMD_INLINE v256 v256_abs_s16(v256 a) { return c_v256_abs_s16(a); }
SIMD_INLINE v256 v256_abs_s8(v256 a) { return c_v256_abs_s8(a); }

SIMD_INLINE v256 v256_mul_s16(v128 a, v128 b) { return c_v256_mul_s16(a, b); }
SIMD_INLINE v256 v256_mullo_s16(v256 a, v256 b) {
  return c_v256_mullo_s16(a, b);
}
SIMD_INLINE v256 v256_mulhi_s16(v256 a, v256 b) {
  return c_v256_mulhi_s16(a, b);
}
SIMD_INLINE v256 v256_mullo_s32(v256 a, v256 b) {
  return c_v256_mullo_s32(a, b);
}
SIMD_INLINE v256 v256_madd_s16(v256 a, v256 b) { return c_v256_madd_s16(a, b); }
SIMD_INLINE v256 v256_madd_us8(v256 a, v256 b) { return c_v256_madd_us8(a, b); }

SIMD_INLINE uint32_t v256_movemask_8(v256 a) { return c_v256_movemask_8(a); }
SIMD_INLINE v256 v256_blend_8(v256 a, v256 b, v256 c) {
  return c_v256_blend_8(a, b, c);
}

SIMD_INLINE v256 v256_avg_u8(v256 a, v256 b) { return c_v256_avg_u8(a, b); }
SIMD_INLINE v256 v256_rdavg_u8(v256 a, v256 b) { return c_v256_rdavg_u8(a, b); }
SIMD_INLINE v256 v256_rdavg_u16(v256 a, v256 b) {
  return c_v256_rdavg_u16(a, b);
}
SIMD_INLINE v256 v256_avg_u16(v256 a, v256 b) { return c_v256_avg_u16(a, b); }
SIMD_INLINE v256 v256_min_u8(v256 a, v256 b) { return c_v256_min_u8(a, b); }
SIMD_INLINE v256 v256_max_u8(v256 a, v256 b) { return c_v256_max_u8(a, b); }
SIMD_INLINE v256 v256_min_s8(v256 a, v256 b) { return c_v256_min_s8(a, b); }
SIMD_INLINE v256 v256_max_s8(v256 a, v256 b) { return c_v256_max_s8(a, b); }
SIMD_INLINE v256 v256_min_s16(v256 a, v256 b) { return c_v256_min_s16(a, b); }
SIMD_INLINE v256 v256_max_s16(v256 a, v256 b) { return c_v256_max_s16(a, b); }
SIMD_INLINE v256 v256_min_s32(v256 a, v256 b) { return c_v256_min_s32(a, b); }
SIMD_INLINE v256 v256_max_s32(v256 a, v256 b) { return c_v256_max_s32(a, b); }

SIMD_INLINE v256 v256_ziplo_8(v256 a, v256 b) { return c_v256_ziplo_8(a, b); }
SIMD_INLINE v256 v256_ziphi_8(v256 a, v256 b) { return c_v256_ziphi_8(a, b); }
SIMD_INLINE v256 v256_ziplo_16(v256 a, v256 b) { return c_v256_ziplo_16(a, b); }
SIMD_INLINE v256 v256_ziphi_16(v256 a, v256 b) { return c_v256_ziphi_16(a, b); }
SIMD_INLINE v256 v256_ziplo_32(v256 a, v256 b) { return c_v256_ziplo_32(a, b); }
SIMD_INLINE v256 v256_ziphi_32(v256 a, v256 b) { return c_v256_ziphi_32(a, b); }
SIMD_INLINE v256 v256_ziplo_64(v256 a, v256 b) { return c_v256_ziplo_64(a, b); }
SIMD_INLINE v256 v256_ziphi_64(v256 a, v256 b) { return c_v256_ziphi_64(a, b); }
SIMD_INLINE v256 v256_ziplo_128(v256 a, v256 b) {
  return c_v256_ziplo_128(a, b);
}
SIMD_INLINE v256 v256_ziphi_128(v256 a, v256 b) {
  return c_v256_ziphi_128(a, b);
}
SIMD_INLINE v256 v256_zip_8(v128 a, v128 b) { return c_v256_zip_8(a, b); }
SIMD_INLINE v256 v256_zip_16(v128 a, v128 b) { return c_v256_zip_16(a, b); }
SIMD_INLINE v256 v256_zip_32(v128 a, v128 b) { return c_v256_zip_32(a, b); }
SIMD_INLINE v256 v256_unziplo_8(v256 a, v256 b) {
  return c_v256_unziplo_8(a, b);
}
SIMD_INLINE v256 v256_unziphi_8(v256 a, v256 b) {
  return c_v256_unziphi_8(a, b);
}
SIMD_INLINE v256 v256_unziplo_16(v256 a, v256 b) {
  return c_v256_unziplo_16(a, b);
}
SIMD_INLINE v256 v256_unziphi_16(v256 a, v256 b) {
  return c_v256_unziphi_16(a, b);
}
SIMD_INLINE v256 v256_unziplo_32(v256 a, v256 b) {
  return c_v256_unziplo_32(a, b);
}
SIMD_INLINE v256 v256_unziphi_32(v256 a, v256 b) {
  return c_v256_unziphi_32(a, b);
}
SIMD_INLINE v256 v256_unziplo_64(v256 a, v256 b) {
  return c_v256_unziplo_64(a, b);
}
SIMD_INLINE v256 v256_unziphi_64(v256 a, v256 b) {
  return c_v256_unziphi_64(a, b);
}
SIMD_INLINE v256 v256_unpack_u8_s16(v128 a) { return c_v256_unpack_u8_s16(a); }
SIMD_INLINE v256 v256_unpacklo_u8_s16(v256 a) {
  return c_v256_unpacklo_u8_s16(a);
}
SIMD_INLINE v256 v256_unpackhi_u8_s16(v256 a) {
  return c_v256_unpackhi_u8_s16(a);
}
SIMD_INLINE v256 v256_unpack_s8_s16(v128 a) { return c_v256_unpack_s8_s16(a); }
SIMD_INLINE v256 v256_unpacklo_s8_s16(v256 a) {
  return c_v256_unpacklo_s8_s16(a);
}
SIMD_INLINE v256 v256_unpackhi_s8_s16(v256 a) {
  return c_v256_unpackhi_s8_s16(a);
}
SIMD_INLINE v256 v256_pack_s32_s16(v256 a, v256 b) {
  return c_v256_pack_s32_s16(a, b);
}
SIMD_INLINE v256 v256_pack_s32_u16(v256 a, v256 b) {
  return c_v256_pack_s32_u16(a, b);
}
SIMD_INLINE v256 v256_pack_s16_u8(v256 a, v256 b) {
  return c_v256_pack_s16_u8(a, b);
}
SIMD_INLINE v256 v256_pack_s16_s8(v256 a, v256 b) {
  return c_v256_pack_s16_s8(a, b);
}
SIMD_INLINE v256 v256_unpack_u16_s32(v128 a) {
  return c_v256_unpack_u16_s32(a);
}
SIMD_INLINE v256 v256_unpack_s16_s32(v128 a) {
  return c_v256_unpack_s16_s32(a);
}
SIMD_INLINE v256 v256_unpacklo_u16_s32(v256 a) {
  return c_v256_unpacklo_u16_s32(a);
}
SIMD_INLINE v256 v256_unpacklo_s16_s32(v256 a) {
  return c_v256_unpacklo_s16_s32(a);
}
SIMD_INLINE v256 v256_unpackhi_u16_s32(v256 a) {
  return c_v256_unpackhi_u16_s32(a);
}
SIMD_INLINE v256 v256_unpackhi_s16_s32(v256 a) {
  return c_v256_unpackhi_s16_s32(a);
}
SIMD_INLINE v256 v256_shuffle_8(v256 a, v256 pattern) {
  return c_v256_shuffle_8(a, pattern);
}
SIMD_INLINE v256 v256_wideshuffle_8(v256 a, v256 b, v256 pattern) {
  return c_v256_wideshuffle_8(a, b, pattern);
}
SIMD_INLINE v256 v256_pshuffle_8(v256 a, v256 pattern) {
  return c_v256_pshuffle_8(a, pattern);
}

SIMD_INLINE v256 v256_cmpgt_s8(v256 a, v256 b) { return c_v256_cmpgt_s8(a, b); }
SIMD_INLINE v256 v256_cmplt_s8(v256 a, v256 b) { return c_v256_cmplt_s8(a, b); }
SIMD_INLINE v256 v256_cmpeq_8(v256 a, v256 b) { return c_v256_cmpeq_8(a, b); }
SIMD_INLINE v256 v256_cmpgt_s16(v256 a, v256 b) {
  return c_v256_cmpgt_s16(a, b);
}
SIMD_INLINE v256 v256_cmplt_s16(v256 a, v256 b) {
  return c_v256_cmplt_s16(a, b);
}
SIMD_INLINE v256 v256_cmpeq_16(v256 a, v256 b) { return c_v256_cmpeq_16(a, b); }
SIMD_INLINE v256 v256_cmpeq_32(v256 a, v256 b) { return c_v256_cmpeq_32(a, b); }

SIMD_INLINE v256 v256_cmpgt_s32(v256 a, v256 b) {
  return c_v256_cmpgt_s32(a, b);
}
SIMD_INLINE v256 v256_cmplt_s32(v256 a, v256 b) {
  return c_v256_cmplt_s32(a, b);
}
SIMD_INLINE v256 v256_shl_8(v256 a, unsigned int c) {
  return c_v256_shl_8(a, c);
}
SIMD_INLINE v256 v256_shr_u8(v256 a, unsigned int c) {
  return c_v256_shr_u8(a, c);
}
SIMD_INLINE v256 v256_shr_s8(v256 a, unsigned int c) {
  return c_v256_shr_s8(a, c);
}
SIMD_INLINE v256 v256_shl_16(v256 a, unsigned int c) {
  return c_v256_shl_16(a, c);
}
SIMD_INLINE v256 v256_shr_u16(v256 a, unsigned int c) {
  return c_v256_shr_u16(a, c);
}
SIMD_INLINE v256 v256_shr_s16(v256 a, unsigned int c) {
  return c_v256_shr_s16(a, c);
}
SIMD_INLINE v256 v256_shl_32(v256 a, unsigned int c) {
  return c_v256_shl_32(a, c);
}
SIMD_INLINE v256 v256_shr_u32(v256 a, unsigned int c) {
  return c_v256_shr_u32(a, c);
}
SIMD_INLINE v256 v256_shr_s32(v256 a, unsigned int c) {
  return c_v256_shr_s32(a, c);
}
SIMD_INLINE v256 v256_shl_64(v256 a, unsigned int c) {
  return c_v256_shl_64(a, c);
}
SIMD_INLINE v256 v256_shr_u64(v256 a, unsigned int c) {
  return c_v256_shr_u64(a, c);
}
SIMD_INLINE v256 v256_shr_s64(v256 a, unsigned int c) {
  return c_v256_shr_s64(a, c);
}

SIMD_INLINE v256 v256_shr_n_byte(v256 a, unsigned int n) {
  return c_v256_shr_n_byte(a, n);
}
SIMD_INLINE v256 v256_shl_n_byte(v256 a, unsigned int n) {
  return c_v256_shl_n_byte(a, n);
}
SIMD_INLINE v256 v256_shl_n_8(v256 a, unsigned int n) {
  return c_v256_shl_n_8(a, n);
}
SIMD_INLINE v256 v256_shl_n_16(v256 a, unsigned int n) {
  return c_v256_shl_n_16(a, n);
}
SIMD_INLINE v256 v256_shl_n_32(v256 a, unsigned int n) {
  return c_v256_shl_n_32(a, n);
}
SIMD_INLINE v256 v256_shl_n_64(v256 a, unsigned int n) {
  return c_v256_shl_n_64(a, n);
}
SIMD_INLINE v256 v256_shr_n_u8(v256 a, unsigned int n) {
  return c_v256_shr_n_u8(a, n);
}
SIMD_INLINE v256 v256_shr_n_u16(v256 a, unsigned int n) {
  return c_v256_shr_n_u16(a, n);
}
SIMD_INLINE v256 v256_shr_n_u32(v256 a, unsigned int n) {
  return c_v256_shr_n_u32(a, n);
}
SIMD_INLINE v256 v256_shr_n_u64(v256 a, unsigned int n) {
  return c_v256_shr_n_u64(a, n);
}
SIMD_INLINE v256 v256_shr_n_s8(v256 a, unsigned int n) {
  return c_v256_shr_n_s8(a, n);
}
SIMD_INLINE v256 v256_shr_n_s16(v256 a, unsigned int n) {
  return c_v256_shr_n_s16(a, n);
}
SIMD_INLINE v256 v256_shr_n_s32(v256 a, unsigned int n) {
  return c_v256_shr_n_s32(a, n);
}
SIMD_INLINE v256 v256_shr_n_s64(v256 a, unsigned int n) {
  return c_v256_shr_n_s64(a, n);
}

SIMD_INLINE v256 v256_shr_n_word(v256 a, unsigned int n) {
  return c_v256_shr_n_word(a, n);
}
SIMD_INLINE v256 v256_shl_n_word(v256 a, unsigned int n) {
  return c_v256_shl_n_word(a, n);
}
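
/* The v256_shl_n_<size>() and v256_shr_n_<size>() forms mirror the
 * variable-count shifts above, but the shift amount is expected to be a
 * compile-time constant so that optimised backends can use immediate-form
 * instructions; this C fallback accepts any value. A minimal sketch of a
 * rounding right shift by a constant 2, assuming 16-bit lane data (the
 * function name is hypothetical):
 *
 *   static v256 round_shift_2_s16(v256 x) {
 *     return v256_shr_n_s16(v256_add_16(x, v256_dup_16(2)), 2);
 *   }
 */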

typedef uint32_t sad256_internal_u16;
SIMD_INLINE sad256_internal_u16 v256_sad_u16_init(void) {
  return c_v256_sad_u16_init();
}
SIMD_INLINE sad256_internal_u16 v256_sad_u16(sad256_internal_u16 s, v256 a,
                                             v256 b) {
  return c_v256_sad_u16(s, a, b);
}
SIMD_INLINE uint32_t v256_sad_u16_sum(sad256_internal_u16 s) {
  return c_v256_sad_u16_sum(s);
}

#endif  // AOM_AOM_DSP_SIMD_V256_INTRINSICS_H_