/*
 * Copyright (c) 2021, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 3-Clause Clear License and the
 * Alliance for Open Media Patent License 1.0. If the BSD 3-Clause Clear License was
 * not distributed with this source code in the LICENSE file, you can obtain it
 * at aomedia.org/license/software-license/bsd-3-c-c/. If the Alliance for Open Media Patent
 * License 1.0 was not distributed with this source code in the PATENTS file, you
 * can obtain it at aomedia.org/license/patent-license/.
 */

#ifndef AOM_AOM_DSP_SIMD_V256_INTRINSICS_H_
#define AOM_AOM_DSP_SIMD_V256_INTRINSICS_H_

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "aom_dsp/simd/v256_intrinsics_c.h"
#include "aom_dsp/simd/v128_intrinsics.h"
#include "aom_dsp/simd/v64_intrinsics.h"

/* Fallback to plain, unoptimised C. */

typedef c_v256 v256;

SIMD_INLINE uint32_t v256_low_u32(v256 a) { return c_v256_low_u32(a); }
SIMD_INLINE v64 v256_low_v64(v256 a) { return c_v256_low_v64(a); }
SIMD_INLINE uint64_t v256_low_u64(v256 a) { return c_v256_low_u64(a); }
SIMD_INLINE v128 v256_low_v128(v256 a) { return c_v256_low_v128(a); }
SIMD_INLINE v128 v256_high_v128(v256 a) { return c_v256_high_v128(a); }
SIMD_INLINE v256 v256_from_v128(v128 hi, v128 lo) {
  return c_v256_from_v128(hi, lo);
}
SIMD_INLINE v256 v256_from_64(uint64_t a, uint64_t b, uint64_t c, uint64_t d) {
  return c_v256_from_64(a, b, c, d);
}
SIMD_INLINE v256 v256_from_v64(v64 a, v64 b, v64 c, v64 d) {
  return c_v256_from_v64(a, b, c, d);
}

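/* Illustrative sketch, not part of the upstream header: a hypothetical helper
   showing how the constructors above can build a constant vector of byte
   indices 0..31.  Lane ordering is assumed to follow c_v256_from_64() in
   v256_intrinsics_c.h, where the first argument supplies the most significant
   64 bits. */
SIMD_INLINE v256 v256_example_byte_indices(void) {
  return v256_from_64(0x1f1e1d1c1b1a1918ULL, 0x1716151413121110ULL,
                      0x0f0e0d0c0b0a0908ULL, 0x0706050403020100ULL);
}
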
SIMD_INLINE v256 v256_load_unaligned(const void *p) {
  return c_v256_load_unaligned(p);
}
SIMD_INLINE v256 v256_load_aligned(const void *p) {
  return c_v256_load_aligned(p);
}

SIMD_INLINE void v256_store_unaligned(void *p, v256 a) {
  c_v256_store_unaligned(p, a);
}
SIMD_INLINE void v256_store_aligned(void *p, v256 a) {
  c_v256_store_aligned(p, a);
}

SIMD_INLINE v256 v256_align(v256 a, v256 b, unsigned int c) {
  return c_v256_align(a, b, c);
}

SIMD_INLINE v256 v256_zero(void) { return c_v256_zero(); }
SIMD_INLINE v256 v256_dup_8(uint8_t x) { return c_v256_dup_8(x); }
SIMD_INLINE v256 v256_dup_16(uint16_t x) { return c_v256_dup_16(x); }
SIMD_INLINE v256 v256_dup_32(uint32_t x) { return c_v256_dup_32(x); }
SIMD_INLINE v256 v256_dup_64(uint64_t x) { return c_v256_dup_64(x); }

SIMD_INLINE c_sad256_internal v256_sad_u8_init(void) {
  return c_v256_sad_u8_init();
}
SIMD_INLINE c_sad256_internal v256_sad_u8(c_sad256_internal s, v256 a, v256 b) {
  return c_v256_sad_u8(s, a, b);
}
SIMD_INLINE uint32_t v256_sad_u8_sum(c_sad256_internal s) {
  return c_v256_sad_u8_sum(s);
}
SIMD_INLINE c_ssd256_internal v256_ssd_u8_init(void) {
  return c_v256_ssd_u8_init();
}
SIMD_INLINE c_ssd256_internal v256_ssd_u8(c_ssd256_internal s, v256 a, v256 b) {
  return c_v256_ssd_u8(s, a, b);
}
SIMD_INLINE uint32_t v256_ssd_u8_sum(c_ssd256_internal s) {
  return c_v256_ssd_u8_sum(s);
}

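/* Illustrative sketch, not part of the upstream header: the sad/ssd
   accumulators above follow an init / accumulate / sum pattern.  A
   hypothetical helper summing absolute differences over 'rows' rows of 32
   unsigned bytes might look like this. */
SIMD_INLINE uint32_t v256_example_sad_u8_rows(const uint8_t *src,
                                              int src_stride,
                                              const uint8_t *ref,
                                              int ref_stride, int rows) {
  c_sad256_internal s = v256_sad_u8_init();
  int i;
  for (i = 0; i < rows; i++) {
    /* Accumulate the SAD of one 32-byte row from each buffer. */
    s = v256_sad_u8(s, v256_load_unaligned(src + i * src_stride),
                    v256_load_unaligned(ref + i * ref_stride));
  }
  return v256_sad_u8_sum(s);
}
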
SIMD_INLINE c_ssd256_internal_s16 v256_ssd_s16_init(void) {
  return c_v256_ssd_s16_init();
}
SIMD_INLINE c_ssd256_internal_s16 v256_ssd_s16(c_ssd256_internal_s16 s, v256 a,
                                               v256 b) {
  return c_v256_ssd_s16(s, a, b);
}
SIMD_INLINE uint64_t v256_ssd_s16_sum(c_ssd256_internal_s16 s) {
  return c_v256_ssd_s16_sum(s);
}

SIMD_INLINE int64_t v256_dotp_su8(v256 a, v256 b) {
  return c_v256_dotp_su8(a, b);
}
SIMD_INLINE int64_t v256_dotp_s16(v256 a, v256 b) {
  return c_v256_dotp_s16(a, b);
}
SIMD_INLINE int64_t v256_dotp_s32(v256 a, v256 b) {
  return c_v256_dotp_s32(a, b);
}
SIMD_INLINE uint64_t v256_hadd_u8(v256 a) { return c_v256_hadd_u8(a); }

SIMD_INLINE v256 v256_or(v256 a, v256 b) { return c_v256_or(a, b); }
SIMD_INLINE v256 v256_xor(v256 a, v256 b) { return c_v256_xor(a, b); }
SIMD_INLINE v256 v256_and(v256 a, v256 b) { return c_v256_and(a, b); }
SIMD_INLINE v256 v256_andn(v256 a, v256 b) { return c_v256_andn(a, b); }

SIMD_INLINE v256 v256_add_8(v256 a, v256 b) { return c_v256_add_8(a, b); }
SIMD_INLINE v256 v256_add_16(v256 a, v256 b) { return c_v256_add_16(a, b); }
SIMD_INLINE v256 v256_sadd_s8(v256 a, v256 b) { return c_v256_sadd_s8(a, b); }
SIMD_INLINE v256 v256_sadd_u8(v256 a, v256 b) { return c_v256_sadd_u8(a, b); }
SIMD_INLINE v256 v256_sadd_s16(v256 a, v256 b) { return c_v256_sadd_s16(a, b); }
SIMD_INLINE v256 v256_add_32(v256 a, v256 b) { return c_v256_add_32(a, b); }
SIMD_INLINE v256 v256_add_64(v256 a, v256 b) { return c_v256_add_64(a, b); }
SIMD_INLINE v256 v256_sub_64(v256 a, v256 b) { return c_v256_sub_64(a, b); }
SIMD_INLINE v256 v256_padd_u8(v256 a) { return c_v256_padd_u8(a); }
SIMD_INLINE v256 v256_padd_s16(v256 a) { return c_v256_padd_s16(a); }
SIMD_INLINE v256 v256_sub_8(v256 a, v256 b) { return c_v256_sub_8(a, b); }
SIMD_INLINE v256 v256_ssub_u8(v256 a, v256 b) { return c_v256_ssub_u8(a, b); }
SIMD_INLINE v256 v256_ssub_s8(v256 a, v256 b) { return c_v256_ssub_s8(a, b); }
SIMD_INLINE v256 v256_sub_16(v256 a, v256 b) { return c_v256_sub_16(a, b); }
SIMD_INLINE v256 v256_ssub_s16(v256 a, v256 b) { return c_v256_ssub_s16(a, b); }
SIMD_INLINE v256 v256_ssub_u16(v256 a, v256 b) { return c_v256_ssub_u16(a, b); }
SIMD_INLINE v256 v256_sub_32(v256 a, v256 b) { return c_v256_sub_32(a, b); }
SIMD_INLINE v256 v256_abs_s16(v256 a) { return c_v256_abs_s16(a); }
SIMD_INLINE v256 v256_abs_s8(v256 a) { return c_v256_abs_s8(a); }

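/* Illustrative sketch, not part of the upstream header: the saturating
   subtractions above give a branch-free per-byte absolute difference, a
   common idiom when no dedicated absdiff operation is available.  For each
   byte one of the two saturating differences is zero, so OR-ing them yields
   |a - b|. */
SIMD_INLINE v256 v256_example_absdiff_u8(v256 a, v256 b) {
  return v256_or(v256_ssub_u8(a, b), v256_ssub_u8(b, a));
}
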
SIMD_INLINE v256 v256_mul_s16(v128 a, v128 b) { return c_v256_mul_s16(a, b); }
SIMD_INLINE v256 v256_mullo_s16(v256 a, v256 b) {
  return c_v256_mullo_s16(a, b);
}
SIMD_INLINE v256 v256_mulhi_s16(v256 a, v256 b) {
  return c_v256_mulhi_s16(a, b);
}
SIMD_INLINE v256 v256_mullo_s32(v256 a, v256 b) {
  return c_v256_mullo_s32(a, b);
}
SIMD_INLINE v256 v256_madd_s16(v256 a, v256 b) { return c_v256_madd_s16(a, b); }
SIMD_INLINE v256 v256_madd_us8(v256 a, v256 b) { return c_v256_madd_us8(a, b); }

SIMD_INLINE uint32_t v256_movemask_8(v256 a) { return c_v256_movemask_8(a); }
SIMD_INLINE v256 v256_blend_8(v256 a, v256 b, v256 c) {
  return c_v256_blend_8(a, b, c);
}

SIMD_INLINE v256 v256_avg_u8(v256 a, v256 b) { return c_v256_avg_u8(a, b); }
SIMD_INLINE v256 v256_rdavg_u8(v256 a, v256 b) { return c_v256_rdavg_u8(a, b); }
SIMD_INLINE v256 v256_rdavg_u16(v256 a, v256 b) {
  return c_v256_rdavg_u16(a, b);
}
SIMD_INLINE v256 v256_avg_u16(v256 a, v256 b) { return c_v256_avg_u16(a, b); }
SIMD_INLINE v256 v256_min_u8(v256 a, v256 b) { return c_v256_min_u8(a, b); }
SIMD_INLINE v256 v256_max_u8(v256 a, v256 b) { return c_v256_max_u8(a, b); }
SIMD_INLINE v256 v256_min_s8(v256 a, v256 b) { return c_v256_min_s8(a, b); }
SIMD_INLINE v256 v256_max_s8(v256 a, v256 b) { return c_v256_max_s8(a, b); }
SIMD_INLINE v256 v256_min_s16(v256 a, v256 b) { return c_v256_min_s16(a, b); }
SIMD_INLINE v256 v256_max_s16(v256 a, v256 b) { return c_v256_max_s16(a, b); }
SIMD_INLINE v256 v256_min_s32(v256 a, v256 b) { return c_v256_min_s32(a, b); }
SIMD_INLINE v256 v256_max_s32(v256 a, v256 b) { return c_v256_max_s32(a, b); }

SIMD_INLINE v256 v256_ziplo_8(v256 a, v256 b) { return c_v256_ziplo_8(a, b); }
SIMD_INLINE v256 v256_ziphi_8(v256 a, v256 b) { return c_v256_ziphi_8(a, b); }
SIMD_INLINE v256 v256_ziplo_16(v256 a, v256 b) { return c_v256_ziplo_16(a, b); }
SIMD_INLINE v256 v256_ziphi_16(v256 a, v256 b) { return c_v256_ziphi_16(a, b); }
SIMD_INLINE v256 v256_ziplo_32(v256 a, v256 b) { return c_v256_ziplo_32(a, b); }
SIMD_INLINE v256 v256_ziphi_32(v256 a, v256 b) { return c_v256_ziphi_32(a, b); }
SIMD_INLINE v256 v256_ziplo_64(v256 a, v256 b) { return c_v256_ziplo_64(a, b); }
SIMD_INLINE v256 v256_ziphi_64(v256 a, v256 b) { return c_v256_ziphi_64(a, b); }
SIMD_INLINE v256 v256_ziplo_128(v256 a, v256 b) {
  return c_v256_ziplo_128(a, b);
}
SIMD_INLINE v256 v256_ziphi_128(v256 a, v256 b) {
  return c_v256_ziphi_128(a, b);
}
SIMD_INLINE v256 v256_zip_8(v128 a, v128 b) { return c_v256_zip_8(a, b); }
SIMD_INLINE v256 v256_zip_16(v128 a, v128 b) { return c_v256_zip_16(a, b); }
SIMD_INLINE v256 v256_zip_32(v128 a, v128 b) { return c_v256_zip_32(a, b); }
SIMD_INLINE v256 v256_unziplo_8(v256 a, v256 b) {
  return c_v256_unziplo_8(a, b);
}
SIMD_INLINE v256 v256_unziphi_8(v256 a, v256 b) {
  return c_v256_unziphi_8(a, b);
}
SIMD_INLINE v256 v256_unziplo_16(v256 a, v256 b) {
  return c_v256_unziplo_16(a, b);
}
SIMD_INLINE v256 v256_unziphi_16(v256 a, v256 b) {
  return c_v256_unziphi_16(a, b);
}
SIMD_INLINE v256 v256_unziplo_32(v256 a, v256 b) {
  return c_v256_unziplo_32(a, b);
}
SIMD_INLINE v256 v256_unziphi_32(v256 a, v256 b) {
  return c_v256_unziphi_32(a, b);
}
SIMD_INLINE v256 v256_unziplo_64(v256 a, v256 b) {
  return c_v256_unziplo_64(a, b);
}
SIMD_INLINE v256 v256_unziphi_64(v256 a, v256 b) {
  return c_v256_unziphi_64(a, b);
}
SIMD_INLINE v256 v256_unpack_u8_s16(v128 a) { return c_v256_unpack_u8_s16(a); }
SIMD_INLINE v256 v256_unpacklo_u8_s16(v256 a) {
  return c_v256_unpacklo_u8_s16(a);
}
SIMD_INLINE v256 v256_unpackhi_u8_s16(v256 a) {
  return c_v256_unpackhi_u8_s16(a);
}
SIMD_INLINE v256 v256_unpack_s8_s16(v128 a) { return c_v256_unpack_s8_s16(a); }
SIMD_INLINE v256 v256_unpacklo_s8_s16(v256 a) {
  return c_v256_unpacklo_s8_s16(a);
}
SIMD_INLINE v256 v256_unpackhi_s8_s16(v256 a) {
  return c_v256_unpackhi_s8_s16(a);
}
SIMD_INLINE v256 v256_pack_s32_s16(v256 a, v256 b) {
  return c_v256_pack_s32_s16(a, b);
}
SIMD_INLINE v256 v256_pack_s32_u16(v256 a, v256 b) {
  return c_v256_pack_s32_u16(a, b);
}
SIMD_INLINE v256 v256_pack_s16_u8(v256 a, v256 b) {
  return c_v256_pack_s16_u8(a, b);
}
SIMD_INLINE v256 v256_pack_s16_s8(v256 a, v256 b) {
  return c_v256_pack_s16_s8(a, b);
}
SIMD_INLINE v256 v256_unpack_u16_s32(v128 a) {
  return c_v256_unpack_u16_s32(a);
}
SIMD_INLINE v256 v256_unpack_s16_s32(v128 a) {
  return c_v256_unpack_s16_s32(a);
}
SIMD_INLINE v256 v256_unpacklo_u16_s32(v256 a) {
  return c_v256_unpacklo_u16_s32(a);
}
SIMD_INLINE v256 v256_unpacklo_s16_s32(v256 a) {
  return c_v256_unpacklo_s16_s32(a);
}
SIMD_INLINE v256 v256_unpackhi_u16_s32(v256 a) {
  return c_v256_unpackhi_u16_s32(a);
}
SIMD_INLINE v256 v256_unpackhi_s16_s32(v256 a) {
  return c_v256_unpackhi_s16_s32(a);
}
SIMD_INLINE v256 v256_shuffle_8(v256 a, v256 pattern) {
  return c_v256_shuffle_8(a, pattern);
}
SIMD_INLINE v256 v256_wideshuffle_8(v256 a, v256 b, v256 pattern) {
  return c_v256_wideshuffle_8(a, b, pattern);
}
SIMD_INLINE v256 v256_pshuffle_8(v256 a, v256 pattern) {
  return c_v256_pshuffle_8(a, pattern);
}

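/* Illustrative sketch, not part of the upstream header: a typical use of the
   widening unpacks and saturating packs above is to lift unsigned bytes to
   16 bits, operate at the wider precision, then pack back with unsigned
   saturation.  This assumes v256_pack_s16_u8(hi, lo) places 'hi' in the upper
   128 bits, as in the C reference implementation. */
SIMD_INLINE v256 v256_example_add_u8_with_s16_offset(v256 a, v256 offset16) {
  const v256 lo = v256_add_16(v256_unpacklo_u8_s16(a), offset16);
  const v256 hi = v256_add_16(v256_unpackhi_u8_s16(a), offset16);
  return v256_pack_s16_u8(hi, lo);
}
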
SIMD_INLINE v256 v256_cmpgt_s8(v256 a, v256 b) { return c_v256_cmpgt_s8(a, b); }
SIMD_INLINE v256 v256_cmplt_s8(v256 a, v256 b) { return c_v256_cmplt_s8(a, b); }
SIMD_INLINE v256 v256_cmpeq_8(v256 a, v256 b) { return c_v256_cmpeq_8(a, b); }
SIMD_INLINE v256 v256_cmpgt_s16(v256 a, v256 b) {
  return c_v256_cmpgt_s16(a, b);
}
SIMD_INLINE v256 v256_cmplt_s16(v256 a, v256 b) {
  return c_v256_cmplt_s16(a, b);
}
SIMD_INLINE v256 v256_cmpeq_16(v256 a, v256 b) { return c_v256_cmpeq_16(a, b); }
SIMD_INLINE v256 v256_cmpeq_32(v256 a, v256 b) { return c_v256_cmpeq_32(a, b); }

SIMD_INLINE v256 v256_cmpgt_s32(v256 a, v256 b) {
  return c_v256_cmpgt_s32(a, b);
}
SIMD_INLINE v256 v256_cmplt_s32(v256 a, v256 b) {
  return c_v256_cmplt_s32(a, b);
}
SIMD_INLINE v256 v256_shl_8(v256 a, unsigned int c) {
  return c_v256_shl_8(a, c);
}
SIMD_INLINE v256 v256_shr_u8(v256 a, unsigned int c) {
  return c_v256_shr_u8(a, c);
}
SIMD_INLINE v256 v256_shr_s8(v256 a, unsigned int c) {
  return c_v256_shr_s8(a, c);
}
SIMD_INLINE v256 v256_shl_16(v256 a, unsigned int c) {
  return c_v256_shl_16(a, c);
}
SIMD_INLINE v256 v256_shr_u16(v256 a, unsigned int c) {
  return c_v256_shr_u16(a, c);
}
SIMD_INLINE v256 v256_shr_s16(v256 a, unsigned int c) {
  return c_v256_shr_s16(a, c);
}
SIMD_INLINE v256 v256_shl_32(v256 a, unsigned int c) {
  return c_v256_shl_32(a, c);
}
SIMD_INLINE v256 v256_shr_u32(v256 a, unsigned int c) {
  return c_v256_shr_u32(a, c);
}
SIMD_INLINE v256 v256_shr_s32(v256 a, unsigned int c) {
  return c_v256_shr_s32(a, c);
}
SIMD_INLINE v256 v256_shl_64(v256 a, unsigned int c) {
  return c_v256_shl_64(a, c);
}
SIMD_INLINE v256 v256_shr_u64(v256 a, unsigned int c) {
  return c_v256_shr_u64(a, c);
}
SIMD_INLINE v256 v256_shr_s64(v256 a, unsigned int c) {
  return c_v256_shr_s64(a, c);
}

SIMD_INLINE v256 v256_shr_n_byte(v256 a, unsigned int n) {
  return c_v256_shr_n_byte(a, n);
}
SIMD_INLINE v256 v256_shl_n_byte(v256 a, unsigned int n) {
  return c_v256_shl_n_byte(a, n);
}
SIMD_INLINE v256 v256_shl_n_8(v256 a, unsigned int n) {
  return c_v256_shl_n_8(a, n);
}
SIMD_INLINE v256 v256_shl_n_16(v256 a, unsigned int n) {
  return c_v256_shl_n_16(a, n);
}
SIMD_INLINE v256 v256_shl_n_32(v256 a, unsigned int n) {
  return c_v256_shl_n_32(a, n);
}
SIMD_INLINE v256 v256_shl_n_64(v256 a, unsigned int n) {
  return c_v256_shl_n_64(a, n);
}
SIMD_INLINE v256 v256_shr_n_u8(v256 a, unsigned int n) {
  return c_v256_shr_n_u8(a, n);
}
SIMD_INLINE v256 v256_shr_n_u16(v256 a, unsigned int n) {
  return c_v256_shr_n_u16(a, n);
}
SIMD_INLINE v256 v256_shr_n_u32(v256 a, unsigned int n) {
  return c_v256_shr_n_u32(a, n);
}
SIMD_INLINE v256 v256_shr_n_u64(v256 a, unsigned int n) {
  return c_v256_shr_n_u64(a, n);
}
SIMD_INLINE v256 v256_shr_n_s8(v256 a, unsigned int n) {
  return c_v256_shr_n_s8(a, n);
}
SIMD_INLINE v256 v256_shr_n_s16(v256 a, unsigned int n) {
  return c_v256_shr_n_s16(a, n);
}
SIMD_INLINE v256 v256_shr_n_s32(v256 a, unsigned int n) {
  return c_v256_shr_n_s32(a, n);
}
SIMD_INLINE v256 v256_shr_n_s64(v256 a, unsigned int n) {
  return c_v256_shr_n_s64(a, n);
}

SIMD_INLINE v256 v256_shr_n_word(v256 a, unsigned int n) {
  return c_v256_shr_n_word(a, n);
}
SIMD_INLINE v256 v256_shl_n_word(v256 a, unsigned int n) {
  return c_v256_shl_n_word(a, n);
}

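/* Illustrative sketch, not part of the upstream header: a rounding right
   shift of signed 16-bit lanes by a fixed amount, written with the _n_
   variants on the assumption that the optimised SIMD back-ends expect those
   shift counts to be compile-time constants (the C fallback accepts any
   value).  The rounding bias 1 << (shift - 1) is added before shifting. */
SIMD_INLINE v256 v256_example_round_shr_s16_by_2(v256 a) {
  return v256_shr_n_s16(v256_add_16(a, v256_dup_16(1 << 1)), 2);
}
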
typedef uint32_t sad256_internal_u16;
SIMD_INLINE sad256_internal_u16 v256_sad_u16_init(void) {
  return c_v256_sad_u16_init();
}
SIMD_INLINE sad256_internal_u16 v256_sad_u16(sad256_internal_u16 s, v256 a,
                                             v256 b) {
  return c_v256_sad_u16(s, a, b);
}
SIMD_INLINE uint32_t v256_sad_u16_sum(sad256_internal_u16 s) {
  return c_v256_sad_u16_sum(s);
}

#endif  // AOM_AOM_DSP_SIMD_V256_INTRINSICS_H_
James Zerne1cbb132018-08-22 14:10:36 -0700377#endif // AOM_AOM_DSP_SIMD_V256_INTRINSICS_H_