/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved.
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

12 #ifndef AOM_AOM_DSP_SIMD_V128_INTRINSICS_H_
13 #define AOM_AOM_DSP_SIMD_V128_INTRINSICS_H_
14
15 #include <stdio.h>
16 #include <stdlib.h>
17 #include <string.h>
18
19 #include "aom_dsp/simd/v128_intrinsics_c.h"
20 #include "aom_dsp/simd/v64_intrinsics.h"
21
22 /* Fallback to plain, unoptimised C. */
23
24 typedef c_v128 v128;
25
v128_low_u32(v128 a)26 SIMD_INLINE uint32_t v128_low_u32(v128 a) { return c_v128_low_u32(a); }
v128_low_v64(v128 a)27 SIMD_INLINE v64 v128_low_v64(v128 a) { return c_v128_low_v64(a); }
v128_high_v64(v128 a)28 SIMD_INLINE v64 v128_high_v64(v128 a) { return c_v128_high_v64(a); }
v128_from_64(uint64_t hi,uint64_t lo)29 SIMD_INLINE v128 v128_from_64(uint64_t hi, uint64_t lo) {
30 return c_v128_from_64(hi, lo);
31 }
v128_from_v64(v64 hi,v64 lo)32 SIMD_INLINE v128 v128_from_v64(v64 hi, v64 lo) {
33 return c_v128_from_v64(hi, lo);
34 }
v128_from_32(uint32_t a,uint32_t b,uint32_t c,uint32_t d)35 SIMD_INLINE v128 v128_from_32(uint32_t a, uint32_t b, uint32_t c, uint32_t d) {
36 return c_v128_from_32(a, b, c, d);
37 }
38
v128_load_unaligned(const void * p)39 SIMD_INLINE v128 v128_load_unaligned(const void *p) {
40 return c_v128_load_unaligned(p);
41 }
v128_load_aligned(const void * p)42 SIMD_INLINE v128 v128_load_aligned(const void *p) {
43 return c_v128_load_aligned(p);
44 }
45
v128_store_unaligned(void * p,v128 a)46 SIMD_INLINE void v128_store_unaligned(void *p, v128 a) {
47 c_v128_store_unaligned(p, a);
48 }
v128_store_aligned(void * p,v128 a)49 SIMD_INLINE void v128_store_aligned(void *p, v128 a) {
50 c_v128_store_aligned(p, a);
51 }
52
v128_align(v128 a,v128 b,unsigned int c)53 SIMD_INLINE v128 v128_align(v128 a, v128 b, unsigned int c) {
54 return c_v128_align(a, b, c);
55 }
56
v128_zero(void)57 SIMD_INLINE v128 v128_zero(void) { return c_v128_zero(); }
v128_dup_8(uint8_t x)58 SIMD_INLINE v128 v128_dup_8(uint8_t x) { return c_v128_dup_8(x); }
v128_dup_16(uint16_t x)59 SIMD_INLINE v128 v128_dup_16(uint16_t x) { return c_v128_dup_16(x); }
v128_dup_32(uint32_t x)60 SIMD_INLINE v128 v128_dup_32(uint32_t x) { return c_v128_dup_32(x); }
v128_dup_64(uint64_t x)61 SIMD_INLINE v128 v128_dup_64(uint64_t x) { return c_v128_dup_64(x); }
62
v128_sad_u8_init(void)63 SIMD_INLINE c_sad128_internal v128_sad_u8_init(void) {
64 return c_v128_sad_u8_init();
65 }
v128_sad_u8(c_sad128_internal s,v128 a,v128 b)66 SIMD_INLINE c_sad128_internal v128_sad_u8(c_sad128_internal s, v128 a, v128 b) {
67 return c_v128_sad_u8(s, a, b);
68 }
v128_sad_u8_sum(c_sad128_internal s)69 SIMD_INLINE uint32_t v128_sad_u8_sum(c_sad128_internal s) {
70 return c_v128_sad_u8_sum(s);
71 }
v128_ssd_u8_init(void)72 SIMD_INLINE c_ssd128_internal v128_ssd_u8_init(void) {
73 return c_v128_ssd_u8_init();
74 }
v128_ssd_u8(c_ssd128_internal s,v128 a,v128 b)75 SIMD_INLINE c_ssd128_internal v128_ssd_u8(c_ssd128_internal s, v128 a, v128 b) {
76 return c_v128_ssd_u8(s, a, b);
77 }
v128_ssd_u8_sum(c_ssd128_internal s)78 SIMD_INLINE uint32_t v128_ssd_u8_sum(c_ssd128_internal s) {
79 return c_v128_ssd_u8_sum(s);
80 }
v128_dotp_su8(v128 a,v128 b)81 SIMD_INLINE int64_t v128_dotp_su8(v128 a, v128 b) {
82 return c_v128_dotp_su8(a, b);
83 }
v128_dotp_s16(v128 a,v128 b)84 SIMD_INLINE int64_t v128_dotp_s16(v128 a, v128 b) {
85 return c_v128_dotp_s16(a, b);
86 }
v128_dotp_s32(v128 a,v128 b)87 SIMD_INLINE int64_t v128_dotp_s32(v128 a, v128 b) {
88 return c_v128_dotp_s32(a, b);
89 }
v128_hadd_u8(v128 a)90 SIMD_INLINE uint64_t v128_hadd_u8(v128 a) { return c_v128_hadd_u8(a); }
91
v128_or(v128 a,v128 b)92 SIMD_INLINE v128 v128_or(v128 a, v128 b) { return c_v128_or(a, b); }
v128_xor(v128 a,v128 b)93 SIMD_INLINE v128 v128_xor(v128 a, v128 b) { return c_v128_xor(a, b); }
v128_and(v128 a,v128 b)94 SIMD_INLINE v128 v128_and(v128 a, v128 b) { return c_v128_and(a, b); }
v128_andn(v128 a,v128 b)95 SIMD_INLINE v128 v128_andn(v128 a, v128 b) { return c_v128_andn(a, b); }
96
v128_add_8(v128 a,v128 b)97 SIMD_INLINE v128 v128_add_8(v128 a, v128 b) { return c_v128_add_8(a, b); }
v128_add_16(v128 a,v128 b)98 SIMD_INLINE v128 v128_add_16(v128 a, v128 b) { return c_v128_add_16(a, b); }
v128_sadd_u8(v128 a,v128 b)99 SIMD_INLINE v128 v128_sadd_u8(v128 a, v128 b) { return c_v128_sadd_u8(a, b); }
v128_sadd_s8(v128 a,v128 b)100 SIMD_INLINE v128 v128_sadd_s8(v128 a, v128 b) { return c_v128_sadd_s8(a, b); }
v128_sadd_s16(v128 a,v128 b)101 SIMD_INLINE v128 v128_sadd_s16(v128 a, v128 b) { return c_v128_sadd_s16(a, b); }
v128_add_32(v128 a,v128 b)102 SIMD_INLINE v128 v128_add_32(v128 a, v128 b) { return c_v128_add_32(a, b); }
v128_add_64(v128 a,v128 b)103 SIMD_INLINE v128 v128_add_64(v128 a, v128 b) { return c_v128_add_64(a, b); }
v128_padd_u8(v128 a)104 SIMD_INLINE v128 v128_padd_u8(v128 a) { return c_v128_padd_u8(a); }
v128_padd_s16(v128 a)105 SIMD_INLINE v128 v128_padd_s16(v128 a) { return c_v128_padd_s16(a); }
v128_sub_8(v128 a,v128 b)106 SIMD_INLINE v128 v128_sub_8(v128 a, v128 b) { return c_v128_sub_8(a, b); }
v128_ssub_u8(v128 a,v128 b)107 SIMD_INLINE v128 v128_ssub_u8(v128 a, v128 b) { return c_v128_ssub_u8(a, b); }
v128_ssub_s8(v128 a,v128 b)108 SIMD_INLINE v128 v128_ssub_s8(v128 a, v128 b) { return c_v128_ssub_s8(a, b); }
v128_sub_16(v128 a,v128 b)109 SIMD_INLINE v128 v128_sub_16(v128 a, v128 b) { return c_v128_sub_16(a, b); }
v128_ssub_s16(v128 a,v128 b)110 SIMD_INLINE v128 v128_ssub_s16(v128 a, v128 b) { return c_v128_ssub_s16(a, b); }
v128_ssub_u16(v128 a,v128 b)111 SIMD_INLINE v128 v128_ssub_u16(v128 a, v128 b) { return c_v128_ssub_u16(a, b); }
v128_sub_32(v128 a,v128 b)112 SIMD_INLINE v128 v128_sub_32(v128 a, v128 b) { return c_v128_sub_32(a, b); }
v128_sub_64(v128 a,v128 b)113 SIMD_INLINE v128 v128_sub_64(v128 a, v128 b) { return c_v128_sub_64(a, b); }
v128_abs_s16(v128 a)114 SIMD_INLINE v128 v128_abs_s16(v128 a) { return c_v128_abs_s16(a); }
v128_abs_s8(v128 a)115 SIMD_INLINE v128 v128_abs_s8(v128 a) { return c_v128_abs_s8(a); }
116
v128_mul_s16(v64 a,v64 b)117 SIMD_INLINE v128 v128_mul_s16(v64 a, v64 b) { return c_v128_mul_s16(a, b); }
v128_mullo_s16(v128 a,v128 b)118 SIMD_INLINE v128 v128_mullo_s16(v128 a, v128 b) {
119 return c_v128_mullo_s16(a, b);
120 }
v128_mulhi_s16(v128 a,v128 b)121 SIMD_INLINE v128 v128_mulhi_s16(v128 a, v128 b) {
122 return c_v128_mulhi_s16(a, b);
123 }
v128_mullo_s32(v128 a,v128 b)124 SIMD_INLINE v128 v128_mullo_s32(v128 a, v128 b) {
125 return c_v128_mullo_s32(a, b);
126 }
v128_madd_s16(v128 a,v128 b)127 SIMD_INLINE v128 v128_madd_s16(v128 a, v128 b) { return c_v128_madd_s16(a, b); }
v128_madd_us8(v128 a,v128 b)128 SIMD_INLINE v128 v128_madd_us8(v128 a, v128 b) { return c_v128_madd_us8(a, b); }
129
v128_movemask_8(v128 a)130 SIMD_INLINE uint32_t v128_movemask_8(v128 a) { return c_v128_movemask_8(a); }
v128_blend_8(v128 a,v128 b,v128 c)131 SIMD_INLINE v128 v128_blend_8(v128 a, v128 b, v128 c) {
132 return c_v128_blend_8(a, b, c);
133 }
134
v128_avg_u8(v128 a,v128 b)135 SIMD_INLINE v128 v128_avg_u8(v128 a, v128 b) { return c_v128_avg_u8(a, b); }
v128_rdavg_u8(v128 a,v128 b)136 SIMD_INLINE v128 v128_rdavg_u8(v128 a, v128 b) { return c_v128_rdavg_u8(a, b); }
v128_rdavg_u16(v128 a,v128 b)137 SIMD_INLINE v128 v128_rdavg_u16(v128 a, v128 b) {
138 return c_v128_rdavg_u16(a, b);
139 }
v128_avg_u16(v128 a,v128 b)140 SIMD_INLINE v128 v128_avg_u16(v128 a, v128 b) { return c_v128_avg_u16(a, b); }
v128_min_u8(v128 a,v128 b)141 SIMD_INLINE v128 v128_min_u8(v128 a, v128 b) { return c_v128_min_u8(a, b); }
v128_max_u8(v128 a,v128 b)142 SIMD_INLINE v128 v128_max_u8(v128 a, v128 b) { return c_v128_max_u8(a, b); }
v128_min_s8(v128 a,v128 b)143 SIMD_INLINE v128 v128_min_s8(v128 a, v128 b) { return c_v128_min_s8(a, b); }
v128_max_s8(v128 a,v128 b)144 SIMD_INLINE v128 v128_max_s8(v128 a, v128 b) { return c_v128_max_s8(a, b); }
v128_min_s16(v128 a,v128 b)145 SIMD_INLINE v128 v128_min_s16(v128 a, v128 b) { return c_v128_min_s16(a, b); }
v128_max_s16(v128 a,v128 b)146 SIMD_INLINE v128 v128_max_s16(v128 a, v128 b) { return c_v128_max_s16(a, b); }
v128_min_s32(v128 a,v128 b)147 SIMD_INLINE v128 v128_min_s32(v128 a, v128 b) { return c_v128_min_s32(a, b); }
v128_max_s32(v128 a,v128 b)148 SIMD_INLINE v128 v128_max_s32(v128 a, v128 b) { return c_v128_max_s32(a, b); }
149
v128_ziplo_8(v128 a,v128 b)150 SIMD_INLINE v128 v128_ziplo_8(v128 a, v128 b) { return c_v128_ziplo_8(a, b); }
v128_ziphi_8(v128 a,v128 b)151 SIMD_INLINE v128 v128_ziphi_8(v128 a, v128 b) { return c_v128_ziphi_8(a, b); }
v128_ziplo_16(v128 a,v128 b)152 SIMD_INLINE v128 v128_ziplo_16(v128 a, v128 b) { return c_v128_ziplo_16(a, b); }
v128_ziphi_16(v128 a,v128 b)153 SIMD_INLINE v128 v128_ziphi_16(v128 a, v128 b) { return c_v128_ziphi_16(a, b); }
v128_ziplo_32(v128 a,v128 b)154 SIMD_INLINE v128 v128_ziplo_32(v128 a, v128 b) { return c_v128_ziplo_32(a, b); }
v128_ziphi_32(v128 a,v128 b)155 SIMD_INLINE v128 v128_ziphi_32(v128 a, v128 b) { return c_v128_ziphi_32(a, b); }
v128_ziplo_64(v128 a,v128 b)156 SIMD_INLINE v128 v128_ziplo_64(v128 a, v128 b) { return c_v128_ziplo_64(a, b); }
v128_ziphi_64(v128 a,v128 b)157 SIMD_INLINE v128 v128_ziphi_64(v128 a, v128 b) { return c_v128_ziphi_64(a, b); }
v128_zip_8(v64 a,v64 b)158 SIMD_INLINE v128 v128_zip_8(v64 a, v64 b) { return c_v128_zip_8(a, b); }
v128_zip_16(v64 a,v64 b)159 SIMD_INLINE v128 v128_zip_16(v64 a, v64 b) { return c_v128_zip_16(a, b); }
v128_zip_32(v64 a,v64 b)160 SIMD_INLINE v128 v128_zip_32(v64 a, v64 b) { return c_v128_zip_32(a, b); }
v128_unziplo_8(v128 a,v128 b)161 SIMD_INLINE v128 v128_unziplo_8(v128 a, v128 b) {
162 return c_v128_unziplo_8(a, b);
163 }
v128_unziphi_8(v128 a,v128 b)164 SIMD_INLINE v128 v128_unziphi_8(v128 a, v128 b) {
165 return c_v128_unziphi_8(a, b);
166 }
v128_unziplo_16(v128 a,v128 b)167 SIMD_INLINE v128 v128_unziplo_16(v128 a, v128 b) {
168 return c_v128_unziplo_16(a, b);
169 }
v128_unziphi_16(v128 a,v128 b)170 SIMD_INLINE v128 v128_unziphi_16(v128 a, v128 b) {
171 return c_v128_unziphi_16(a, b);
172 }
v128_unziplo_32(v128 a,v128 b)173 SIMD_INLINE v128 v128_unziplo_32(v128 a, v128 b) {
174 return c_v128_unziplo_32(a, b);
175 }
v128_unziphi_32(v128 a,v128 b)176 SIMD_INLINE v128 v128_unziphi_32(v128 a, v128 b) {
177 return c_v128_unziphi_32(a, b);
178 }
v128_unpack_u8_s16(v64 a)179 SIMD_INLINE v128 v128_unpack_u8_s16(v64 a) { return c_v128_unpack_u8_s16(a); }
v128_unpacklo_u8_s16(v128 a)180 SIMD_INLINE v128 v128_unpacklo_u8_s16(v128 a) {
181 return c_v128_unpacklo_u8_s16(a);
182 }
v128_unpackhi_u8_s16(v128 a)183 SIMD_INLINE v128 v128_unpackhi_u8_s16(v128 a) {
184 return c_v128_unpackhi_u8_s16(a);
185 }
v128_unpack_s8_s16(v64 a)186 SIMD_INLINE v128 v128_unpack_s8_s16(v64 a) { return c_v128_unpack_s8_s16(a); }
v128_unpacklo_s8_s16(v128 a)187 SIMD_INLINE v128 v128_unpacklo_s8_s16(v128 a) {
188 return c_v128_unpacklo_s8_s16(a);
189 }
v128_unpackhi_s8_s16(v128 a)190 SIMD_INLINE v128 v128_unpackhi_s8_s16(v128 a) {
191 return c_v128_unpackhi_s8_s16(a);
192 }
v128_pack_s32_s16(v128 a,v128 b)193 SIMD_INLINE v128 v128_pack_s32_s16(v128 a, v128 b) {
194 return c_v128_pack_s32_s16(a, b);
195 }
v128_pack_s32_u16(v128 a,v128 b)196 SIMD_INLINE v128 v128_pack_s32_u16(v128 a, v128 b) {
197 return c_v128_pack_s32_u16(a, b);
198 }
v128_pack_s16_u8(v128 a,v128 b)199 SIMD_INLINE v128 v128_pack_s16_u8(v128 a, v128 b) {
200 return c_v128_pack_s16_u8(a, b);
201 }
v128_pack_s16_s8(v128 a,v128 b)202 SIMD_INLINE v128 v128_pack_s16_s8(v128 a, v128 b) {
203 return c_v128_pack_s16_s8(a, b);
204 }
v128_unpack_u16_s32(v64 a)205 SIMD_INLINE v128 v128_unpack_u16_s32(v64 a) { return c_v128_unpack_u16_s32(a); }
v128_unpack_s16_s32(v64 a)206 SIMD_INLINE v128 v128_unpack_s16_s32(v64 a) { return c_v128_unpack_s16_s32(a); }
v128_unpacklo_u16_s32(v128 a)207 SIMD_INLINE v128 v128_unpacklo_u16_s32(v128 a) {
208 return c_v128_unpacklo_u16_s32(a);
209 }
v128_unpacklo_s16_s32(v128 a)210 SIMD_INLINE v128 v128_unpacklo_s16_s32(v128 a) {
211 return c_v128_unpacklo_s16_s32(a);
212 }
v128_unpackhi_u16_s32(v128 a)213 SIMD_INLINE v128 v128_unpackhi_u16_s32(v128 a) {
214 return c_v128_unpackhi_u16_s32(a);
215 }
v128_unpackhi_s16_s32(v128 a)216 SIMD_INLINE v128 v128_unpackhi_s16_s32(v128 a) {
217 return c_v128_unpackhi_s16_s32(a);
218 }
v128_shuffle_8(v128 a,v128 pattern)219 SIMD_INLINE v128 v128_shuffle_8(v128 a, v128 pattern) {
220 return c_v128_shuffle_8(a, pattern);
221 }
222
v128_cmpgt_s8(v128 a,v128 b)223 SIMD_INLINE v128 v128_cmpgt_s8(v128 a, v128 b) { return c_v128_cmpgt_s8(a, b); }
v128_cmplt_s8(v128 a,v128 b)224 SIMD_INLINE v128 v128_cmplt_s8(v128 a, v128 b) { return c_v128_cmplt_s8(a, b); }
v128_cmpeq_8(v128 a,v128 b)225 SIMD_INLINE v128 v128_cmpeq_8(v128 a, v128 b) { return c_v128_cmpeq_8(a, b); }
v128_cmpgt_s16(v128 a,v128 b)226 SIMD_INLINE v128 v128_cmpgt_s16(v128 a, v128 b) {
227 return c_v128_cmpgt_s16(a, b);
228 }
v128_cmplt_s16(v128 a,v128 b)229 SIMD_INLINE v128 v128_cmplt_s16(v128 a, v128 b) {
230 return c_v128_cmplt_s16(a, b);
231 }
v128_cmpeq_16(v128 a,v128 b)232 SIMD_INLINE v128 v128_cmpeq_16(v128 a, v128 b) { return c_v128_cmpeq_16(a, b); }
233
v128_cmpgt_s32(v128 a,v128 b)234 SIMD_INLINE v128 v128_cmpgt_s32(v128 a, v128 b) {
235 return c_v128_cmpgt_s32(a, b);
236 }
v128_cmplt_s32(v128 a,v128 b)237 SIMD_INLINE v128 v128_cmplt_s32(v128 a, v128 b) {
238 return c_v128_cmplt_s32(a, b);
239 }
v128_cmpeq_32(v128 a,v128 b)240 SIMD_INLINE v128 v128_cmpeq_32(v128 a, v128 b) { return c_v128_cmpeq_32(a, b); }
241
v128_shl_8(v128 a,unsigned int c)242 SIMD_INLINE v128 v128_shl_8(v128 a, unsigned int c) {
243 return c_v128_shl_8(a, c);
244 }
v128_shr_u8(v128 a,unsigned int c)245 SIMD_INLINE v128 v128_shr_u8(v128 a, unsigned int c) {
246 return c_v128_shr_u8(a, c);
247 }
v128_shr_s8(v128 a,unsigned int c)248 SIMD_INLINE v128 v128_shr_s8(v128 a, unsigned int c) {
249 return c_v128_shr_s8(a, c);
250 }
v128_shl_16(v128 a,unsigned int c)251 SIMD_INLINE v128 v128_shl_16(v128 a, unsigned int c) {
252 return c_v128_shl_16(a, c);
253 }
v128_shr_u16(v128 a,unsigned int c)254 SIMD_INLINE v128 v128_shr_u16(v128 a, unsigned int c) {
255 return c_v128_shr_u16(a, c);
256 }
v128_shr_s16(v128 a,unsigned int c)257 SIMD_INLINE v128 v128_shr_s16(v128 a, unsigned int c) {
258 return c_v128_shr_s16(a, c);
259 }
v128_shl_32(v128 a,unsigned int c)260 SIMD_INLINE v128 v128_shl_32(v128 a, unsigned int c) {
261 return c_v128_shl_32(a, c);
262 }
v128_shr_u32(v128 a,unsigned int c)263 SIMD_INLINE v128 v128_shr_u32(v128 a, unsigned int c) {
264 return c_v128_shr_u32(a, c);
265 }
v128_shr_s32(v128 a,unsigned int c)266 SIMD_INLINE v128 v128_shr_s32(v128 a, unsigned int c) {
267 return c_v128_shr_s32(a, c);
268 }
v128_shl_64(v128 a,unsigned int c)269 SIMD_INLINE v128 v128_shl_64(v128 a, unsigned int c) {
270 return c_v128_shl_64(a, c);
271 }
v128_shr_u64(v128 a,unsigned int c)272 SIMD_INLINE v128 v128_shr_u64(v128 a, unsigned int c) {
273 return c_v128_shr_u64(a, c);
274 }
v128_shr_s64(v128 a,unsigned int c)275 SIMD_INLINE v128 v128_shr_s64(v128 a, unsigned int c) {
276 return c_v128_shr_s64(a, c);
277 }
278
v128_shr_n_byte(v128 a,unsigned int n)279 SIMD_INLINE v128 v128_shr_n_byte(v128 a, unsigned int n) {
280 return c_v128_shr_n_byte(a, n);
281 }
v128_shl_n_byte(v128 a,unsigned int n)282 SIMD_INLINE v128 v128_shl_n_byte(v128 a, unsigned int n) {
283 return c_v128_shl_n_byte(a, n);
284 }
v128_shl_n_8(v128 a,unsigned int n)285 SIMD_INLINE v128 v128_shl_n_8(v128 a, unsigned int n) {
286 return c_v128_shl_n_8(a, n);
287 }
v128_shl_n_16(v128 a,unsigned int n)288 SIMD_INLINE v128 v128_shl_n_16(v128 a, unsigned int n) {
289 return c_v128_shl_n_16(a, n);
290 }
v128_shl_n_32(v128 a,unsigned int n)291 SIMD_INLINE v128 v128_shl_n_32(v128 a, unsigned int n) {
292 return c_v128_shl_n_32(a, n);
293 }
v128_shl_n_64(v128 a,unsigned int n)294 SIMD_INLINE v128 v128_shl_n_64(v128 a, unsigned int n) {
295 return c_v128_shl_n_64(a, n);
296 }
v128_shr_n_u8(v128 a,unsigned int n)297 SIMD_INLINE v128 v128_shr_n_u8(v128 a, unsigned int n) {
298 return c_v128_shr_n_u8(a, n);
299 }
v128_shr_n_u16(v128 a,unsigned int n)300 SIMD_INLINE v128 v128_shr_n_u16(v128 a, unsigned int n) {
301 return c_v128_shr_n_u16(a, n);
302 }
v128_shr_n_u32(v128 a,unsigned int n)303 SIMD_INLINE v128 v128_shr_n_u32(v128 a, unsigned int n) {
304 return c_v128_shr_n_u32(a, n);
305 }
v128_shr_n_u64(v128 a,unsigned int n)306 SIMD_INLINE v128 v128_shr_n_u64(v128 a, unsigned int n) {
307 return c_v128_shr_n_u64(a, n);
308 }
v128_shr_n_s8(v128 a,unsigned int n)309 SIMD_INLINE v128 v128_shr_n_s8(v128 a, unsigned int n) {
310 return c_v128_shr_n_s8(a, n);
311 }
v128_shr_n_s16(v128 a,unsigned int n)312 SIMD_INLINE v128 v128_shr_n_s16(v128 a, unsigned int n) {
313 return c_v128_shr_n_s16(a, n);
314 }
v128_shr_n_s32(v128 a,unsigned int n)315 SIMD_INLINE v128 v128_shr_n_s32(v128 a, unsigned int n) {
316 return c_v128_shr_n_s32(a, n);
317 }
v128_shr_n_s64(v128 a,unsigned int n)318 SIMD_INLINE v128 v128_shr_n_s64(v128 a, unsigned int n) {
319 return c_v128_shr_n_s64(a, n);
320 }
321
322 typedef uint32_t sad128_internal_u16;
v128_sad_u16_init(void)323 SIMD_INLINE sad128_internal_u16 v128_sad_u16_init(void) {
324 return c_v128_sad_u16_init();
325 }
v128_sad_u16(sad128_internal_u16 s,v128 a,v128 b)326 SIMD_INLINE sad128_internal_u16 v128_sad_u16(sad128_internal_u16 s, v128 a,
327 v128 b) {
328 return c_v128_sad_u16(s, a, b);
329 }
v128_sad_u16_sum(sad128_internal_u16 s)330 SIMD_INLINE uint32_t v128_sad_u16_sum(sad128_internal_u16 s) {
331 return c_v128_sad_u16_sum(s);
332 }
333
334 typedef uint64_t ssd128_internal_s16;
v128_ssd_s16_init(void)335 SIMD_INLINE ssd128_internal_s16 v128_ssd_s16_init(void) {
336 return c_v128_ssd_s16_init();
337 }
v128_ssd_s16(ssd128_internal_s16 s,v128 a,v128 b)338 SIMD_INLINE ssd128_internal_s16 v128_ssd_s16(ssd128_internal_s16 s, v128 a,
339 v128 b) {
340 return c_v128_ssd_s16(s, a, b);
341 }
v128_ssd_s16_sum(ssd128_internal_s16 s)342 SIMD_INLINE uint64_t v128_ssd_s16_sum(ssd128_internal_s16 s) {
343 return c_v128_ssd_s16_sum(s);
344 }
345
346 #endif // AOM_AOM_DSP_SIMD_V128_INTRINSICS_H_
347