xref: /aosp_15_r20/external/XNNPACK/src/qs8-igemm/gen/4x16c2s4-minmax-rndnu-neon-mlal.c (revision 4bdc94577ba0e567308109d787f7fec7b531ce36)
1 // Auto-generated file. Do not edit!
2 //   Template: src/qs8-igemm/c2-neon-mull-shuffle.c.in
3 //   Generator: tools/xngen
4 //
5 // Copyright 2021 Google LLC
6 //
7 // This source code is licensed under the BSD-style license found in the
8 // LICENSE file in the root directory of this source tree.
9 
10 #include <assert.h>
11 
12 #include <arm_neon.h>
13 
14 #include <xnnpack/gemm.h>
15 #include <xnnpack/math.h>
16 
17 
18 void xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2s4__neon_mlal(
19     size_t mr,
20     size_t nc,
21     size_t kc,
22     size_t ks,
23     const int8_t** restrict a,
24     const void* restrict w,
25     int8_t* restrict c,
26     size_t cm_stride,
27     size_t cn_stride,
28     size_t a_offset,
29     const int8_t* zero,
30     const union xnn_qs8_conv_minmax_params params[restrict XNN_MIN_ELEMENTS(1)]) XNN_OOB_READS
31 {
32   assert(mr != 0);
33   assert(mr <= 4);
34   assert(nc != 0);
35   assert(kc != 0);
36   assert(ks != 0);
37   assert(ks % (4 * sizeof(void*)) == 0);
38   assert(a_offset % sizeof(int8_t) == 0);
39   assert(a != NULL);
40   assert(w != NULL);
41   assert(c != NULL);
42 
43   int8_t* c0 = c;
44   int8_t* c1 = (int8_t*) ((uintptr_t) c0 + cm_stride);
45   if XNN_UNPREDICTABLE(mr < 2) {
46     c1 = c0;
47   }
48   int8_t* c2 = (int8_t*) ((uintptr_t) c1 + cm_stride);
49   if XNN_UNPREDICTABLE(mr <= 2) {
50     c2 = c1;
51   }
52   int8_t* c3 = (int8_t*) ((uintptr_t) c2 + cm_stride);
53   if XNN_UNPREDICTABLE(mr != 4) {
54     c3 = c2;
55   }
56 
57   kc = round_up_po2(kc, 8 * sizeof(int8_t));
58   do {
59     int32x4_t vacc0x0123 = vld1q_s32(w); w = (const int32_t*) w + 4;
60     int32x4_t vacc0x4567 = vld1q_s32(w); w = (const int32_t*) w + 4;
61     int32x4_t vacc0x89AB = vld1q_s32(w); w = (const int32_t*) w + 4;
62     int32x4_t vacc0xCDEF = vld1q_s32(w); w = (const int32_t*) w + 4;
63     int32x4_t vacc1x0123 = vacc0x0123;
64     int32x4_t vacc1x4567 = vacc0x4567;
65     int32x4_t vacc1x89AB = vacc0x89AB;
66     int32x4_t vacc1xCDEF = vacc0xCDEF;
67     int32x4_t vacc2x0123 = vacc0x0123;
68     int32x4_t vacc2x4567 = vacc0x4567;
69     int32x4_t vacc2x89AB = vacc0x89AB;
70     int32x4_t vacc2xCDEF = vacc0xCDEF;
71     int32x4_t vacc3x0123 = vacc0x0123;
72     int32x4_t vacc3x4567 = vacc0x4567;
73     int32x4_t vacc3x89AB = vacc0x89AB;
74     int32x4_t vacc3xCDEF = vacc0xCDEF;
75 
76     size_t p = ks;
77     do {
78       const int8_t* restrict a0 = a[0];
79       if XNN_UNPREDICTABLE(a0 != zero) {
80         a0 = (const int8_t*) ((uintptr_t) a0 + a_offset);
81       }
82       const int8_t* restrict a1 = a[1];
83       if XNN_UNPREDICTABLE(a1 != zero) {
84         a1 = (const int8_t*) ((uintptr_t) a1 + a_offset);
85       }
86       const int8_t* restrict a2 = a[2];
87       if XNN_UNPREDICTABLE(a2 != zero) {
88         a2 = (const int8_t*) ((uintptr_t) a2 + a_offset);
89       }
90       const int8_t* restrict a3 = a[3];
91       if XNN_UNPREDICTABLE(a3 != zero) {
92         a3 = (const int8_t*) ((uintptr_t) a3 + a_offset);
93       }
94       a += 4;
95 
96       size_t k = kc;
97       while (k >= 16 * sizeof(int8_t)) {
98         int8x8_t va0x0 = vld1_s8(a0); a0 += 8;
99         int8x8_t va0x1 = vld1_s8(a0); a0 += 8;
100         int8x8_t va1x0 = vld1_s8(a1); a1 += 8;
101         int8x8_t va1x1 = vld1_s8(a1); a1 += 8;
102         int8x8_t va2x0 = vld1_s8(a2); a2 += 8;
103         int8x8_t va2x1 = vld1_s8(a2); a2 += 8;
104         int8x8_t va3x0 = vld1_s8(a3); a3 += 8;
105         int8x8_t va3x1 = vld1_s8(a3); a3 += 8;
106 
107         const int8x8_t vb0123c0x0 = vld1_s8(w); w = (const int8_t*) w + 8;
108         const int8x8_t vb4567c0x0 = vld1_s8(w); w = (const int8_t*) w + 8;
109         const int8x8_t vb89ABc0x0 = vld1_s8(w); w = (const int8_t*) w + 8;
110         const int8x8_t vbCDEFc0x0 = vld1_s8(w); w = (const int8_t*) w + 8;
111         const int8x8_t vb0123c1x0 = vld1_s8(w); w = (const int8_t*) w + 8;
112         const int8x8_t vb4567c1x0 = vld1_s8(w); w = (const int8_t*) w + 8;
113         const int8x8_t vb89ABc1x0 = vld1_s8(w); w = (const int8_t*) w + 8;
114         const int8x8_t vbCDEFc1x0 = vld1_s8(w); w = (const int8_t*) w + 8;
115         const int8x8_t vb0123c2x0 = vld1_s8(w); w = (const int8_t*) w + 8;
116         const int8x8_t vb4567c2x0 = vld1_s8(w); w = (const int8_t*) w + 8;
117         const int8x8_t vb89ABc2x0 = vld1_s8(w); w = (const int8_t*) w + 8;
118         const int8x8_t vbCDEFc2x0 = vld1_s8(w); w = (const int8_t*) w + 8;
119         const int8x8_t vb0123c3x0 = vld1_s8(w); w = (const int8_t*) w + 8;
120         const int8x8_t vb4567c3x0 = vld1_s8(w); w = (const int8_t*) w + 8;
121         const int8x8_t vb89ABc3x0 = vld1_s8(w); w = (const int8_t*) w + 8;
122         const int8x8_t vbCDEFc3x0 = vld1_s8(w); w = (const int8_t*) w + 8;
123 
124         int16x8_t vprod0x0123c0 = vmull_s8(vb0123c0x0, va0x0);
125         int16x8_t vprod1x0123c0 = vmull_s8(vb0123c0x0, va1x0);
126         int16x8_t vprod2x0123c0 = vmull_s8(vb0123c0x0, va2x0);
127         int16x8_t vprod3x0123c0 = vmull_s8(vb0123c0x0, va3x0);
128         const int8x8_t vb0123c0x1 = vld1_s8(w); w = (const int8_t*) w + 8;
129         vprod0x0123c0 = vmlal_s8(vprod0x0123c0, vb0123c0x1, va0x1);
130         vprod1x0123c0 = vmlal_s8(vprod1x0123c0, vb0123c0x1, va1x1);
131         vprod2x0123c0 = vmlal_s8(vprod2x0123c0, vb0123c0x1, va2x1);
132         vprod3x0123c0 = vmlal_s8(vprod3x0123c0, vb0123c0x1, va3x1);
133         vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c0);
134         vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c0);
135         vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c0);
136         vacc3x0123 = vpadalq_s16(vacc3x0123, vprod3x0123c0);
137         int16x8_t vprod0x4567c0 = vmull_s8(vb4567c0x0, va0x0);
138         int16x8_t vprod1x4567c0 = vmull_s8(vb4567c0x0, va1x0);
139         int16x8_t vprod2x4567c0 = vmull_s8(vb4567c0x0, va2x0);
140         int16x8_t vprod3x4567c0 = vmull_s8(vb4567c0x0, va3x0);
141         const int8x8_t vb4567c0x1 = vld1_s8(w); w = (const int8_t*) w + 8;
142         vprod0x4567c0 = vmlal_s8(vprod0x4567c0, vb4567c0x1, va0x1);
143         vprod1x4567c0 = vmlal_s8(vprod1x4567c0, vb4567c0x1, va1x1);
144         vprod2x4567c0 = vmlal_s8(vprod2x4567c0, vb4567c0x1, va2x1);
145         vprod3x4567c0 = vmlal_s8(vprod3x4567c0, vb4567c0x1, va3x1);
146         vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c0);
147         vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c0);
148         vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c0);
149         vacc3x4567 = vpadalq_s16(vacc3x4567, vprod3x4567c0);
150         int16x8_t vprod0x89ABc0 = vmull_s8(vb89ABc0x0, va0x0);
151         int16x8_t vprod1x89ABc0 = vmull_s8(vb89ABc0x0, va1x0);
152         int16x8_t vprod2x89ABc0 = vmull_s8(vb89ABc0x0, va2x0);
153         int16x8_t vprod3x89ABc0 = vmull_s8(vb89ABc0x0, va3x0);
154         const int8x8_t vb89ABc0x1 = vld1_s8(w); w = (const int8_t*) w + 8;
155         vprod0x89ABc0 = vmlal_s8(vprod0x89ABc0, vb89ABc0x1, va0x1);
156         vprod1x89ABc0 = vmlal_s8(vprod1x89ABc0, vb89ABc0x1, va1x1);
157         vprod2x89ABc0 = vmlal_s8(vprod2x89ABc0, vb89ABc0x1, va2x1);
158         vprod3x89ABc0 = vmlal_s8(vprod3x89ABc0, vb89ABc0x1, va3x1);
159         vacc0x89AB = vpadalq_s16(vacc0x89AB, vprod0x89ABc0);
160         vacc1x89AB = vpadalq_s16(vacc1x89AB, vprod1x89ABc0);
161         vacc2x89AB = vpadalq_s16(vacc2x89AB, vprod2x89ABc0);
162         vacc3x89AB = vpadalq_s16(vacc3x89AB, vprod3x89ABc0);
163         int16x8_t vprod0xCDEFc0 = vmull_s8(vbCDEFc0x0, va0x0);
164         int16x8_t vprod1xCDEFc0 = vmull_s8(vbCDEFc0x0, va1x0);
165         int16x8_t vprod2xCDEFc0 = vmull_s8(vbCDEFc0x0, va2x0);
166         int16x8_t vprod3xCDEFc0 = vmull_s8(vbCDEFc0x0, va3x0);
167         const int8x8_t vbCDEFc0x1 = vld1_s8(w); w = (const int8_t*) w + 8;
168         vprod0xCDEFc0 = vmlal_s8(vprod0xCDEFc0, vbCDEFc0x1, va0x1);
169         vprod1xCDEFc0 = vmlal_s8(vprod1xCDEFc0, vbCDEFc0x1, va1x1);
170         vprod2xCDEFc0 = vmlal_s8(vprod2xCDEFc0, vbCDEFc0x1, va2x1);
171         vprod3xCDEFc0 = vmlal_s8(vprod3xCDEFc0, vbCDEFc0x1, va3x1);
172         vacc0xCDEF = vpadalq_s16(vacc0xCDEF, vprod0xCDEFc0);
173         vacc1xCDEF = vpadalq_s16(vacc1xCDEF, vprod1xCDEFc0);
174         vacc2xCDEF = vpadalq_s16(vacc2xCDEF, vprod2xCDEFc0);
175         vacc3xCDEF = vpadalq_s16(vacc3xCDEF, vprod3xCDEFc0);
176         va0x0 = vext_s8(va0x0, va0x0, 2);
177         va0x1 = vext_s8(va0x1, va0x1, 2);
178         va1x0 = vext_s8(va1x0, va1x0, 2);
179         va1x1 = vext_s8(va1x1, va1x1, 2);
180         va2x0 = vext_s8(va2x0, va2x0, 2);
181         va2x1 = vext_s8(va2x1, va2x1, 2);
182         va3x0 = vext_s8(va3x0, va3x0, 2);
183         va3x1 = vext_s8(va3x1, va3x1, 2);
184         int16x8_t vprod0x0123c1 = vmull_s8(vb0123c1x0, va0x0);
185         int16x8_t vprod1x0123c1 = vmull_s8(vb0123c1x0, va1x0);
186         int16x8_t vprod2x0123c1 = vmull_s8(vb0123c1x0, va2x0);
187         int16x8_t vprod3x0123c1 = vmull_s8(vb0123c1x0, va3x0);
188         const int8x8_t vb0123c1x1 = vld1_s8(w); w = (const int8_t*) w + 8;
189         vprod0x0123c1 = vmlal_s8(vprod0x0123c1, vb0123c1x1, va0x1);
190         vprod1x0123c1 = vmlal_s8(vprod1x0123c1, vb0123c1x1, va1x1);
191         vprod2x0123c1 = vmlal_s8(vprod2x0123c1, vb0123c1x1, va2x1);
192         vprod3x0123c1 = vmlal_s8(vprod3x0123c1, vb0123c1x1, va3x1);
193         vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c1);
194         vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c1);
195         vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c1);
196         vacc3x0123 = vpadalq_s16(vacc3x0123, vprod3x0123c1);
197         int16x8_t vprod0x4567c1 = vmull_s8(vb4567c1x0, va0x0);
198         int16x8_t vprod1x4567c1 = vmull_s8(vb4567c1x0, va1x0);
199         int16x8_t vprod2x4567c1 = vmull_s8(vb4567c1x0, va2x0);
200         int16x8_t vprod3x4567c1 = vmull_s8(vb4567c1x0, va3x0);
201         const int8x8_t vb4567c1x1 = vld1_s8(w); w = (const int8_t*) w + 8;
202         vprod0x4567c1 = vmlal_s8(vprod0x4567c1, vb4567c1x1, va0x1);
203         vprod1x4567c1 = vmlal_s8(vprod1x4567c1, vb4567c1x1, va1x1);
204         vprod2x4567c1 = vmlal_s8(vprod2x4567c1, vb4567c1x1, va2x1);
205         vprod3x4567c1 = vmlal_s8(vprod3x4567c1, vb4567c1x1, va3x1);
206         vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c1);
207         vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c1);
208         vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c1);
209         vacc3x4567 = vpadalq_s16(vacc3x4567, vprod3x4567c1);
210         int16x8_t vprod0x89ABc1 = vmull_s8(vb89ABc1x0, va0x0);
211         int16x8_t vprod1x89ABc1 = vmull_s8(vb89ABc1x0, va1x0);
212         int16x8_t vprod2x89ABc1 = vmull_s8(vb89ABc1x0, va2x0);
213         int16x8_t vprod3x89ABc1 = vmull_s8(vb89ABc1x0, va3x0);
214         const int8x8_t vb89ABc1x1 = vld1_s8(w); w = (const int8_t*) w + 8;
215         vprod0x89ABc1 = vmlal_s8(vprod0x89ABc1, vb89ABc1x1, va0x1);
216         vprod1x89ABc1 = vmlal_s8(vprod1x89ABc1, vb89ABc1x1, va1x1);
217         vprod2x89ABc1 = vmlal_s8(vprod2x89ABc1, vb89ABc1x1, va2x1);
218         vprod3x89ABc1 = vmlal_s8(vprod3x89ABc1, vb89ABc1x1, va3x1);
219         vacc0x89AB = vpadalq_s16(vacc0x89AB, vprod0x89ABc1);
220         vacc1x89AB = vpadalq_s16(vacc1x89AB, vprod1x89ABc1);
221         vacc2x89AB = vpadalq_s16(vacc2x89AB, vprod2x89ABc1);
222         vacc3x89AB = vpadalq_s16(vacc3x89AB, vprod3x89ABc1);
223         int16x8_t vprod0xCDEFc1 = vmull_s8(vbCDEFc1x0, va0x0);
224         int16x8_t vprod1xCDEFc1 = vmull_s8(vbCDEFc1x0, va1x0);
225         int16x8_t vprod2xCDEFc1 = vmull_s8(vbCDEFc1x0, va2x0);
226         int16x8_t vprod3xCDEFc1 = vmull_s8(vbCDEFc1x0, va3x0);
227         const int8x8_t vbCDEFc1x1 = vld1_s8(w); w = (const int8_t*) w + 8;
228         vprod0xCDEFc1 = vmlal_s8(vprod0xCDEFc1, vbCDEFc1x1, va0x1);
229         vprod1xCDEFc1 = vmlal_s8(vprod1xCDEFc1, vbCDEFc1x1, va1x1);
230         vprod2xCDEFc1 = vmlal_s8(vprod2xCDEFc1, vbCDEFc1x1, va2x1);
231         vprod3xCDEFc1 = vmlal_s8(vprod3xCDEFc1, vbCDEFc1x1, va3x1);
232         vacc0xCDEF = vpadalq_s16(vacc0xCDEF, vprod0xCDEFc1);
233         vacc1xCDEF = vpadalq_s16(vacc1xCDEF, vprod1xCDEFc1);
234         vacc2xCDEF = vpadalq_s16(vacc2xCDEF, vprod2xCDEFc1);
235         vacc3xCDEF = vpadalq_s16(vacc3xCDEF, vprod3xCDEFc1);
236         va0x0 = vext_s8(va0x0, va0x0, 2);
237         va0x1 = vext_s8(va0x1, va0x1, 2);
238         va1x0 = vext_s8(va1x0, va1x0, 2);
239         va1x1 = vext_s8(va1x1, va1x1, 2);
240         va2x0 = vext_s8(va2x0, va2x0, 2);
241         va2x1 = vext_s8(va2x1, va2x1, 2);
242         va3x0 = vext_s8(va3x0, va3x0, 2);
243         va3x1 = vext_s8(va3x1, va3x1, 2);
244         int16x8_t vprod0x0123c2 = vmull_s8(vb0123c2x0, va0x0);
245         int16x8_t vprod1x0123c2 = vmull_s8(vb0123c2x0, va1x0);
246         int16x8_t vprod2x0123c2 = vmull_s8(vb0123c2x0, va2x0);
247         int16x8_t vprod3x0123c2 = vmull_s8(vb0123c2x0, va3x0);
248         const int8x8_t vb0123c2x1 = vld1_s8(w); w = (const int8_t*) w + 8;
249         vprod0x0123c2 = vmlal_s8(vprod0x0123c2, vb0123c2x1, va0x1);
250         vprod1x0123c2 = vmlal_s8(vprod1x0123c2, vb0123c2x1, va1x1);
251         vprod2x0123c2 = vmlal_s8(vprod2x0123c2, vb0123c2x1, va2x1);
252         vprod3x0123c2 = vmlal_s8(vprod3x0123c2, vb0123c2x1, va3x1);
253         vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c2);
254         vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c2);
255         vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c2);
256         vacc3x0123 = vpadalq_s16(vacc3x0123, vprod3x0123c2);
257         int16x8_t vprod0x4567c2 = vmull_s8(vb4567c2x0, va0x0);
258         int16x8_t vprod1x4567c2 = vmull_s8(vb4567c2x0, va1x0);
259         int16x8_t vprod2x4567c2 = vmull_s8(vb4567c2x0, va2x0);
260         int16x8_t vprod3x4567c2 = vmull_s8(vb4567c2x0, va3x0);
261         const int8x8_t vb4567c2x1 = vld1_s8(w); w = (const int8_t*) w + 8;
262         vprod0x4567c2 = vmlal_s8(vprod0x4567c2, vb4567c2x1, va0x1);
263         vprod1x4567c2 = vmlal_s8(vprod1x4567c2, vb4567c2x1, va1x1);
264         vprod2x4567c2 = vmlal_s8(vprod2x4567c2, vb4567c2x1, va2x1);
265         vprod3x4567c2 = vmlal_s8(vprod3x4567c2, vb4567c2x1, va3x1);
266         vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c2);
267         vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c2);
268         vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c2);
269         vacc3x4567 = vpadalq_s16(vacc3x4567, vprod3x4567c2);
270         int16x8_t vprod0x89ABc2 = vmull_s8(vb89ABc2x0, va0x0);
271         int16x8_t vprod1x89ABc2 = vmull_s8(vb89ABc2x0, va1x0);
272         int16x8_t vprod2x89ABc2 = vmull_s8(vb89ABc2x0, va2x0);
273         int16x8_t vprod3x89ABc2 = vmull_s8(vb89ABc2x0, va3x0);
274         const int8x8_t vb89ABc2x1 = vld1_s8(w); w = (const int8_t*) w + 8;
275         vprod0x89ABc2 = vmlal_s8(vprod0x89ABc2, vb89ABc2x1, va0x1);
276         vprod1x89ABc2 = vmlal_s8(vprod1x89ABc2, vb89ABc2x1, va1x1);
277         vprod2x89ABc2 = vmlal_s8(vprod2x89ABc2, vb89ABc2x1, va2x1);
278         vprod3x89ABc2 = vmlal_s8(vprod3x89ABc2, vb89ABc2x1, va3x1);
279         vacc0x89AB = vpadalq_s16(vacc0x89AB, vprod0x89ABc2);
280         vacc1x89AB = vpadalq_s16(vacc1x89AB, vprod1x89ABc2);
281         vacc2x89AB = vpadalq_s16(vacc2x89AB, vprod2x89ABc2);
282         vacc3x89AB = vpadalq_s16(vacc3x89AB, vprod3x89ABc2);
283         int16x8_t vprod0xCDEFc2 = vmull_s8(vbCDEFc2x0, va0x0);
284         int16x8_t vprod1xCDEFc2 = vmull_s8(vbCDEFc2x0, va1x0);
285         int16x8_t vprod2xCDEFc2 = vmull_s8(vbCDEFc2x0, va2x0);
286         int16x8_t vprod3xCDEFc2 = vmull_s8(vbCDEFc2x0, va3x0);
287         const int8x8_t vbCDEFc2x1 = vld1_s8(w); w = (const int8_t*) w + 8;
288         vprod0xCDEFc2 = vmlal_s8(vprod0xCDEFc2, vbCDEFc2x1, va0x1);
289         vprod1xCDEFc2 = vmlal_s8(vprod1xCDEFc2, vbCDEFc2x1, va1x1);
290         vprod2xCDEFc2 = vmlal_s8(vprod2xCDEFc2, vbCDEFc2x1, va2x1);
291         vprod3xCDEFc2 = vmlal_s8(vprod3xCDEFc2, vbCDEFc2x1, va3x1);
292         vacc0xCDEF = vpadalq_s16(vacc0xCDEF, vprod0xCDEFc2);
293         vacc1xCDEF = vpadalq_s16(vacc1xCDEF, vprod1xCDEFc2);
294         vacc2xCDEF = vpadalq_s16(vacc2xCDEF, vprod2xCDEFc2);
295         vacc3xCDEF = vpadalq_s16(vacc3xCDEF, vprod3xCDEFc2);
296         va0x0 = vext_s8(va0x0, va0x0, 2);
297         va0x1 = vext_s8(va0x1, va0x1, 2);
298         va1x0 = vext_s8(va1x0, va1x0, 2);
299         va1x1 = vext_s8(va1x1, va1x1, 2);
300         va2x0 = vext_s8(va2x0, va2x0, 2);
301         va2x1 = vext_s8(va2x1, va2x1, 2);
302         va3x0 = vext_s8(va3x0, va3x0, 2);
303         va3x1 = vext_s8(va3x1, va3x1, 2);
304         int16x8_t vprod0x0123c3 = vmull_s8(vb0123c3x0, va0x0);
305         int16x8_t vprod1x0123c3 = vmull_s8(vb0123c3x0, va1x0);
306         int16x8_t vprod2x0123c3 = vmull_s8(vb0123c3x0, va2x0);
307         int16x8_t vprod3x0123c3 = vmull_s8(vb0123c3x0, va3x0);
308         const int8x8_t vb0123c3x1 = vld1_s8(w); w = (const int8_t*) w + 8;
309         vprod0x0123c3 = vmlal_s8(vprod0x0123c3, vb0123c3x1, va0x1);
310         vprod1x0123c3 = vmlal_s8(vprod1x0123c3, vb0123c3x1, va1x1);
311         vprod2x0123c3 = vmlal_s8(vprod2x0123c3, vb0123c3x1, va2x1);
312         vprod3x0123c3 = vmlal_s8(vprod3x0123c3, vb0123c3x1, va3x1);
313         vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c3);
314         vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c3);
315         vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c3);
316         vacc3x0123 = vpadalq_s16(vacc3x0123, vprod3x0123c3);
317         int16x8_t vprod0x4567c3 = vmull_s8(vb4567c3x0, va0x0);
318         int16x8_t vprod1x4567c3 = vmull_s8(vb4567c3x0, va1x0);
319         int16x8_t vprod2x4567c3 = vmull_s8(vb4567c3x0, va2x0);
320         int16x8_t vprod3x4567c3 = vmull_s8(vb4567c3x0, va3x0);
321         const int8x8_t vb4567c3x1 = vld1_s8(w); w = (const int8_t*) w + 8;
322         vprod0x4567c3 = vmlal_s8(vprod0x4567c3, vb4567c3x1, va0x1);
323         vprod1x4567c3 = vmlal_s8(vprod1x4567c3, vb4567c3x1, va1x1);
324         vprod2x4567c3 = vmlal_s8(vprod2x4567c3, vb4567c3x1, va2x1);
325         vprod3x4567c3 = vmlal_s8(vprod3x4567c3, vb4567c3x1, va3x1);
326         vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c3);
327         vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c3);
328         vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c3);
329         vacc3x4567 = vpadalq_s16(vacc3x4567, vprod3x4567c3);
330         int16x8_t vprod0x89ABc3 = vmull_s8(vb89ABc3x0, va0x0);
331         int16x8_t vprod1x89ABc3 = vmull_s8(vb89ABc3x0, va1x0);
332         int16x8_t vprod2x89ABc3 = vmull_s8(vb89ABc3x0, va2x0);
333         int16x8_t vprod3x89ABc3 = vmull_s8(vb89ABc3x0, va3x0);
334         const int8x8_t vb89ABc3x1 = vld1_s8(w); w = (const int8_t*) w + 8;
335         vprod0x89ABc3 = vmlal_s8(vprod0x89ABc3, vb89ABc3x1, va0x1);
336         vprod1x89ABc3 = vmlal_s8(vprod1x89ABc3, vb89ABc3x1, va1x1);
337         vprod2x89ABc3 = vmlal_s8(vprod2x89ABc3, vb89ABc3x1, va2x1);
338         vprod3x89ABc3 = vmlal_s8(vprod3x89ABc3, vb89ABc3x1, va3x1);
339         vacc0x89AB = vpadalq_s16(vacc0x89AB, vprod0x89ABc3);
340         vacc1x89AB = vpadalq_s16(vacc1x89AB, vprod1x89ABc3);
341         vacc2x89AB = vpadalq_s16(vacc2x89AB, vprod2x89ABc3);
342         vacc3x89AB = vpadalq_s16(vacc3x89AB, vprod3x89ABc3);
343         int16x8_t vprod0xCDEFc3 = vmull_s8(vbCDEFc3x0, va0x0);
344         int16x8_t vprod1xCDEFc3 = vmull_s8(vbCDEFc3x0, va1x0);
345         int16x8_t vprod2xCDEFc3 = vmull_s8(vbCDEFc3x0, va2x0);
346         int16x8_t vprod3xCDEFc3 = vmull_s8(vbCDEFc3x0, va3x0);
347         const int8x8_t vbCDEFc3x1 = vld1_s8(w); w = (const int8_t*) w + 8;
348         vprod0xCDEFc3 = vmlal_s8(vprod0xCDEFc3, vbCDEFc3x1, va0x1);
349         vprod1xCDEFc3 = vmlal_s8(vprod1xCDEFc3, vbCDEFc3x1, va1x1);
350         vprod2xCDEFc3 = vmlal_s8(vprod2xCDEFc3, vbCDEFc3x1, va2x1);
351         vprod3xCDEFc3 = vmlal_s8(vprod3xCDEFc3, vbCDEFc3x1, va3x1);
352         vacc0xCDEF = vpadalq_s16(vacc0xCDEF, vprod0xCDEFc3);
353         vacc1xCDEF = vpadalq_s16(vacc1xCDEF, vprod1xCDEFc3);
354         vacc2xCDEF = vpadalq_s16(vacc2xCDEF, vprod2xCDEFc3);
355         vacc3xCDEF = vpadalq_s16(vacc3xCDEF, vprod3xCDEFc3);
356 
357         k -= 16 * sizeof(int8_t);
358       }
359       if (k != 0) {
360         int8x8_t va0x0 = vld1_s8(a0); a0 += 8;
361         int8x8_t va1x0 = vld1_s8(a1); a1 += 8;
362         int8x8_t va2x0 = vld1_s8(a2); a2 += 8;
363         int8x8_t va3x0 = vld1_s8(a3); a3 += 8;
364 
365         const int8x8_t vb0123c0x0 = vld1_s8(w); w = (const int8_t*) w + 8;
366         const int8x8_t vb4567c0x0 = vld1_s8(w); w = (const int8_t*) w + 8;
367         const int8x8_t vb89ABc0x0 = vld1_s8(w); w = (const int8_t*) w + 8;
368         const int8x8_t vbCDEFc0x0 = vld1_s8(w); w = (const int8_t*) w + 8;
369         const int8x8_t vb0123c1x0 = vld1_s8(w); w = (const int8_t*) w + 8;
370         const int8x8_t vb4567c1x0 = vld1_s8(w); w = (const int8_t*) w + 8;
371         const int8x8_t vb89ABc1x0 = vld1_s8(w); w = (const int8_t*) w + 8;
372         const int8x8_t vbCDEFc1x0 = vld1_s8(w); w = (const int8_t*) w + 8;
373         const int8x8_t vb0123c2x0 = vld1_s8(w); w = (const int8_t*) w + 8;
374         const int8x8_t vb4567c2x0 = vld1_s8(w); w = (const int8_t*) w + 8;
375         const int8x8_t vb89ABc2x0 = vld1_s8(w); w = (const int8_t*) w + 8;
376         const int8x8_t vbCDEFc2x0 = vld1_s8(w); w = (const int8_t*) w + 8;
377         const int8x8_t vb0123c3x0 = vld1_s8(w); w = (const int8_t*) w + 8;
378         const int8x8_t vb4567c3x0 = vld1_s8(w); w = (const int8_t*) w + 8;
379         const int8x8_t vb89ABc3x0 = vld1_s8(w); w = (const int8_t*) w + 8;
380         const int8x8_t vbCDEFc3x0 = vld1_s8(w); w = (const int8_t*) w + 8;
381 
382         int16x8_t vprod0x0123c0 = vmull_s8(vb0123c0x0, va0x0);
383         int16x8_t vprod1x0123c0 = vmull_s8(vb0123c0x0, va1x0);
384         int16x8_t vprod2x0123c0 = vmull_s8(vb0123c0x0, va2x0);
385         int16x8_t vprod3x0123c0 = vmull_s8(vb0123c0x0, va3x0);
386         vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c0);
387         vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c0);
388         vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c0);
389         vacc3x0123 = vpadalq_s16(vacc3x0123, vprod3x0123c0);
390         int16x8_t vprod0x4567c0 = vmull_s8(vb4567c0x0, va0x0);
391         int16x8_t vprod1x4567c0 = vmull_s8(vb4567c0x0, va1x0);
392         int16x8_t vprod2x4567c0 = vmull_s8(vb4567c0x0, va2x0);
393         int16x8_t vprod3x4567c0 = vmull_s8(vb4567c0x0, va3x0);
394         vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c0);
395         vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c0);
396         vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c0);
397         vacc3x4567 = vpadalq_s16(vacc3x4567, vprod3x4567c0);
398         int16x8_t vprod0x89ABc0 = vmull_s8(vb89ABc0x0, va0x0);
399         int16x8_t vprod1x89ABc0 = vmull_s8(vb89ABc0x0, va1x0);
400         int16x8_t vprod2x89ABc0 = vmull_s8(vb89ABc0x0, va2x0);
401         int16x8_t vprod3x89ABc0 = vmull_s8(vb89ABc0x0, va3x0);
402         vacc0x89AB = vpadalq_s16(vacc0x89AB, vprod0x89ABc0);
403         vacc1x89AB = vpadalq_s16(vacc1x89AB, vprod1x89ABc0);
404         vacc2x89AB = vpadalq_s16(vacc2x89AB, vprod2x89ABc0);
405         vacc3x89AB = vpadalq_s16(vacc3x89AB, vprod3x89ABc0);
406         int16x8_t vprod0xCDEFc0 = vmull_s8(vbCDEFc0x0, va0x0);
407         int16x8_t vprod1xCDEFc0 = vmull_s8(vbCDEFc0x0, va1x0);
408         int16x8_t vprod2xCDEFc0 = vmull_s8(vbCDEFc0x0, va2x0);
409         int16x8_t vprod3xCDEFc0 = vmull_s8(vbCDEFc0x0, va3x0);
410         vacc0xCDEF = vpadalq_s16(vacc0xCDEF, vprod0xCDEFc0);
411         vacc1xCDEF = vpadalq_s16(vacc1xCDEF, vprod1xCDEFc0);
412         vacc2xCDEF = vpadalq_s16(vacc2xCDEF, vprod2xCDEFc0);
413         vacc3xCDEF = vpadalq_s16(vacc3xCDEF, vprod3xCDEFc0);
414         va0x0 = vext_s8(va0x0, va0x0, 2);
415         va1x0 = vext_s8(va1x0, va1x0, 2);
416         va2x0 = vext_s8(va2x0, va2x0, 2);
417         va3x0 = vext_s8(va3x0, va3x0, 2);
418         int16x8_t vprod0x0123c1 = vmull_s8(vb0123c1x0, va0x0);
419         int16x8_t vprod1x0123c1 = vmull_s8(vb0123c1x0, va1x0);
420         int16x8_t vprod2x0123c1 = vmull_s8(vb0123c1x0, va2x0);
421         int16x8_t vprod3x0123c1 = vmull_s8(vb0123c1x0, va3x0);
422         vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c1);
423         vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c1);
424         vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c1);
425         vacc3x0123 = vpadalq_s16(vacc3x0123, vprod3x0123c1);
426         int16x8_t vprod0x4567c1 = vmull_s8(vb4567c1x0, va0x0);
427         int16x8_t vprod1x4567c1 = vmull_s8(vb4567c1x0, va1x0);
428         int16x8_t vprod2x4567c1 = vmull_s8(vb4567c1x0, va2x0);
429         int16x8_t vprod3x4567c1 = vmull_s8(vb4567c1x0, va3x0);
430         vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c1);
431         vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c1);
432         vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c1);
433         vacc3x4567 = vpadalq_s16(vacc3x4567, vprod3x4567c1);
434         int16x8_t vprod0x89ABc1 = vmull_s8(vb89ABc1x0, va0x0);
435         int16x8_t vprod1x89ABc1 = vmull_s8(vb89ABc1x0, va1x0);
436         int16x8_t vprod2x89ABc1 = vmull_s8(vb89ABc1x0, va2x0);
437         int16x8_t vprod3x89ABc1 = vmull_s8(vb89ABc1x0, va3x0);
438         vacc0x89AB = vpadalq_s16(vacc0x89AB, vprod0x89ABc1);
439         vacc1x89AB = vpadalq_s16(vacc1x89AB, vprod1x89ABc1);
440         vacc2x89AB = vpadalq_s16(vacc2x89AB, vprod2x89ABc1);
441         vacc3x89AB = vpadalq_s16(vacc3x89AB, vprod3x89ABc1);
442         int16x8_t vprod0xCDEFc1 = vmull_s8(vbCDEFc1x0, va0x0);
443         int16x8_t vprod1xCDEFc1 = vmull_s8(vbCDEFc1x0, va1x0);
444         int16x8_t vprod2xCDEFc1 = vmull_s8(vbCDEFc1x0, va2x0);
445         int16x8_t vprod3xCDEFc1 = vmull_s8(vbCDEFc1x0, va3x0);
446         vacc0xCDEF = vpadalq_s16(vacc0xCDEF, vprod0xCDEFc1);
447         vacc1xCDEF = vpadalq_s16(vacc1xCDEF, vprod1xCDEFc1);
448         vacc2xCDEF = vpadalq_s16(vacc2xCDEF, vprod2xCDEFc1);
449         vacc3xCDEF = vpadalq_s16(vacc3xCDEF, vprod3xCDEFc1);
450         va0x0 = vext_s8(va0x0, va0x0, 2);
451         va1x0 = vext_s8(va1x0, va1x0, 2);
452         va2x0 = vext_s8(va2x0, va2x0, 2);
453         va3x0 = vext_s8(va3x0, va3x0, 2);
454         int16x8_t vprod0x0123c2 = vmull_s8(vb0123c2x0, va0x0);
455         int16x8_t vprod1x0123c2 = vmull_s8(vb0123c2x0, va1x0);
456         int16x8_t vprod2x0123c2 = vmull_s8(vb0123c2x0, va2x0);
457         int16x8_t vprod3x0123c2 = vmull_s8(vb0123c2x0, va3x0);
458         vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c2);
459         vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c2);
460         vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c2);
461         vacc3x0123 = vpadalq_s16(vacc3x0123, vprod3x0123c2);
462         int16x8_t vprod0x4567c2 = vmull_s8(vb4567c2x0, va0x0);
463         int16x8_t vprod1x4567c2 = vmull_s8(vb4567c2x0, va1x0);
464         int16x8_t vprod2x4567c2 = vmull_s8(vb4567c2x0, va2x0);
465         int16x8_t vprod3x4567c2 = vmull_s8(vb4567c2x0, va3x0);
466         vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c2);
467         vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c2);
468         vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c2);
469         vacc3x4567 = vpadalq_s16(vacc3x4567, vprod3x4567c2);
470         int16x8_t vprod0x89ABc2 = vmull_s8(vb89ABc2x0, va0x0);
471         int16x8_t vprod1x89ABc2 = vmull_s8(vb89ABc2x0, va1x0);
472         int16x8_t vprod2x89ABc2 = vmull_s8(vb89ABc2x0, va2x0);
473         int16x8_t vprod3x89ABc2 = vmull_s8(vb89ABc2x0, va3x0);
474         vacc0x89AB = vpadalq_s16(vacc0x89AB, vprod0x89ABc2);
475         vacc1x89AB = vpadalq_s16(vacc1x89AB, vprod1x89ABc2);
476         vacc2x89AB = vpadalq_s16(vacc2x89AB, vprod2x89ABc2);
477         vacc3x89AB = vpadalq_s16(vacc3x89AB, vprod3x89ABc2);
478         int16x8_t vprod0xCDEFc2 = vmull_s8(vbCDEFc2x0, va0x0);
479         int16x8_t vprod1xCDEFc2 = vmull_s8(vbCDEFc2x0, va1x0);
480         int16x8_t vprod2xCDEFc2 = vmull_s8(vbCDEFc2x0, va2x0);
481         int16x8_t vprod3xCDEFc2 = vmull_s8(vbCDEFc2x0, va3x0);
482         vacc0xCDEF = vpadalq_s16(vacc0xCDEF, vprod0xCDEFc2);
483         vacc1xCDEF = vpadalq_s16(vacc1xCDEF, vprod1xCDEFc2);
484         vacc2xCDEF = vpadalq_s16(vacc2xCDEF, vprod2xCDEFc2);
485         vacc3xCDEF = vpadalq_s16(vacc3xCDEF, vprod3xCDEFc2);
486         va0x0 = vext_s8(va0x0, va0x0, 2);
487         va1x0 = vext_s8(va1x0, va1x0, 2);
488         va2x0 = vext_s8(va2x0, va2x0, 2);
489         va3x0 = vext_s8(va3x0, va3x0, 2);
490         int16x8_t vprod0x0123c3 = vmull_s8(vb0123c3x0, va0x0);
491         int16x8_t vprod1x0123c3 = vmull_s8(vb0123c3x0, va1x0);
492         int16x8_t vprod2x0123c3 = vmull_s8(vb0123c3x0, va2x0);
493         int16x8_t vprod3x0123c3 = vmull_s8(vb0123c3x0, va3x0);
494         vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c3);
495         vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c3);
496         vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c3);
497         vacc3x0123 = vpadalq_s16(vacc3x0123, vprod3x0123c3);
498         int16x8_t vprod0x4567c3 = vmull_s8(vb4567c3x0, va0x0);
499         int16x8_t vprod1x4567c3 = vmull_s8(vb4567c3x0, va1x0);
500         int16x8_t vprod2x4567c3 = vmull_s8(vb4567c3x0, va2x0);
501         int16x8_t vprod3x4567c3 = vmull_s8(vb4567c3x0, va3x0);
502         vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c3);
503         vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c3);
504         vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c3);
505         vacc3x4567 = vpadalq_s16(vacc3x4567, vprod3x4567c3);
506         int16x8_t vprod0x89ABc3 = vmull_s8(vb89ABc3x0, va0x0);
507         int16x8_t vprod1x89ABc3 = vmull_s8(vb89ABc3x0, va1x0);
508         int16x8_t vprod2x89ABc3 = vmull_s8(vb89ABc3x0, va2x0);
509         int16x8_t vprod3x89ABc3 = vmull_s8(vb89ABc3x0, va3x0);
510         vacc0x89AB = vpadalq_s16(vacc0x89AB, vprod0x89ABc3);
511         vacc1x89AB = vpadalq_s16(vacc1x89AB, vprod1x89ABc3);
512         vacc2x89AB = vpadalq_s16(vacc2x89AB, vprod2x89ABc3);
513         vacc3x89AB = vpadalq_s16(vacc3x89AB, vprod3x89ABc3);
514         int16x8_t vprod0xCDEFc3 = vmull_s8(vbCDEFc3x0, va0x0);
515         int16x8_t vprod1xCDEFc3 = vmull_s8(vbCDEFc3x0, va1x0);
516         int16x8_t vprod2xCDEFc3 = vmull_s8(vbCDEFc3x0, va2x0);
517         int16x8_t vprod3xCDEFc3 = vmull_s8(vbCDEFc3x0, va3x0);
518         vacc0xCDEF = vpadalq_s16(vacc0xCDEF, vprod0xCDEFc3);
519         vacc1xCDEF = vpadalq_s16(vacc1xCDEF, vprod1xCDEFc3);
520         vacc2xCDEF = vpadalq_s16(vacc2xCDEF, vprod2xCDEFc3);
521         vacc3xCDEF = vpadalq_s16(vacc3xCDEF, vprod3xCDEFc3);
522 
523       }
524 
525       p -= 4 * sizeof(void*);
526     } while (p != 0);
527 
528     const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
529     const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
530     const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
531 
532     vacc0x0123 = vqshlq_s32(vacc0x0123, vright_pre_shift);
533     vacc0x4567 = vqshlq_s32(vacc0x4567, vright_pre_shift);
534     vacc0x89AB = vqshlq_s32(vacc0x89AB, vright_pre_shift);
535     vacc0xCDEF = vqshlq_s32(vacc0xCDEF, vright_pre_shift);
536     vacc1x0123 = vqshlq_s32(vacc1x0123, vright_pre_shift);
537     vacc1x4567 = vqshlq_s32(vacc1x4567, vright_pre_shift);
538     vacc1x89AB = vqshlq_s32(vacc1x89AB, vright_pre_shift);
539     vacc1xCDEF = vqshlq_s32(vacc1xCDEF, vright_pre_shift);
540     vacc2x0123 = vqshlq_s32(vacc2x0123, vright_pre_shift);
541     vacc2x4567 = vqshlq_s32(vacc2x4567, vright_pre_shift);
542     vacc2x89AB = vqshlq_s32(vacc2x89AB, vright_pre_shift);
543     vacc2xCDEF = vqshlq_s32(vacc2xCDEF, vright_pre_shift);
544     vacc3x0123 = vqshlq_s32(vacc3x0123, vright_pre_shift);
545     vacc3x4567 = vqshlq_s32(vacc3x4567, vright_pre_shift);
546     vacc3x89AB = vqshlq_s32(vacc3x89AB, vright_pre_shift);
547     vacc3xCDEF = vqshlq_s32(vacc3xCDEF, vright_pre_shift);
548 
549     vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
550     vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
551     vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
552     vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
553     vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
554     vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
555     vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
556     vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
557     vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
558     vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
559     vacc2x89AB = vqdmulhq_s32(vacc2x89AB, vmultiplier);
560     vacc2xCDEF = vqdmulhq_s32(vacc2xCDEF, vmultiplier);
561     vacc3x0123 = vqdmulhq_s32(vacc3x0123, vmultiplier);
562     vacc3x4567 = vqdmulhq_s32(vacc3x4567, vmultiplier);
563     vacc3x89AB = vqdmulhq_s32(vacc3x89AB, vmultiplier);
564     vacc3xCDEF = vqdmulhq_s32(vacc3xCDEF, vmultiplier);
565 
566     vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
567     vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
568     vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
569     vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
570     vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
571     vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
572     vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
573     vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
574     vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
575     vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
576     vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_post_shift);
577     vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_post_shift);
578     vacc3x0123 = vrshlq_s32(vacc3x0123, vright_post_shift);
579     vacc3x4567 = vrshlq_s32(vacc3x4567, vright_post_shift);
580     vacc3x89AB = vrshlq_s32(vacc3x89AB, vright_post_shift);
581     vacc3xCDEF = vrshlq_s32(vacc3xCDEF, vright_post_shift);
582 
583     const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
584 #if XNN_ARCH_ARM64
585     int16x8_t vacc0x01234567 = vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567);
586     int16x8_t vacc0x89ABCDEF = vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF);
587     int16x8_t vacc1x01234567 = vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567);
588     int16x8_t vacc1x89ABCDEF = vqmovn_high_s32(vqmovn_s32(vacc1x89AB), vacc1xCDEF);
589     int16x8_t vacc2x01234567 = vqmovn_high_s32(vqmovn_s32(vacc2x0123), vacc2x4567);
590     int16x8_t vacc2x89ABCDEF = vqmovn_high_s32(vqmovn_s32(vacc2x89AB), vacc2xCDEF);
591     int16x8_t vacc3x01234567 = vqmovn_high_s32(vqmovn_s32(vacc3x0123), vacc3x4567);
592     int16x8_t vacc3x89ABCDEF = vqmovn_high_s32(vqmovn_s32(vacc3x89AB), vacc3xCDEF);
593 
594     vacc0x01234567 = vqaddq_s16(vacc0x01234567, voutput_zero_point);
595     vacc0x89ABCDEF = vqaddq_s16(vacc0x89ABCDEF, voutput_zero_point);
596     vacc1x01234567 = vqaddq_s16(vacc1x01234567, voutput_zero_point);
597     vacc1x89ABCDEF = vqaddq_s16(vacc1x89ABCDEF, voutput_zero_point);
598     vacc2x01234567 = vqaddq_s16(vacc2x01234567, voutput_zero_point);
599     vacc2x89ABCDEF = vqaddq_s16(vacc2x89ABCDEF, voutput_zero_point);
600     vacc3x01234567 = vqaddq_s16(vacc3x01234567, voutput_zero_point);
601     vacc3x89ABCDEF = vqaddq_s16(vacc3x89ABCDEF, voutput_zero_point);
602 
603     int8x16_t vout0x0123456789ABCDEF = vqmovn_high_s16(vqmovn_s16(vacc0x01234567), vacc0x89ABCDEF);
604     int8x16_t vout1x0123456789ABCDEF = vqmovn_high_s16(vqmovn_s16(vacc1x01234567), vacc1x89ABCDEF);
605     int8x16_t vout2x0123456789ABCDEF = vqmovn_high_s16(vqmovn_s16(vacc2x01234567), vacc2x89ABCDEF);
606     int8x16_t vout3x0123456789ABCDEF = vqmovn_high_s16(vqmovn_s16(vacc3x01234567), vacc3x89ABCDEF);
607 #else
608     int16x8_t vacc0x01234567 = vcombine_s16(vqmovn_s32(vacc0x0123), vqmovn_s32(vacc0x4567));
609     int16x8_t vacc0x89ABCDEF = vcombine_s16(vqmovn_s32(vacc0x89AB), vqmovn_s32(vacc0xCDEF));
610     int16x8_t vacc1x01234567 = vcombine_s16(vqmovn_s32(vacc1x0123), vqmovn_s32(vacc1x4567));
611     int16x8_t vacc1x89ABCDEF = vcombine_s16(vqmovn_s32(vacc1x89AB), vqmovn_s32(vacc1xCDEF));
612     int16x8_t vacc2x01234567 = vcombine_s16(vqmovn_s32(vacc2x0123), vqmovn_s32(vacc2x4567));
613     int16x8_t vacc2x89ABCDEF = vcombine_s16(vqmovn_s32(vacc2x89AB), vqmovn_s32(vacc2xCDEF));
614     int16x8_t vacc3x01234567 = vcombine_s16(vqmovn_s32(vacc3x0123), vqmovn_s32(vacc3x4567));
615     int16x8_t vacc3x89ABCDEF = vcombine_s16(vqmovn_s32(vacc3x89AB), vqmovn_s32(vacc3xCDEF));
616 
617     vacc0x01234567 = vqaddq_s16(vacc0x01234567, voutput_zero_point);
618     vacc0x89ABCDEF = vqaddq_s16(vacc0x89ABCDEF, voutput_zero_point);
619     vacc1x01234567 = vqaddq_s16(vacc1x01234567, voutput_zero_point);
620     vacc1x89ABCDEF = vqaddq_s16(vacc1x89ABCDEF, voutput_zero_point);
621     vacc2x01234567 = vqaddq_s16(vacc2x01234567, voutput_zero_point);
622     vacc2x89ABCDEF = vqaddq_s16(vacc2x89ABCDEF, voutput_zero_point);
623     vacc3x01234567 = vqaddq_s16(vacc3x01234567, voutput_zero_point);
624     vacc3x89ABCDEF = vqaddq_s16(vacc3x89ABCDEF, voutput_zero_point);
625 
626     int8x16_t vout0x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc0x89ABCDEF));
627     int8x16_t vout1x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc1x01234567), vqmovn_s16(vacc1x89ABCDEF));
628     int8x16_t vout2x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc2x89ABCDEF));
629     int8x16_t vout3x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc3x01234567), vqmovn_s16(vacc3x89ABCDEF));
630 #endif
631 
632     const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
633     vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
634     vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
635     vout2x0123456789ABCDEF = vmaxq_s8(vout2x0123456789ABCDEF, voutput_min);
636     vout3x0123456789ABCDEF = vmaxq_s8(vout3x0123456789ABCDEF, voutput_min);
637 
638     const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
639     vout0x0123456789ABCDEF = vminq_s8(vout0x0123456789ABCDEF, voutput_max);
640     vout1x0123456789ABCDEF = vminq_s8(vout1x0123456789ABCDEF, voutput_max);
641     vout2x0123456789ABCDEF = vminq_s8(vout2x0123456789ABCDEF, voutput_max);
642     vout3x0123456789ABCDEF = vminq_s8(vout3x0123456789ABCDEF, voutput_max);
643 
644     if (nc >= 16) {
645       vst1q_s8(c3 + 0, vout3x0123456789ABCDEF);
646       vst1q_s8(c2 + 0, vout2x0123456789ABCDEF);
647       vst1q_s8(c1 + 0, vout1x0123456789ABCDEF);
648       vst1q_s8(c0 + 0, vout0x0123456789ABCDEF);
649 
650       c3 = (int8_t*) ((uintptr_t) c3 + cn_stride);
651       c2 = (int8_t*) ((uintptr_t) c2 + cn_stride);
652       c1 = (int8_t*) ((uintptr_t) c1 + cn_stride);
653       c0 = (int8_t*) ((uintptr_t) c0 + cn_stride);
654 
655       a = (const int8_t**restrict) ((uintptr_t) a - ks);
656 
657       nc -= 16;
658     } else {
659       int8x16_t vout2x01234567_3x01234567 = vcombine_s8(vget_low_s8(vout2x0123456789ABCDEF), vget_low_s8(vout3x0123456789ABCDEF));
660       int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vget_low_s8(vout0x0123456789ABCDEF), vget_low_s8(vout1x0123456789ABCDEF));
661       if (nc & 8) {
662         vst1_s8(c3, vget_high_s8(vout2x01234567_3x01234567)); c3 += 8;
663         vst1_s8(c2, vget_low_s8(vout2x01234567_3x01234567)); c2 += 8;
664         vst1_s8(c1, vget_high_s8(vout0x01234567_1x01234567)); c1 += 8;
665         vst1_s8(c0, vget_low_s8(vout0x01234567_1x01234567)); c0 += 8;
666         vout2x01234567_3x01234567 = vcombine_s8(vget_high_s8(vout2x0123456789ABCDEF), vget_high_s8(vout3x0123456789ABCDEF));
667         vout0x01234567_1x01234567 = vcombine_s8(vget_high_s8(vout0x0123456789ABCDEF), vget_high_s8(vout1x0123456789ABCDEF));
668       }
669       if (nc & 4) {
670         vst1q_lane_u32((void*) c3, vreinterpretq_u32_s8(vout2x01234567_3x01234567), 2); c3 += 4;
671         vst1q_lane_u32((void*) c2, vreinterpretq_u32_s8(vout2x01234567_3x01234567), 0); c2 += 4;
672         vst1q_lane_u32((void*) c1, vreinterpretq_u32_s8(vout0x01234567_1x01234567), 2); c1 += 4;
673         vst1q_lane_u32((void*) c0, vreinterpretq_u32_s8(vout0x01234567_1x01234567), 0); c0 += 4;
674         vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);
675         vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);
676       }
677       if (nc & 2) {
678         vst1q_lane_u16((void*) c3, vreinterpretq_u16_s8(vout2x01234567_3x01234567), 4); c3 += 2;
679         vst1q_lane_u16((void*) c2, vreinterpretq_u16_s8(vout2x01234567_3x01234567), 0); c2 += 2;
680         vst1q_lane_u16((void*) c1, vreinterpretq_u16_s8(vout0x01234567_1x01234567), 4); c1 += 2;
681         vst1q_lane_u16((void*) c0, vreinterpretq_u16_s8(vout0x01234567_1x01234567), 0); c0 += 2;
682         vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);
683         vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);
684       }
685       if (nc & 1) {
686         vst1q_lane_s8(c3, vout2x01234567_3x01234567, 8);
687         vst1q_lane_s8(c2, vout2x01234567_3x01234567, 0);
688         vst1q_lane_s8(c1, vout0x01234567_1x01234567, 8);
689         vst1q_lane_s8(c0, vout0x01234567_1x01234567, 0);
690       }
691 
692       nc = 0;
693     }
694   } while (nc != 0);
695 }
696