Lines matching "+full:64 +full:-": search hits from the arm64 checksum code (arch/arm64/lib/csum.c). Each hit keeps its original line number and, where applicable, the enclosing function.

1 // SPDX-License-Identifier: GPL-2.0-only
2 // Copyright (C) 2019-2020 Arm Ltd.
5 #include <linux/kasan-checks.h>
10 /* Looks dumb, but generates nice-ish code */
14 return tmp + (tmp >> 64); in accumulate()
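This return is essentially the whole body of the accumulate() helper: the addition is done in 128 bits so the carry out of the low 64 bits lands in bit 64, and adding tmp >> 64 back in performs the end-around carry that ones'-complement arithmetic needs. A minimal userspace sketch of the same idea (accumulate64 is an illustrative name, not the kernel symbol):

#include <stdint.h>

/* Ones'-complement add of one 64-bit word into a running sum.
 * The 128-bit intermediate captures the carry in bit 64; adding
 * (tmp >> 64) folds that carry back in (end-around carry). */
static uint64_t accumulate64(uint64_t sum, uint64_t data)
{
	__uint128_t tmp = (__uint128_t)sum + data;

	return (uint64_t)(tmp + (tmp >> 64));
}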
18 * We over-read the buffer and this makes KASAN unhappy. Instead, disable
34 * should absolutely not be pointing to anything read-sensitive. We do, in do_csum()
37 * compensate with an explicit check up-front. in do_csum()
40 ptr = (u64 *)(buff - offset); in do_csum()
41 len = len + offset - 8; in do_csum()
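Lines 40-41 implement the head handling described in the comment above: the pointer is rounded down to an 8-byte boundary (safe because rounding down cannot cross a page), the stray leading bytes of that first aligned load are zeroed by shifting, and len is rebased to account for the doubleword just consumed; the kernel compensates for the unchecked reads with an explicit kasan_check_read() up front. A hedged little-endian sketch of the head load (load_head is an illustrative helper, not the kernel code):

#include <stdint.h>

/* Little-endian head handling, roughly as in the matched lines: round
 * the buffer pointer down to an 8-byte boundary, load one aligned
 * doubleword, then shift out and back in to zero the 'offset' stray
 * bytes that precede the real buffer.  The deliberate over-read is only
 * safe under the kernel's assumptions (same page, non-sensitive data). */
static uint64_t load_head(const unsigned char *buff, unsigned int *offset)
{
	const uint64_t *ptr;
	unsigned int shift;
	uint64_t data;

	*offset = (uintptr_t)buff & 7;
	ptr = (const uint64_t *)(buff - *offset);
	shift = *offset * 8;

	data = *ptr;
	return (data >> shift) << shift;	/* drop leading bytes */
}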
62 while (unlikely(len > 64)) { in do_csum()
70 len -= 64; in do_csum()
74 tmp1 += (tmp1 >> 64) | (tmp1 << 64); in do_csum()
75 tmp2 += (tmp2 >> 64) | (tmp2 << 64); in do_csum()
76 tmp3 += (tmp3 >> 64) | (tmp3 << 64); in do_csum()
77 tmp4 += (tmp4 >> 64) | (tmp4 << 64); in do_csum()
78 tmp1 = ((tmp1 >> 64) << 64) | (tmp2 >> 64); in do_csum()
79 tmp1 += (tmp1 >> 64) | (tmp1 << 64); in do_csum()
80 tmp3 = ((tmp3 >> 64) << 64) | (tmp4 >> 64); in do_csum()
81 tmp3 += (tmp3 >> 64) | (tmp3 << 64); in do_csum()
82 tmp1 = ((tmp1 >> 64) << 64) | (tmp3 >> 64); in do_csum()
83 tmp1 += (tmp1 >> 64) | (tmp1 << 64); in do_csum()
84 tmp1 = ((tmp1 >> 64) << 64) | sum64; in do_csum()
85 tmp1 += (tmp1 >> 64) | (tmp1 << 64); in do_csum()
86 sum64 = tmp1 >> 64; in do_csum()
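Lines 74-86 are a reduction tree over the four 128-bit loads of the main loop: each "t += (t >> 64) | (t << 64)" step is a rotate-by-64 plus add, which leaves the ones'-complement sum of the two halves (end-around carry included) in the high half; pairs of results are then repacked into one 128-bit value and folded again until only sum64 remains. Two hedged helpers expressing the same primitives (fold128 and combine128 are illustrative names):

/* Rotate the 128-bit value by 64 bits and add: the high half of the
 * result holds the ones'-complement sum of the two original halves,
 * with the end-around carry already applied. */
static __uint128_t fold128(__uint128_t t)
{
	return t + ((t >> 64) | (t << 64));
}

/* Combine two already-folded values: keep the high half of 'a' in
 * place, move the high half of 'b' into the low half, then fold again,
 * matching the "((x >> 64) << 64) | (y >> 64)" pattern in the loop. */
static __uint128_t combine128(__uint128_t a, __uint128_t b)
{
	return fold128(((a >> 64) << 64) | (b >> 64));
}

In the matched lines the final step packs the surviving high half together with sum64 and folds once more, so sum64 ends up holding the running ones'-complement total including the whole 64-byte block.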
94 len -= 16; in do_csum()
98 data = tmp >> 64; in do_csum()
102 sum64 = accumulate(sum64, tmp >> 64); in do_csum()
108 len -= 8; in do_csum()
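The hits at lines 94-108 come from the remainder handling: once the 64-byte main loop is done, the function walks what is left 16 bytes at a time, feeding doublewords through accumulate() while keeping one word deferred in data so the very last word can still be masked in the tail step. A deliberately simplified sketch of that phase (no deferred word), reusing accumulate64 from the sketch above:

/* Simplified remainder loop: fold both halves of each 16-byte chunk
 * straight into the running sum.  The kernel version instead defers one
 * doubleword in 'data' so the final word can have its over-read bytes
 * cleared before it is folded in. */
static uint64_t sum_remainder(const uint64_t *ptr, int len, uint64_t sum64)
{
	while (len >= 16) {
		sum64 = accumulate64(sum64, ptr[0]);
		sum64 = accumulate64(sum64, ptr[1]);
		ptr += 2;
		len -= 16;
	}
	return sum64;
}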
111 * Tail: zero any over-read bytes similarly to the head, again in do_csum()
114 shift = len * -8; in do_csum()
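By the time line 114 runs, len has gone non-positive: in the full function it ends up in the range (-8, 0], so -len is the number of trailing bytes of the final doubleword that were over-read. Multiplying by -8 turns that into a bit count, and the shift pair clears exactly those bytes, mirroring the head. A little-endian sketch (mask_tail is an illustrative name):

#include <stdint.h>

/* Zero the over-read trailing bytes of the last doubleword.  'len' is
 * non-positive here; -len is the count of bytes to discard, so shifting
 * up and back down clears the top (-len * 8) bits on little-endian. */
static uint64_t mask_tail(uint64_t data, int len)
{
	unsigned int shift = len * -8;

	return (data << shift) >> shift;
}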
139 src = *(const __uint128_t *)saddr->s6_addr; in csum_ipv6_magic()
140 dst = *(const __uint128_t *)daddr->s6_addr; in csum_ipv6_magic()
148 src += (src >> 64) | (src << 64); in csum_ipv6_magic()
149 dst += (dst >> 64) | (dst << 64); in csum_ipv6_magic()
151 sum = accumulate(sum, src >> 64); in csum_ipv6_magic()
152 sum = accumulate(sum, dst >> 64); in csum_ipv6_magic()
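The csum_ipv6_magic() hits apply the same rotate-and-add trick to the two 128-bit IPv6 addresses: after lines 148-149 the high half of src and dst holds the folded ones'-complement sum of that address's two halves, and lines 151-152 accumulate those high halves into the 64-bit running sum. A hedged sketch of that per-address step (add_in6 is an illustrative name):

#include <stdint.h>

/* Fold one 128-bit IPv6 address into a 64-bit ones'-complement sum:
 * rotate-and-add leaves the folded address in the high half, which is
 * then accumulated with end-around carry. */
static uint64_t add_in6(uint64_t sum, __uint128_t addr)
{
	__uint128_t t;

	addr += (addr >> 64) | (addr << 64);
	t = (__uint128_t)sum + (uint64_t)(addr >> 64);

	return (uint64_t)(t + (t >> 64));
}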