Searched refs:load_unaligned_zeropad (Results 1 – 16 of 16) sorted by relevance
2 load_unaligned_zeropad
2 TEST_GEN_PROGS := load_unaligned_zeropad
111 got = load_unaligned_zeropad(p); in do_one_test()
167 static inline unsigned long load_unaligned_zeropad(const void *addr) in load_unaligned_zeropad() function
61 b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) & in __siphash_aligned()
94 b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) & in __siphash_unaligned()
261 b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) & in __hsiphash_aligned()
294 b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) & in __hsiphash_unaligned()
153 c = load_unaligned_zeropad(src+res); in sized_strscpy()
50 static inline unsigned long load_unaligned_zeropad(const void *addr) in load_unaligned_zeropad() function
59 static inline unsigned long load_unaligned_zeropad(const void *addr) in load_unaligned_zeropad() function
51 static inline unsigned long load_unaligned_zeropad(const void *addr) in load_unaligned_zeropad() function
68 static inline unsigned long load_unaligned_zeropad(const void *addr) in load_unaligned_zeropad() function
70 static inline unsigned long load_unaligned_zeropad(const void *addr) in load_unaligned_zeropad() function
104 trail = (load_unaligned_zeropad(buff) << shift) >> shift; in csum_partial()
235 load_unaligned_zeropad()
245 However, the kernel load_unaligned_zeropad() mechanism may make stray
251 the guest kernel, and in such a case, the load_unaligned_zeropad() fixup code
256 is in progress. If load_unaligned_zeropad() causes a stray reference, a
258 based handlers for load_unaligned_zeropad() fixup the reference. When the
2236 a = load_unaligned_zeropad(name); in full_name_hash()
2263 a = load_unaligned_zeropad(name+len); in hashlen_string()
2290 a = load_unaligned_zeropad(name); in hash_name()
2309 a = load_unaligned_zeropad(name+len); in hash_name()
244 b = load_unaligned_zeropad(ct); in dentry_string_cmp()