/*
 * strchr - find a character in a string
 *
 * Copyright (c) 2020-2022, Arm Limited.
 * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
 */

/* Assumptions:
 *
 * ARMv8-a, AArch64, Advanced SIMD.
 * MTE compatible.
 */

#include "asmdefs.h"

#define srcin		x0
#define chrin		w1
#define result		x0

#define src		x2
#define tmp1		x1
#define tmp2		x3

#define vrepchr		v0
#define vdata		v1
#define qdata		q1
#define vhas_nul	v2
#define vhas_chr	v3
#define vrepmask	v4
#define vend		v5
#define dend		d5

/* Core algorithm.

   For each 16-byte chunk we calculate a 64-bit syndrome value with four bits
   per byte.  Bits 0-1 are set if the relevant byte matched the requested
   character, bits 2-3 are set if the byte is NUL or matched.  Count trailing
   zeroes gives the position of the matching byte if it is a multiple of 4.
   If it is not a multiple of 4, there was no match.  */

ENTRY (__strchr_aarch64_mte)
	PTR_ARG (0)
	bic	src, srcin, 15
	dup	vrepchr.16b, chrin
	ld1	{vdata.16b}, [src]
	movi	vrepmask.16b, 0x33
	cmeq	vhas_nul.16b, vdata.16b, 0
	cmeq	vhas_chr.16b, vdata.16b, vrepchr.16b
	bit	vhas_nul.16b, vhas_chr.16b, vrepmask.16b
	lsl	tmp2, srcin, 2
	shrn	vend.8b, vhas_nul.8h, 4		/* 128->64 */
	fmov	tmp1, dend
	lsr	tmp1, tmp1, tmp2
	cbz	tmp1, L(loop)

	rbit	tmp1, tmp1
	clz	tmp1, tmp1
	/* Tmp1 is a multiple of 4 if the target character was found first.
	   Otherwise we've found the end of the string.  */
	tst	tmp1, 2
	add	result, srcin, tmp1, lsr 2
	csel	result, result, xzr, eq
	ret

	.p2align 4
L(loop):
	ldr	qdata, [src, 16]
	cmeq	vhas_chr.16b, vdata.16b, vrepchr.16b
	cmhs	vhas_nul.16b, vhas_chr.16b, vdata.16b
	umaxp	vend.16b, vhas_nul.16b, vhas_nul.16b
	fmov	tmp1, dend
	cbnz	tmp1, L(end)
	ldr	qdata, [src, 32]!
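	/* Second 16-byte block of the unrolled loop: the pre-indexed load
	   above has already advanced src by 32.  */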
	cmeq	vhas_chr.16b, vdata.16b, vrepchr.16b
	cmhs	vhas_nul.16b, vhas_chr.16b, vdata.16b
	umaxp	vend.16b, vhas_nul.16b, vhas_nul.16b
	fmov	tmp1, dend
	cbz	tmp1, L(loop)
	sub	src, src, 16
L(end):

#ifdef __AARCH64EB__
	bif	vhas_nul.16b, vhas_chr.16b, vrepmask.16b
	shrn	vend.8b, vhas_nul.8h, 4		/* 128->64 */
	fmov	tmp1, dend
#else
	bit	vhas_nul.16b, vhas_chr.16b, vrepmask.16b
	shrn	vend.8b, vhas_nul.8h, 4		/* 128->64 */
	fmov	tmp1, dend
	rbit	tmp1, tmp1
#endif
	add	src, src, 16
	clz	tmp1, tmp1
	/* Tmp1 is a multiple of 4 if the target character was found.  */
	tst	tmp1, 2
	add	result, src, tmp1, lsr 2
	csel	result, result, xzr, eq
	ret

END (__strchr_aarch64_mte)
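
/* For reference only (not assembled): a minimal C sketch of the syndrome
   scheme described in the "Core algorithm" comment above.  The function and
   variable names below are illustrative, not part of this routine, and the
   model assumes a little-endian view of a single 16-byte chunk.

   #include <stdint.h>
   #include <stddef.h>

   static const char *syndrome_model (const char *chunk, char c)
   {
     uint64_t syndrome = 0;
     for (int i = 0; i < 16; i++)
       {
	 unsigned nibble = 0;
	 if (chunk[i] == c)
	   nibble |= 0x3;	// bits 0-1: byte matched the character
	 if (chunk[i] == 0 || chunk[i] == c)
	   nibble |= 0xc;	// bits 2-3: byte is NUL or matched
	 syndrome |= (uint64_t) nibble << (4 * i);
       }
     if (syndrome == 0)
       return NULL;		// nothing found; scan the next chunk
     int tz = __builtin_ctzll (syndrome);
     // A multiple of 4 means the character was found before any NUL;
     // otherwise the end of the string was reached first.
     return (tz & 2) ? NULL : chunk + tz / 4;
   }
 */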