/******************************************************************************
 * Copyright © 2018, VideoLAN and dav1d authors
 * Copyright © 2024, Bogdan Gligorijevic
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *****************************************************************************/

#include "src/riscv/asm.S"

// void pal_idx_finish(uint8_t *dst, const uint8_t *src,
//                     int bw, int bh, int w, int h)
//
// Packs two 4-bit palette indices per output byte and pads the block:
//   a0 = dst (packed indices, stride bw/2)
//   a1 = src (one index per byte, stride bw)
//   a2 = bw, a3 = bh: padded block width/height
//   a4 = w,  a5 = h:  visible width/height
function pal_idx_finish_rvv, export=1, ext="v,zba,zbb"
    csrw     vxrm, zero         // fixed-point rounding mode: round-to-nearest-up
    srli     t0, a2, 1          // t0 = bw/2, packed output row size
    sub      a2, a2, a4         // a2 = bw - w, source row padding
    srli     t1, a4, 1          // t1 = w/2, packed bytes per visible row
    mv       t2, a5             // t2 = row counter (h)

    csrr     t6, vlenb
    li       t4, -3
    ctz      a6, t0             // log2(bw/2)
    ctz      t6, t6             // log2(vlenb)
    li       a7, 16
    sub      a6, a6, t6         // LMUL exponent: one packed row per register group
    li       t6, (1 << 4) + 1   // 0x11: replicates a 4-bit index into both nibbles

    // a6 is never > 3 for VLEN >= 128; larger values would have
    // required strip-mining with a6 clamped to 3.
    max      a6, a6, t4         // clamp fractional LMUL at 1/8
    li       t5, 2              // byte stride for the strided loads
    andi     a6, a6, 7          // wrap to the 3-bit vlmul encoding
    addi     t4, a1, 1          // t4 = src + 1 (odd-index pixels)
    ori      a6, a6, 0xc0       // vtype: e8, ta, ma

1:
    // pack one row: dst[x] = src[2*x] | (src[2*x+1] << 4)
    sub      t3, t0, t1         // t3 = horizontal padding in packed bytes
    vsetvl   zero, t1, a6
    vlse8.v  v0, (a1), t5       // even pixels -> low nibbles
    sh1add   a1, t1, a1         // src += w
    vlse8.v  v8, (t4), t5       // odd pixels -> high nibbles
    sh1add   t4, t1, t4
    vmacc.vx v0, a7, v8         // v0 = even + 16*odd
    vse8.v   v0, (a0)
    add      a0, a0, t1
    blez     t3, 4f

    // pad the rest of the row with the last visible index in both nibbles
    lbu      a4, -1(a1)         // last visible pixel of the row
    mul      a4, a4, t6         // a4 = idx * 0x11
    vsetvl   zero, t3, a6
    vmv.v.x  v0, a4
    vse8.v   v0, (a0)
    add      a0, a0, t3
4:
    addi     t2, t2, -1
    add      a1, a1, a2         // step src over the row padding
    add      t4, t4, a2
    bnez     t2, 1b

    // replicate the last written row over the bottom padding
    sub      t1, a3, a5         // t1 = bh - h

    sub      t2, a0, t0         // t2 = last written row
    blez     t1, 7f

    vsetvl   zero, t0, a6
    vle8.v   v0, (t2)
    add      t2, a0, t0
5:
    // two rows per iteration; the code relies on bh - h being even
    addi     t1, t1, -2
    vse8.v   v0, (a0)
    vse8.v   v0, (t2)
    sh1add   a0, t0, a0
    sh1add   t2, t0, t2

    bnez     t1, 5b
7:
    ret
endfunc