/* SPDX-License-Identifier: GPL-2.0-only */

#include <linux/linkage.h>
#include <asm/asm.h>
#include <asm/asm-extable.h>
#include <asm/csr.h>

#define pDst a0
#define pSrc a1
#define iNum a2

#define iVL a3

#define ELEM_LMUL_SETTING m8
#define vData v0

17 .macro fixup op reg addr lbl
18100:
19 \op \reg, \addr
20 _asm_extable 100b, \lbl
21 .endm
22
/*
 * size_t __asm_vector_usercopy(void *dst, const void *src, size_t n)
 *
 * Copy n bytes from user memory with RVV loads/stores, 8-bit elements
 * at LMUL=8 (ELEM_LMUL_SETTING), chunk size chosen by vsetvli.
 *
 * In:   a0 (pDst) = destination, a1 (pSrc) = source, a2 (iNum) = byte count
 * Out:  a0 = number of bytes NOT copied (0 on full success), so the
 *       caller can fall back to a scalar copy for the remainder.
 * Uses: a3 (iVL) = per-iteration vector length, t2, t6, v0-v7 (vData
 *       at LMUL=8), flags in CSR_STATUS.
 *
 * Faults on the user access are routed via the fixup macro: a faulting
 * vector load exits through label 10 (iNum still counts the unfinished
 * bytes); a faulting vector store goes through label 11, which rebuilds
 * the remaining-byte count from CSR_VSTART before sharing the exit path.
 */
SYM_FUNC_START(__asm_vector_usercopy)
	/* Set SSTATUS.SUM so supervisor code may touch user pages. */
	li	t6, SR_SUM
	csrs	CSR_STATUS, t6

loop:
	/* iVL = min(iNum, VLMAX) for e8/m8; ta,ma: tail/mask agnostic. */
	vsetvli	iVL, iNum, e8, ELEM_LMUL_SETTING, ta, ma
	fixup vle8.v vData, (pSrc), 10f
	sub	iNum, iNum, iVL
	add	pSrc, pSrc, iVL
	fixup vse8.v vData, (pDst), 11f
	add	pDst, pDst, iVL
	bnez	iNum, loop

	/* Exception fixup for vector load is shared with normal exit */
10:
	/* Disable access to user memory */
	csrc	CSR_STATUS, t6
	mv	a0, iNum		/* return bytes left uncopied */
	ret

	/* Exception fixup code for vector store. */
11:
	/* Undo the subtraction after vle8.v */
	add	iNum, iNum, iVL
	/* Make sure the scalar fallback skip already processed bytes */
	csrr	t2, CSR_VSTART
	sub	iNum, iNum, t2
	j	10b
SYM_FUNC_END(__asm_vector_usercopy)