/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * RISC-V (32- and 64-bit) specific definitions for NOLIBC
 * Copyright (C) 2017-2022 Willy Tarreau <w@1wt.eu>
 */
| |
| #ifndef _NOLIBC_ARCH_RISCV_H |
| #define _NOLIBC_ARCH_RISCV_H |
| |
| #include "compiler.h" |
| #include "crt.h" |
| |
/* Syscalls for RISC-V :
 *   - stack is 16-byte aligned
 *   - syscall number is passed in a7
 *   - arguments are in a0, a1, a2, a3, a4, a5
 *   - the system call is performed by calling ecall
 *   - syscall return value comes back in a0
 *   - the arguments are cast to long and assigned into the target
 *     registers which are then simply passed as registers to the asm code,
 *     so that we don't have to experience issues with register constraints.
 */
| |
/* Issue syscall <num> with no arguments; evaluates to the raw return
 * value from the kernel (a0), negative errno on failure.
 */
#define my_syscall0(num)                                                      \
({                                                                            \
	register long _nr  __asm__ ("a7") = (num);                            \
	register long _ret __asm__ ("a0");                                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"ecall\n\t"                                                   \
		: "=r"(_ret)                                                  \
		: "r"(_nr)                                                    \
		: "memory", "cc"                                              \
	);                                                                    \
	_ret;                                                                 \
})
| |
/* Issue syscall <num> with one argument (a0); evaluates to the raw return
 * value from the kernel (a0), negative errno on failure.
 * Note: asm template harmonized to "ecall\n\t" to match my_syscall0/3.
 */
#define my_syscall1(num, arg1)                                                \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"ecall\n\t"                                                   \
		: "+r"(_arg1)                                                 \
		: "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
| |
/* Issue syscall <num> with two arguments (a0, a1); evaluates to the raw
 * return value from the kernel (a0), negative errno on failure.
 * Note: asm template harmonized to "ecall\n\t" to match my_syscall0/3.
 */
#define my_syscall2(num, arg1, arg2)                                          \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("a1") = (long)(arg2);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"ecall\n\t"                                                   \
		: "+r"(_arg1)                                                 \
		: "r"(_arg2),                                                 \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
| |
/* Issue syscall <num> with three arguments (a0..a2); evaluates to the raw
 * return value from the kernel (a0), negative errno on failure.
 */
#define my_syscall3(num, arg1, arg2, arg3)                                    \
({                                                                            \
	register long _nr  __asm__ ("a7") = (num);                            \
	register long _a0  __asm__ ("a0") = (long)(arg1);                     \
	register long _a1  __asm__ ("a1") = (long)(arg2);                     \
	register long _a2  __asm__ ("a2") = (long)(arg3);                     \
	                                                                      \
	__asm__ volatile (                                                    \
		"ecall\n\t"                                                   \
		: "+r"(_a0)                                                   \
		: "r"(_a1), "r"(_a2),                                         \
		  "r"(_nr)                                                    \
		: "memory", "cc"                                              \
	);                                                                    \
	_a0;                                                                  \
})
| |
/* Issue syscall <num> with four arguments (a0..a3); evaluates to the raw
 * return value from the kernel (a0), negative errno on failure.
 * Note: asm template harmonized to "ecall\n\t" to match my_syscall0/3.
 */
#define my_syscall4(num, arg1, arg2, arg3, arg4)                              \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("a1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("a2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("a3") = (long)(arg4);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"ecall\n\t"                                                   \
		: "+r"(_arg1)                                                 \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4),                         \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
| |
/* Issue syscall <num> with five arguments (a0..a4); evaluates to the raw
 * return value from the kernel (a0), negative errno on failure.
 * Note: asm template harmonized to "ecall\n\t" to match my_syscall0/3.
 */
#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5)                        \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("a1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("a2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("a3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("a4") = (long)(arg5);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"ecall\n\t"                                                   \
		: "+r"(_arg1)                                                 \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5),             \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
| |
/* Issue syscall <num> with six arguments (a0..a5); evaluates to the raw
 * return value from the kernel (a0), negative errno on failure.
 * Note: asm template harmonized to "ecall\n\t" to match my_syscall0/3.
 */
#define my_syscall6(num, arg1, arg2, arg3, arg4, arg5, arg6)                  \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("a1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("a2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("a3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("a4") = (long)(arg5);                    \
	register long _arg6 __asm__ ("a5") = (long)(arg6);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"ecall\n\t"                                                   \
		: "+r"(_arg1)                                                 \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), "r"(_arg6), \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
| |
| /* startup code */ |
/* Program entry point: sets up the global pointer and stack, then hands off
 * to the C runtime (_start_c) with the original stack pointer as argument.
 * Marked weak so user code may override it; noreturn because _start_c never
 * comes back. Stack-protector and frame pointer are disabled since no frame
 * exists yet at this point.
 */
void __attribute__((weak, noreturn, optimize("Os", "omit-frame-pointer"))) __no_stack_protector _start(void)
{
	__asm__ volatile (
		".option push\n"
		".option norelax\n"               /* disable linker relaxation: gp is not valid yet */
		"lla gp, __global_pointer$\n"     /* load the global pointer register */
		".option pop\n"
		"mv a0, sp\n"           /* save stack pointer to a0, as arg1 of _start_c */
		"andi sp, a0, -16\n"    /* sp must be 16-byte aligned */
		"call _start_c\n"       /* transfer to c runtime */
	);
	__builtin_unreachable();
}
| |
| #endif /* _NOLIBC_ARCH_RISCV_H */ |