#include "ioram.h"
#include "vm.h"
#include "libcflat.h"
#include "desc.h"
#include "types.h"
#include "processor.h"
#include "vmalloc.h"
#include "alloc_page.h"
#include "usermode.h"

#define TESTDEV_IO_PORT 0xe0

#define MAGIC_NUM 0xdeadbeefdeadbeefUL
#define GS_BASE 0x400000

static int exceptions;

/* Forced emulation prefix, used to invoke the emulator unconditionally. */
#define KVM_FEP "ud2; .byte 'k', 'v', 'm';"
#define KVM_FEP_LENGTH 5
static int fep_available = 1;
22
Arthur Chunqi Lic5a2a732013-06-20 22:36:08 +080023struct regs {
24 u64 rax, rbx, rcx, rdx;
25 u64 rsi, rdi, rsp, rbp;
26 u64 r8, r9, r10, r11;
27 u64 r12, r13, r14, r15;
28 u64 rip, rflags;
29};
30struct regs inregs, outregs, save;
31
32struct insn_desc {
33 u64 ptr;
34 size_t len;
35};
36
Avi Kivity7d36db32010-08-03 14:07:34 +030037static char st1[] = "abcdefghijklmnop";
38
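/*
 * String I/O through the emulator: st1 is written to the test device
 * forwards (DF clear) and backwards (DF set), then one byte is read back.
 * This assumes the test device at TESTDEV_IO_PORT latches the last byte it
 * was sent, so the read-back must match the final character of each pass.
 */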
static void test_stringio(void)
{
	unsigned char r = 0;
	asm volatile("cld \n\t"
		     "movw %0, %%dx \n\t"
		     "rep outsb \n\t"
		     : : "i"((short)TESTDEV_IO_PORT),
		       "S"(st1), "c"(sizeof(st1) - 1));
	asm volatile("inb %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
	report(r == st1[sizeof(st1) - 2], "outsb up"); /* last char */

	asm volatile("std \n\t"
		     "movw %0, %%dx \n\t"
		     "rep outsb \n\t"
		     : : "i"((short)TESTDEV_IO_PORT),
		       "S"(st1 + sizeof(st1) - 2), "c"(sizeof(st1) - 1));
	asm volatile("cld \n\t" : : );
	asm volatile("in %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
	report(r == st1[0], "outsb down");
}

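/*
 * repe cmps{b,w,l,q} compares [RSI] with [RDI], advancing both by the
 * operand size, and stops when RCX hits zero or a comparison differs.  The
 * buffers built by test_cmps() match for the first 100 bytes and differ
 * from byte 100 on, so the short runs below exhaust RCX while the longer
 * runs stop at the element containing the first mismatch.
 */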
static void test_cmps_one(unsigned char *m1, unsigned char *m3)
{
	void *rsi, *rdi;
	long rcx, tmp;

	rsi = m1; rdi = m3; rcx = 30;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe cmpsb (1)");

	rsi = m1; rdi = m3; rcx = 30;
	asm volatile("or $1, %[tmp]\n\t" // clear ZF
		     "repe cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30,
	       "repe cmpsb (1.zf)");

	rsi = m1; rdi = m3; rcx = 15;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsw"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe cmpsw (1)");

	rsi = m1; rdi = m3; rcx = 7;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsl"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 28 && rdi == m3 + 28, "repe cmpsl (1)");

	rsi = m1; rdi = m3; rcx = 4;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsq"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 32 && rdi == m3 + 32, "repe cmpsq (1)");

	rsi = m1; rdi = m3; rcx = 130;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 29 && rsi == m1 + 101 && rdi == m3 + 101,
	       "repe cmpsb (2)");

	rsi = m1; rdi = m3; rcx = 65;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsw"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 14 && rsi == m1 + 102 && rdi == m3 + 102,
	       "repe cmpsw (2)");

	rsi = m1; rdi = m3; rcx = 32;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsl"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 6 && rsi == m1 + 104 && rdi == m3 + 104,
	       "repe cmpsl (2)");

	rsi = m1; rdi = m3; rcx = 16;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsq"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 3 && rsi == m1 + 104 && rdi == m3 + 104,
	       "repe cmpsq (2)");
}

static void test_cmps(void *mem)
{
	unsigned char *m1 = mem, *m2 = mem + 1024;
	unsigned char m3[1024];

	for (int i = 0; i < 100; ++i)
		m1[i] = m2[i] = m3[i] = i;
	for (int i = 100; i < 200; ++i)
		m1[i] = (m3[i] = m2[i] = i) + 1;
	test_cmps_one(m1, m3);
	test_cmps_one(m1, m2);
}

static void test_scas(void *mem)
{
	bool z;
	void *di;

	*(ulong *)mem = 0x77665544332211;

	di = mem;
	asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff11));
	report(di == mem + 1 && z, "scasb match");

	di = mem;
	asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff54));
	report(di == mem + 1 && !z, "scasb mismatch");

	di = mem;
	asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff2211));
	report(di == mem + 2 && z, "scasw match");

	di = mem;
	asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xffdd11));
	report(di == mem + 2 && !z, "scasw mismatch");

	di = mem;
	asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff44332211ul));
	report(di == mem + 4 && z, "scasd match");

	di = mem;
	asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0x45332211));
	report(di == mem + 4 && !z, "scasd mismatch");

	di = mem;
	asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(0x77665544332211ul));
	report(di == mem + 8 && z, "scasq match");

	di = mem;
	asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(3));
	report(di == mem + 8 && !z, "scasq mismatch");
}

static void test_cr8(void)
{
	unsigned long src, dst;

	dst = 777;
	src = 3;
	asm volatile("mov %[src], %%cr8; mov %%cr8, %[dst]"
		     : [dst]"+r"(dst), [src]"+r"(src));
	report(dst == 3 && src == 3, "mov %%cr8");
}

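/*
 * Cover the push encodings the emulator must distinguish: push imm8 and
 * push imm32 (both sign-extended to 64 bits), push reg, and push m64, all
 * on a private stack so the stored values can be inspected afterwards.
 */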
static void test_push(void *mem)
{
	unsigned long tmp;
	unsigned long *stack_top = mem + 4096;
	unsigned long *new_stack_top;
	unsigned long memw = 0x123456789abcdeful;

	memset(mem, 0x55, (void *)stack_top - mem);

	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq $-7 \n\t"
		     "pushq %[reg] \n\t"
		     "pushq (%[mem]) \n\t"
		     "pushq $-7070707 \n\t"
		     "mov %%rsp, %[new_stack_top] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [new_stack_top]"=r"(new_stack_top)
		     : [stack_top]"r"(stack_top),
		       [reg]"r"(-17l), [mem]"r"(&memw)
		     : "memory");

	report(stack_top[-1] == -7ul, "push $imm8");
	report(stack_top[-2] == -17ul, "push %%reg");
	report(stack_top[-3] == 0x123456789abcdeful, "push mem");
	report(stack_top[-4] == -7070707, "push $imm");
}

static void test_pop(void *mem)
{
	unsigned long tmp, tmp3, rsp, rbp;
	unsigned long *stack_top = mem + 4096;
	unsigned long memw = 0x123456789abcdeful;
	static unsigned long tmp2;

	memset(mem, 0x55, (void *)stack_top - mem);

	asm volatile("pushq %[val] \n\t"
		     "popq (%[mem])"
		     : : [val]"m"(memw), [mem]"r"(mem) : "memory");
	report(*(unsigned long *)mem == memw, "pop mem");

	memw = 7 - memw;
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=m"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report(tmp2 == memw, "pop mem (2)");

	memw = 129443 - memw;
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=r"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report(tmp2 == memw, "pop reg");

	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "lea 1f(%%rip), %%rax \n\t"
		     "push %%rax \n\t"
		     "ret \n\t"
		     "2: jmp 2b \n\t"
		     "1: mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp) : [stack_top]"r"(stack_top)
		     : "memory", "rax");
	report_pass("ret");

	stack_top[-1] = 0x778899;
	asm volatile("mov %[stack_top], %%r8 \n\t"
		     "mov %%rsp, %%r9 \n\t"
		     "xchg %%rbp, %%r8 \n\t"
		     "leave \n\t"
		     "xchg %%rsp, %%r9 \n\t"
		     "xchg %%rbp, %%r8 \n\t"
		     "mov %%r9, %[tmp] \n\t"
		     "mov %%r8, %[tmp3]"
		     : [tmp]"=&r"(tmp), [tmp3]"=&r"(tmp3) : [stack_top]"r"(stack_top - 1)
		     : "memory", "r8", "r9");
	report(tmp == (ulong)stack_top && tmp3 == 0x778899, "leave");

	rbp = 0xaa55aa55bb66bb66ULL;
	rsp = (unsigned long)stack_top;
	asm volatile("mov %[rsp], %%r8 \n\t"
		     "mov %[rbp], %%r9 \n\t"
		     "xchg %%rsp, %%r8 \n\t"
		     "xchg %%rbp, %%r9 \n\t"
		     "enter $0x1238, $0 \n\t"
		     "xchg %%rsp, %%r8 \n\t"
		     "xchg %%rbp, %%r9 \n\t"
		     "xchg %%r8, %[rsp] \n\t"
		     "xchg %%r9, %[rbp]"
		     : [rsp]"+a"(rsp), [rbp]"+b"(rbp) : : "memory", "r8", "r9");
	report(rsp == (unsigned long)stack_top - 8 - 0x1238
	       && rbp == (unsigned long)stack_top - 8
	       && stack_top[-1] == 0xaa55aa55bb66bb66ULL,
	       "enter");
}

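/*
 * Build an m16:64 far pointer in memory: the 64-bit target offset followed
 * by a 16-bit code segment selector.  The data16 prefix keeps the %cs store
 * to two bytes, and the rex64 prefix selects the 64-bit-offset form of
 * ljmp.  A correctly emulated far jump lands on jmpf before "res = 0" runs.
 */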
static void test_ljmp(void *mem)
{
	unsigned char *m = mem;
	volatile int res = 1;

	*(unsigned long **)m = &&jmpf;
	asm volatile ("data16 mov %%cs, %0" : "=m"(*(m + sizeof(unsigned long))));
	asm volatile ("rex64 ljmp *%0" : : "m"(*m));
	res = 0;
jmpf:
	report(res, "ljmp");
}

static void test_incdecnotneg(void *mem)
{
	unsigned long *m = mem, v = 1234;
	unsigned char *mb = mem, vb = 66;

	*m = 0;

	asm volatile ("incl %0" : "+m"(*m));
	report(*m == 1, "incl");
	asm volatile ("decl %0" : "+m"(*m));
	report(*m == 0, "decl");
	asm volatile ("incb %0" : "+m"(*m));
	report(*m == 1, "incb");
	asm volatile ("decb %0" : "+m"(*m));
	report(*m == 0, "decb");

	asm volatile ("lock incl %0" : "+m"(*m));
	report(*m == 1, "lock incl");
	asm volatile ("lock decl %0" : "+m"(*m));
	report(*m == 0, "lock decl");
	asm volatile ("lock incb %0" : "+m"(*m));
	report(*m == 1, "lock incb");
	asm volatile ("lock decb %0" : "+m"(*m));
	report(*m == 0, "lock decb");

	*m = v;

	asm ("lock negq %0" : "+m"(*m)); v = -v;
	report(*m == v, "lock negq");
	asm ("lock notq %0" : "+m"(*m)); v = ~v;
	report(*m == v, "lock notq");

	*mb = vb;

	asm ("lock negb %0" : "+m"(*mb)); vb = -vb;
	report(*mb == vb, "lock negb");
	asm ("lock notb %0" : "+m"(*mb)); vb = ~vb;
	report(*mb == vb, "lock notb");
}

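/*
 * smsw stores the low 16 bits of CR0; with a memory operand only a word is
 * written.  The (3) case targets h_mem, which main() is expected to back
 * with emulated MMIO, forcing an exit so that the emulator rather than the
 * CPU executes the instruction.
 */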
static void test_smsw(uint64_t *h_mem)
{
	char mem[16];
	unsigned short msw, msw_orig, *pmsw;
	int i, zero;

	msw_orig = read_cr0();

	asm("smsw %0" : "=r"(msw));
	report(msw == msw_orig, "smsw (1)");

	memset(mem, 0, 16);
	pmsw = (void *)mem;
	asm("smsw %0" : "=m"(pmsw[4]));
	zero = 1;
	for (i = 0; i < 8; ++i)
		if (i != 4 && pmsw[i])
			zero = 0;
	report(msw == pmsw[4] && zero, "smsw (2)");

	/* Trigger exit on smsw */
	*h_mem = 0x12345678abcdeful;
	asm volatile("smsw %0" : "+m"(*h_mem));
	report(msw == (unsigned short)*h_mem &&
	       (*h_mem & ~0xfffful) == 0x12345678ab0000ul, "smsw (3)");
}

static void test_lmsw(void)
{
	char mem[16];
	unsigned short msw, *pmsw;
	unsigned long cr0;

	cr0 = read_cr0();

	msw = cr0 ^ 8;
	asm("lmsw %0" : : "r"(msw));
	printf("before %lx after %lx\n", cr0, read_cr0());
	report((cr0 ^ read_cr0()) == 8, "lmsw (1)");

	pmsw = (void *)mem;
	*pmsw = cr0;
	asm("lmsw %0" : : "m"(*pmsw));
	printf("before %lx after %lx\n", cr0, read_cr0());
	report(cr0 == read_cr0(), "lmsw (2)");

	/* lmsw can't clear cr0.pe */
	msw = (cr0 & ~1ul) ^ 4;  /* change EM to force trap */
	asm("lmsw %0" : : "r"(msw));
	report((cr0 ^ read_cr0()) == 4 && (cr0 & 1), "lmsw (3)");

	/* back to normal */
	msw = cr0;
	asm("lmsw %0" : : "r"(msw));
}

static void test_xchg(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcd10,
	       "xchg reg, r/m (1)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba987654cdef && *memq == 0x123456789ab3210,
	       "xchg reg, r/m (2)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x89abcdef && *memq == 0x123456776543210,
	       "xchg reg, r/m (3)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x123456789abcdef && *memq == 0xfedcba9876543210,
	       "xchg reg, r/m (4)");
}

static void test_xadd(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcdff,
	       "xadd reg, r/m (1)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba987654cdef && *memq == 0x123456789abffff,
	       "xadd reg, r/m (2)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x89abcdef && *memq == 0x1234567ffffffff,
	       "xadd reg, r/m (3)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x123456789abcdef && *memq == 0xffffffffffffffff,
	       "xadd reg, r/m (4)");
}

static void test_btc(void *mem)
{
	unsigned int *a = mem;

	memset(mem, 0, 4 * sizeof(unsigned int));

	asm ("btcl $32, %0" : : "m"(a[0]) : "memory");
	asm ("btcl $1, %0" : : "m"(a[1]) : "memory");
	asm ("btcl %1, %0" : : "m"(a[0]), "r"(66) : "memory");
	report(a[0] == 1 && a[1] == 2 && a[2] == 4, "btcl imm8, r/m");

	asm ("btcl %1, %0" : : "m"(a[3]), "r"(-1) : "memory");
	report(a[0] == 1 && a[1] == 2 && a[2] == 0x80000004, "btcl reg, r/m");

	asm ("btcq %1, %0" : : "m"(a[2]), "r"(-1l) : "memory");
	report(a[0] == 1 && a[1] == 0x80000002 && a[2] == 0x80000004 && a[3] == 0,
	       "btcq reg, r/m");
}

static void test_bsfbsr(void *mem)
{
	unsigned long rax, *memq = mem;
	unsigned eax, *meml = mem;
	unsigned short ax, *memw = mem;
	unsigned char z;

	*memw = 0xc000;
	asm("bsfw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
	report(ax == 14, "bsfw r/m, reg");

	*meml = 0xc0000000;
	asm("bsfl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
	report(eax == 30, "bsfl r/m, reg");

	*memq = 0xc00000000000;
	asm("bsfq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
	report(rax == 46, "bsfq r/m, reg");

	*memq = 0;
	asm("bsfq %[mem], %[a]; setz %[z]"
	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
	report(z == 1, "bsfq r/m, reg (zero src)");

	*memw = 0xc000;
	asm("bsrw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
	report(ax == 15, "bsrw r/m, reg");

	*meml = 0xc0000000;
	asm("bsrl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
	report(eax == 31, "bsrl r/m, reg");

	*memq = 0xc00000000000;
	asm("bsrq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
	report(rax == 47, "bsrq r/m, reg");

	*memq = 0;
	asm("bsrq %[mem], %[a]; setz %[z]"
	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
	report(z == 1, "bsrq r/m, reg (zero src)");
}

static void test_imul(ulong *mem)
{
	ulong a;

	*mem = 51; a = 0x1234567812345678UL;
	asm ("imulw %1, %%ax" : "+a"(a) : "m"(*mem));
	report(a == 0x12345678123439e8, "imul ax, mem");

	*mem = 51; a = 0x1234567812345678UL;
	asm ("imull %1, %%eax" : "+a"(a) : "m"(*mem));
	report(a == 0xa06d39e8, "imul eax, mem");

	*mem = 51; a = 0x1234567812345678UL;
	asm ("imulq %1, %%rax" : "+a"(a) : "m"(*mem));
	report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem");

	*mem = 0x1234567812345678UL; a = 0x8765432187654321L;
	asm ("imulw $51, %1, %%ax" : "+a"(a) : "m"(*mem));
	report(a == 0x87654321876539e8, "imul ax, mem, imm8");

	*mem = 0x1234567812345678UL;
	asm ("imull $51, %1, %%eax" : "+a"(a) : "m"(*mem));
	report(a == 0xa06d39e8, "imul eax, mem, imm8");

	*mem = 0x1234567812345678UL;
	asm ("imulq $51, %1, %%rax" : "+a"(a) : "m"(*mem));
	report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem, imm8");

	*mem = 0x1234567812345678UL; a = 0x8765432187654321L;
	asm ("imulw $311, %1, %%ax" : "+a"(a) : "m"(*mem));
	report(a == 0x8765432187650bc8, "imul ax, mem, imm");

	*mem = 0x1234567812345678UL;
	asm ("imull $311, %1, %%eax" : "+a"(a) : "m"(*mem));
	report(a == 0x1d950bc8, "imul eax, mem, imm");

	*mem = 0x1234567812345678UL;
	asm ("imulq $311, %1, %%rax" : "+a"(a) : "m"(*mem));
	report(a == 0x1D950BDE1D950BC8L, "imul rax, mem, imm");
}

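/*
 * divq raises #DE on a zero divisor (the first case below) or when the
 * quotient of the 128-bit RDX:RAX dividend overflows 64 bits.  ASM_TRY
 * installs a fixup that resumes at the 1: label, skipping "movb $0, %2",
 * so ex stays 1 exactly when the fault occurred.
 */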
static void test_muldiv(long *mem)
{
	long a, d, aa, dd;
	u8 ex = 1;

	*mem = 0; a = 1; d = 2;
	asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	     : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
	report(a == 1 && d == 2 && ex, "divq (fault)");

	*mem = 987654321098765UL; a = 123456789012345UL; d = 123456789012345UL;
	asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	     : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
	report(a == 0x1ffffffb1b963b33ul && d == 0x273ba4384ede2ul && !ex,
	       "divq (1)");
	aa = 0x1111111111111111; dd = 0x2222222222222222;
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulb %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x1111111111110363 && d == dd, "mulb mem");
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulw %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x111111111111c963 && d == 0x2222222222220369, "mulw mem");
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mull %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x962fc963 && d == 0x369d036, "mull mem");
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulq %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x2fc962fc962fc963 && d == 0x369d0369d0369d0, "mulq mem");
}

typedef unsigned __attribute__((vector_size(16))) sse128;

static bool sseeq(uint32_t *v1, uint32_t *v2)
{
	bool ok = true;
	int i;

	for (i = 0; i < 4; ++i) {
		ok &= v1[i] == v2[i];
	}

	return ok;
}

static __attribute__((target("sse2"))) void test_sse(uint32_t *mem)
{
	sse128 vv;
	uint32_t *v = (uint32_t *)&vv;

	write_cr0(read_cr0() & ~6);	/* EM, TS */
	write_cr4(read_cr4() | 0x200);	/* OSFXSR */
	memset(&vv, 0, sizeof(vv));

#define TEST_RW_SSE(insn) do { \
	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; \
	asm(insn " %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); \
	report(sseeq(v, mem), insn " (read)"); \
	mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; \
	asm(insn " %1, %0" : "=x"(vv) : "m"(*mem) : "memory"); \
	report(sseeq(v, mem), insn " (write)"); \
} while (0)

	TEST_RW_SSE("movdqu");
	TEST_RW_SSE("movaps");
	TEST_RW_SSE("movapd");
	TEST_RW_SSE("movups");
	TEST_RW_SSE("movupd");
#undef TEST_RW_SSE
}

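/*
 * SSE fault cases: movaps with an unaligned memory operand raises #GP(0),
 * while movups tolerates misalignment but still takes #PF when the access
 * extends into an unmapped page.  Each handler counts the exception and
 * resumes at a label placed just after the faulting instruction.
 */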
static void unaligned_movaps_handler(struct ex_regs *regs)
{
	extern char unaligned_movaps_cont;

	++exceptions;
	regs->rip = (ulong)&unaligned_movaps_cont;
}

static void cross_movups_handler(struct ex_regs *regs)
{
	extern char cross_movups_cont;

	++exceptions;
	regs->rip = (ulong)&cross_movups_cont;
}

static __attribute__((target("sse2"))) void test_sse_exceptions(void *cross_mem)
{
	sse128 vv;
	uint32_t *v = (uint32_t *)&vv;
	uint32_t *mem;
	uint8_t *bytes = cross_mem;	// aligned on PAGE_SIZE*2
	void *page2 = (void *)(&bytes[4096]);
	struct pte_search search;
	pteval_t orig_pte;

	// setup memory for unaligned access
	mem = (uint32_t *)(&bytes[8]);

	// test unaligned access for movups, movupd and movaps
	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4;
	mem[0] = 5; mem[1] = 6; mem[2] = 8; mem[3] = 9;
	asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory");
	report(sseeq(v, mem), "movups unaligned");

	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4;
	mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8;
	asm("movupd %1, %0" : "=m"(*mem) : "x"(vv) : "memory");
	report(sseeq(v, mem), "movupd unaligned");
	exceptions = 0;
	handle_exception(GP_VECTOR, unaligned_movaps_handler);
	asm("movaps %1, %0\n\t unaligned_movaps_cont:"
	    : "=m"(*mem) : "x"(vv));
	handle_exception(GP_VECTOR, 0);
	report(exceptions == 1, "unaligned movaps exception");

	// setup memory for cross page access
	mem = (uint32_t *)(&bytes[4096 - 8]);
	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4;
	mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8;

	asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory");
	report(sseeq(v, mem), "movups unaligned crosspage");

	// invalidate second page
	search = find_pte_level(current_page_table(), page2, 1);
	orig_pte = *search.pte;
	install_pte(current_page_table(), 1, page2, 0, NULL);
	invlpg(page2);

	exceptions = 0;
	handle_exception(PF_VECTOR, cross_movups_handler);
	asm("movups %1, %0\n\t cross_movups_cont:" : "=m"(*mem) : "x"(vv) :
	    "memory");
	handle_exception(PF_VECTOR, 0);
	report(exceptions == 1, "movups crosspage exception");

	// restore invalidated page
	install_pte(current_page_table(), 1, page2, orig_pte, NULL);
}

static void test_mmx(uint64_t *mem)
{
	uint64_t v;

	write_cr0(read_cr0() & ~6);	/* EM, TS */
	asm volatile("fninit");
	v = 0x0102030405060708ULL;
	asm("movq %1, %0" : "=m"(*mem) : "y"(v));
	report(v == *mem, "movq (mmx, read)");
	*mem = 0x8070605040302010ull;
	asm("movq %1, %0" : "=y"(v) : "m"(*mem));
	report(v == *mem, "movq (mmx, write)");
}

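/*
 * Hand-assembled "movb $1, disp32(%rip)" (c6 05 <disp32> <imm8>) followed
 * by ret.  RIP-relative displacements are computed from the end of the
 * 7-byte instruction, hence the "- (insn_ram + 7)"; the byte 0x01 should
 * land at offset 2 of *mem, giving the expected value 0x10000.
 */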
static void test_rip_relative(unsigned *mem, char *insn_ram)
{
	/* movb $1, mem+2(%rip) */
	insn_ram[0] = 0xc6;
	insn_ram[1] = 0x05;
	*(unsigned *)&insn_ram[2] = 2 + (char *)mem - (insn_ram + 7);
	insn_ram[6] = 0x01;
	/* ret */
	insn_ram[7] = 0xc3;

	*mem = 0;
	asm("callq *%1" : "+m"(*mem) : "r"(insn_ram));
	report(*mem == 0x10000, "movb $imm, 0(%%rip)");
}

static void test_shld_shrd(u32 *mem)
{
	*mem = 0x12345678;
	asm("shld %2, %1, %0" : "+m"(*mem) : "r"(0xaaaaaaaaU), "c"((u8)3));
	report(*mem == ((0x12345678 << 3) | 5), "shld (cl)");
	*mem = 0x12345678;
	asm("shrd %2, %1, %0" : "+m"(*mem) : "r"(0x55555555U), "c"((u8)3));
	report(*mem == ((0x12345678 >> 3) | (5u << 29)), "shrd (cl)");
}

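/*
 * "cmpl %eax, %eax" sets ZF, so the cmovnel condition is false and no load
 * happens.  A 32-bit cmov still zero-extends its destination register even
 * when the move is not taken, so RAX must end up as 0x12345678 (the low
 * half of the original constant) rather than the full 64-bit value.
 */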
static void test_cmov(u32 *mem)
{
	u64 val;
	*mem = 0xabcdef12u;
	asm ("movq $0x1234567812345678, %%rax\n\t"
	     "cmpl %%eax, %%eax\n\t"
	     "cmovnel (%[mem]), %%eax\n\t"
	     "movq %%rax, %[val]\n\t"
	     : [val]"=r"(val) : [mem]"r"(mem) : "%rax", "cc");
	report(val == 0x12345678ul, "cmovnel");
}

static unsigned long rip_advance;

static void advance_rip_and_note_exception(struct ex_regs *regs)
{
	++exceptions;
	regs->rip += rip_advance;
}

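/*
 * An unmasked x87 exception is delivered lazily: fdivp leaves a pending
 * #MF that fires at the next waiting FP/MMX instruction.  Here that is a
 * forced-emulation movq from %mm0, so the emulator must notice the pending
 * fault and inject #MF instead of completing the store; the handler then
 * skips the movq via rip_advance.
 */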
static void test_mmx_movq_mf(uint64_t *mem)
{
	/* movq %mm0, (%rax) */
	extern char movq_start, movq_end;

	uint16_t fcw = 0;	/* all exceptions unmasked */
	write_cr0(read_cr0() & ~6);	/* TS, EM */
	exceptions = 0;
	handle_exception(MF_VECTOR, advance_rip_and_note_exception);
	asm volatile("fninit; fldcw %0" : : "m"(fcw));
	asm volatile("fldz; fldz; fdivp");	/* generate exception */

	rip_advance = &movq_end - &movq_start;
	asm(KVM_FEP "movq_start: movq %mm0, (%rax); movq_end:");
	/* exit MMX mode */
	asm volatile("fnclex; emms");
	report(exceptions == 1, "movq mmx generates #MF");
	handle_exception(MF_VECTOR, 0);
}

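/*
 * An indirect jump to a non-canonical target faults with #GP with RIP
 * still pointing at the jmp, so the handler advances past the instruction
 * (nc_jmp_start to nc_jmp_end) to make forward progress.
 */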
static void test_jmp_noncanonical(uint64_t *mem)
{
	extern char nc_jmp_start, nc_jmp_end;

	*mem = 0x1111111111111111ul;

	exceptions = 0;
	rip_advance = &nc_jmp_end - &nc_jmp_start;
	handle_exception(GP_VECTOR, advance_rip_and_note_exception);
	asm volatile ("nc_jmp_start: jmp *%0; nc_jmp_end:" : : "m"(*mem));
	report(exceptions == 1, "jump to non-canonical address");
	handle_exception(GP_VECTOR, 0);
}

static void test_movabs(uint64_t *mem)
{
	/* mov $0x9090909090909090, %rcx */
	unsigned long rcx;
	asm(KVM_FEP "mov $0x9090909090909090, %0" : "=c" (rcx) : "0" (0));
	report(rcx == 0x9090909090909090, "64-bit mov imm2");
}

static void test_smsw_reg(uint64_t *mem)
{
	unsigned long cr0 = read_cr0();
	unsigned long rax;
	const unsigned long in_rax = 0x1234567890abcdeful;

	asm(KVM_FEP "smsww %w0\n\t" : "=a" (rax) : "0" (in_rax));
	report((u16)rax == (u16)cr0 && rax >> 16 == in_rax >> 16,
	       "16-bit smsw reg");

	asm(KVM_FEP "smswl %k0\n\t" : "=a" (rax) : "0" (in_rax));
	report(rax == (u32)cr0, "32-bit smsw reg");

	asm(KVM_FEP "smswq %q0\n\t" : "=a" (rax) : "0" (in_rax));
	report(rax == cr0, "64-bit smsw reg");
}

static void test_nop(uint64_t *mem)
{
	unsigned long rax;
	const unsigned long in_rax = 0x1234567890abcdeful;
	asm(KVM_FEP "nop\n\t" : "=a" (rax) : "0" (in_rax));
	report(rax == in_rax, "nop");
}

static void test_mov_dr(uint64_t *mem)
{
	unsigned long rax;
	const unsigned long in_rax = 0;
	bool rtm_support = this_cpu_has(X86_FEATURE_RTM);
	unsigned long dr6_fixed_1 = rtm_support ? 0xfffe0ff0ul : 0xffff0ff0ul;
	asm(KVM_FEP "movq %0, %%dr6\n\t"
	    KVM_FEP "movq %%dr6, %0\n\t" : "=a" (rax) : "a" (in_rax));
	report(rax == dr6_fixed_1, "mov_dr6");
}

static void test_push16(uint64_t *mem)
{
	uint64_t rsp1, rsp2;
	uint16_t r;

	asm volatile ("movq %%rsp, %[rsp1]\n\t"
		      "pushw %[v]\n\t"
		      "popw %[r]\n\t"
		      "movq %%rsp, %[rsp2]\n\t"
		      "movq %[rsp1], %%rsp\n\t" :
		      [rsp1]"=r"(rsp1), [rsp2]"=r"(rsp2), [r]"=r"(r)
		      : [v]"m"(*mem) : "memory");
	report(rsp1 == rsp2, "push16");
}

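/*
 * main() maps the same ioram physical page at mem and mem + 4096, so the
 * 16-bit access at mem[4095] straddles two MMIO pages and the emulator
 * has to split it into two accesses and reassemble the bytes in order.
 */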
static void test_crosspage_mmio(volatile uint8_t *mem)
{
	volatile uint16_t w, *pw;

	pw = (volatile uint16_t *)&mem[4095];
	mem[4095] = 0x99;
	mem[4096] = 0x77;
	asm volatile("mov %1, %0" : "=r"(w) : "m"(*pw) : "memory");
	report(w == 0x7799, "cross-page mmio read");
	asm volatile("mov %1, %0" : "=m"(*pw) : "r"((uint16_t)0x88aa));
	report(mem[4095] == 0xaa && mem[4096] == 0x88, "cross-page mmio write");
}

static void test_string_io_mmio(volatile uint8_t *mem)
{
	/* Cross MMIO pages. */
	volatile uint8_t *mmio = mem + 4032;

	asm volatile("outw %%ax, %%dx \n\t" : : "a"(0x9999), "d"(TESTDEV_IO_PORT));

	asm volatile ("cld; rep insb" : : "d" (TESTDEV_IO_PORT), "D" (mmio), "c" (1024));

	report(mmio[1023] == 0x99, "string_io_mmio");
}

/* kvm doesn't allow lidt/lgdt from mmio, so the test is disabled */
#if 0
static void test_lgdt_lidt(volatile uint8_t *mem)
{
	struct descriptor_table_ptr orig, fresh = {};

	sgdt(&orig);
	*(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
		.limit = 0xf234,
		.base = 0x12345678abcd,
	};
	cli();
	asm volatile("lgdt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
	sgdt(&fresh);
	lgdt(&orig);
	sti();
	report(orig.limit == fresh.limit && orig.base == fresh.base,
	       "lgdt (long address)");

	sidt(&orig);
	*(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
		.limit = 0x432f,
		.base = 0xdbca87654321,
	};
	cli();
	asm volatile("lidt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
	sidt(&fresh);
	lidt(&orig);
	sti();
	report(orig.limit == fresh.limit && orig.base == fresh.base,
	       "lidt (long address)");
}
#endif

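/*
 * In 64-bit mode a null selector may be loaded into SS as long as its RPL
 * equals the current CPL; a null selector with RPL 3 loaded at CPL 0 must
 * raise #GP.  Both cases are exercised below.
 */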
static void ss_bad_rpl(struct ex_regs *regs)
{
	extern char ss_bad_rpl_cont;

	++exceptions;
	regs->rip = (ulong)&ss_bad_rpl_cont;
}

static void test_sreg(volatile uint16_t *mem)
{
	u16 ss = read_ss();

	// check for null segment load
	*mem = 0;
	asm volatile("mov %0, %%ss" : : "m"(*mem));
	report(read_ss() == 0, "mov null, %%ss");

	// check for exception when ss.rpl != cpl on null segment load
	exceptions = 0;
	handle_exception(GP_VECTOR, ss_bad_rpl);
	*mem = 3;
	asm volatile("mov %0, %%ss; ss_bad_rpl_cont:" : : "m"(*mem));
	report(exceptions == 1 && read_ss() == 0,
	       "mov null, %%ss (with ss.rpl != cpl)");
	handle_exception(GP_VECTOR, 0);
	write_ss(ss);
}

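/*
 * usr_gs_mov() runs at CPL 3 via run_in_user() and performs a %gs-relative
 * load whose effective address resolves back to &dummy once GS_BASE is
 * added.  Even though the iret nullifies the GS selector, the GS base in
 * 64-bit mode comes from MSR_GS_BASE rather than the descriptor, so the
 * load should still succeed and return MAGIC_NUM.
 */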
static uint64_t usr_gs_mov(void)
{
	static uint64_t dummy = MAGIC_NUM;
	uint64_t dummy_ptr = (uint64_t)&dummy;
	uint64_t ret;

	dummy_ptr -= GS_BASE;
	asm volatile("mov %%gs:(%%rcx), %%rax" : "=a"(ret) : "c"(dummy_ptr));

	return ret;
}

static void test_iret(void)
{
	uint64_t val;
	bool raised_vector;

	/* Update GS base to 4MiB */
	wrmsr(MSR_GS_BASE, GS_BASE);

	/*
	 * Per the SDM, when iret returns to an outer privilege level, each
	 * data segment register (ES, FS, GS, and DS) whose descriptor fails
	 * the privilege check is loaded with a null selector.  In this test
	 * case, GS becomes null on the return to user mode.
	 */
	val = run_in_user((usermode_func)usr_gs_mov, GP_VECTOR,
			  0, 0, 0, 0, &raised_vector);

	report(val == MAGIC_NUM, "Test ret/iret with a nullified segment");
}

/* Broken emulation causes triple fault, which skips the other tests. */
#if 0
static void test_lldt(volatile uint16_t *mem)
{
	u64 gdt[] = { 0,	/* null descriptor */
#ifdef __X86_64__
		      0,	/* ldt descriptor is 16 bytes in long mode */
#endif
		      0x0000f82000000ffffull /* ldt descriptor */ };
	struct descriptor_table_ptr gdt_ptr = { .limit = sizeof(gdt) - 1,
						.base = (ulong)&gdt };
	struct descriptor_table_ptr orig_gdt;

	cli();
	sgdt(&orig_gdt);
	lgdt(&gdt_ptr);
	*mem = 0x8;
	asm volatile("lldt %0" : : "m"(*mem));
	lgdt(&orig_gdt);
	sti();
	report(sldt() == *mem, "lldt");
}
#endif

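/*
 * ltr requires an available (non-busy) TSS descriptor and sets the busy
 * bit (bit 41 of the descriptor) as a side effect, so the current TR's
 * descriptor is marked available first to let the reload succeed.
 */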
static void test_ltr(volatile uint16_t *mem)
{
	struct descriptor_table_ptr gdt_ptr;
	uint64_t *gdt, *trp;
	uint16_t tr = str();
	uint64_t busy_mask = (uint64_t)1 << 41;

	sgdt(&gdt_ptr);
	gdt = (uint64_t *)gdt_ptr.base;
	trp = &gdt[tr >> 3];
	*trp &= ~busy_mask;
	*mem = tr;
	asm volatile("ltr %0" : : "m"(*mem) : "memory");
	report(str() == tr && (*trp & busy_mask), "ltr");
}

static void test_simplealu(u32 *mem)
{
	*mem = 0x1234;
	asm("or %1, %0" : "+m"(*mem) : "r"(0x8001));
	report(*mem == 0x9235, "or");
	asm("add %1, %0" : "+m"(*mem) : "r"(2));
	report(*mem == 0x9237, "add");
	asm("xor %1, %0" : "+m"(*mem) : "r"(0x1111));
	report(*mem == 0x8326, "xor");
	asm("sub %1, %0" : "+m"(*mem) : "r"(0x26));
	report(*mem == 0x8300, "sub");
	asm("clc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
	report(*mem == 0x8400, "adc(0)");
	asm("stc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
	report(*mem == 0x8501, "adc(1)");
	asm("clc; sbb %1, %0" : "+m"(*mem) : "r"(0));
	report(*mem == 0x8501, "sbb(0)");
	asm("stc; sbb %1, %0" : "+m"(*mem) : "r"(0));
	report(*mem == 0x8500, "sbb(1)");
	asm("and %1, %0" : "+m"(*mem) : "r"(0xfe77));
	report(*mem == 0x8400, "and");
	asm("test %1, %0" : "+m"(*mem) : "r"(0xf000));
	report(*mem == 0x8400, "test");
}

static void illegal_movbe_handler(struct ex_regs *regs)
{
	extern char bad_movbe_cont;

	++exceptions;
	regs->rip = (ulong)&bad_movbe_cont;
}

static void test_illegal_movbe(void)
{
	if (!this_cpu_has(X86_FEATURE_MOVBE)) {
		report_skip("illegal movbe");
		return;
	}

	exceptions = 0;
	handle_exception(UD_VECTOR, illegal_movbe_handler);
	/* movbe with a register operand (0f 38 f0 c0) is undefined => #UD */
	asm volatile(".byte 0x0f; .byte 0x38; .byte 0xf0; .byte 0xc0;\n\t"
		     " bad_movbe_cont:" : : : "rax");
	report(exceptions == 1, "illegal movbe");
	handle_exception(UD_VECTOR, 0);
}

static void record_no_fep(struct ex_regs *regs)
{
	fep_available = 0;
	regs->rip += KVM_FEP_LENGTH;
}

int main(void)
{
	void *mem;
	void *insn_page;
	void *insn_ram;
	void *cross_mem;
	unsigned long t1, t2;

	setup_vm();
	handle_exception(UD_VECTOR, record_no_fep);
	asm(KVM_FEP "nop");
	handle_exception(UD_VECTOR, 0);

	mem = alloc_vpages(2);
	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem);
	// install the page twice to test cross-page mmio
	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem + 4096);
	insn_page = alloc_page();
	insn_ram = vmap(virt_to_phys(insn_page), 4096);
	cross_mem = vmap(virt_to_phys(alloc_pages(2)), 2 * PAGE_SIZE);

	// test mov reg, r/m and mov r/m, reg
	t1 = 0x123456789abcdef;
	asm volatile("mov %[t1], (%[mem]) \n\t"
		     "mov (%[mem]), %[t2]"
		     : [t2]"=r"(t2)
		     : [t1]"r"(t1), [mem]"r"(mem)
		     : "memory");
	report(t2 == 0x123456789abcdef, "mov reg, r/m (1)");

	test_simplealu(mem);
	test_cmps(mem);
	test_scas(mem);

	test_push(mem);
	test_pop(mem);

	test_xchg(mem);
	test_xadd(mem);

	test_cr8();

	test_smsw(mem);
	test_lmsw();
	test_ljmp(mem);
	test_stringio();
	test_incdecnotneg(mem);
	test_btc(mem);
	test_bsfbsr(mem);
	test_imul(mem);
	test_muldiv(mem);
	test_sse(mem);
	test_sse_exceptions(cross_mem);
	test_mmx(mem);
	test_rip_relative(mem, insn_ram);
	test_shld_shrd(mem);
	//test_lgdt_lidt(mem);
	test_sreg(mem);
	test_iret();
	//test_lldt(mem);
	test_ltr(mem);
	test_cmov(mem);

	if (fep_available) {
		test_mmx_movq_mf(mem);
		test_movabs(mem);
		test_smsw_reg(mem);
		test_nop(mem);
		test_mov_dr(mem);
	} else {
		report_skip("skipping register-only tests, "
			    "use kvm.force_emulation_prefix=1 to enable");
	}

	test_push16(mem);
	test_crosspage_mmio(mem);

	test_string_io_mmio(mem);

	test_jmp_noncanonical(mem);
	test_illegal_movbe();

	return report_summary();
}