blob: 1296a990ff87721228c74d440fd292b1339f2a2a [file] [log] [blame]
Avi Kivity7d36db32010-08-03 14:07:34 +03001#include "ioram.h"
2#include "vm.h"
3#include "libcflat.h"
Gleb Natapove7c37962010-12-22 17:06:18 +02004#include "desc.h"
Avi Kivityd7143f32012-03-25 15:49:05 +02005#include "types.h"
Avi Kivity7d36db32010-08-03 14:07:34 +03006
7#define memset __builtin_memset
8#define TESTDEV_IO_PORT 0xe0
9
/* Count of exceptions observed by the fixup handlers below; each test
   zeroes it before arming a handler and checks it afterwards. */
static int exceptions;

/* Full general-purpose register file snapshot.  inregs is loaded before
   running an instruction via trap_emulator(), outregs receives the
   result, and save is the scratch area the insn_page asm exchanges with. */
struct regs {
	u64 rax, rbx, rcx, rdx;
	u64 rsi, rdi, rsp, rbp;
	u64 r8, r9, r10, r11;
	u64 r12, r13, r14, r15;
	u64 rip, rflags;
};
struct regs inregs, outregs, save;

/* Location and length of an instruction snippet emitted by MK_INSN(). */
struct insn_desc {
	u64 ptr;
	size_t len;
};
25
static char st1[] = "abcdefghijklmnop";

/* Test emulation of "rep outsb" in both directions (DF clear and DF set)
   against the test device port, then read a byte back to see which source
   byte was written last. */
void test_stringio()
{
	unsigned char r = 0;
	asm volatile("cld \n\t"		/* DF=0: SI increments */
		     "movw %0, %%dx \n\t"
		     "rep outsb \n\t"
		     : : "i"((short)TESTDEV_IO_PORT),
		       "S"(st1), "c"(sizeof(st1) - 1));
	asm volatile("inb %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
	report("outsb up", r == st1[sizeof(st1) - 2]); /* last char */

	asm volatile("std \n\t"		/* DF=1: SI decrements, start at end */
		     "movw %0, %%dx \n\t"
		     "rep outsb \n\t"
		     : : "i"((short)TESTDEV_IO_PORT),
		       "S"(st1 + sizeof(st1) - 2), "c"(sizeof(st1) - 1));
	asm volatile("cld \n\t" : : );	/* restore DF for later tests */
	asm volatile("in %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
	report("outsb down", r == st1[0]);
}
48
/* Exercise repe/cmps{b,w,l,q} on two buffers.  The caller guarantees the
   buffers match for the first 100 bytes and differ starting at byte 100,
   so the "(1)" cases run to RCX exhaustion and the "(2)" cases stop at
   the first mismatching element (RSI/RDI point one element past it).
   The leading xor/or on [tmp] sets ZF to a known state before the repe
   prefix is evaluated.
   NOTE(review): the "repe/cmpll" report labels look like typos for
   "repe/cmpsl" — confirm against the expected-output harness before
   renaming. */
void test_cmps_one(unsigned char *m1, unsigned char *m3)
{
	void *rsi, *rdi;
	long rcx, tmp;

	/* byte compare, all 30 elements equal */
	rsi = m1; rdi = m3; rcx = 30;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe/cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report("repe/cmpsb (1)", rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30);

	/* same, but with ZF clear on entry: repe must still start */
	rsi = m1; rdi = m3; rcx = 30;
	asm volatile("or $1, %[tmp]\n\t" // clear ZF
		     "repe/cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report("repe/cmpsb (1.zf)", rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30);

	/* word compare, 15 * 2 = 30 bytes, all equal */
	rsi = m1; rdi = m3; rcx = 15;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe/cmpsw"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report("repe/cmpsw (1)", rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30);

	/* dword compare, 7 * 4 = 28 bytes, all equal */
	rsi = m1; rdi = m3; rcx = 7;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe/cmpsl"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report("repe/cmpll (1)", rcx == 0 && rsi == m1 + 28 && rdi == m3 + 28);

	/* qword compare, 4 * 8 = 32 bytes, all equal */
	rsi = m1; rdi = m3; rcx = 4;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe/cmpsq"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report("repe/cmpsq (1)", rcx == 0 && rsi == m1 + 32 && rdi == m3 + 32);

	/* byte compare crossing the first difference at offset 100 */
	rsi = m1; rdi = m3; rcx = 130;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe/cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report("repe/cmpsb (2)",
	       rcx == 29 && rsi == m1 + 101 && rdi == m3 + 101);

	/* word compare stopping at the word containing offset 100 */
	rsi = m1; rdi = m3; rcx = 65;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe/cmpsw"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report("repe/cmpsw (2)",
	       rcx == 14 && rsi == m1 + 102 && rdi == m3 + 102);

	/* dword compare stopping at the dword containing offset 100 */
	rsi = m1; rdi = m3; rcx = 32;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe/cmpsl"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report("repe/cmpll (2)",
	       rcx == 6 && rsi == m1 + 104 && rdi == m3 + 104);

	/* qword compare stopping at the qword containing offset 100 */
	rsi = m1; rdi = m3; rcx = 16;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe/cmpsq"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report("repe/cmpsq (2)",
	       rcx == 3 && rsi == m1 + 104 && rdi == m3 + 104);

}
122
/* Build the three buffers test_cmps_one() expects: all agree on bytes
   0..99, and the first buffer diverges (value + 1) on bytes 100..199.
   Run the comparison suite RAM-vs-MMIO and MMIO-vs-MMIO. */
void test_cmps(void *mem)
{
	unsigned char *buf_a = mem;		/* MMIO-backed, diverges at 100 */
	unsigned char *buf_b = mem + 1024;	/* MMIO-backed, reference copy */
	unsigned char local[1024];		/* RAM reference copy */
	int i;

	for (i = 0; i < 200; ++i) {
		local[i] = i;
		buf_b[i] = i;
		buf_a[i] = (i < 100) ? i : i + 1;
	}
	test_cmps_one(buf_a, local);
	test_cmps_one(buf_a, buf_b);
}
135
/* Test scas{b,w,l,q} match and mismatch cases against a known 8-byte
   pattern at *mem.  High AX/EAX bits beyond the operand size must be
   ignored (hence the 0xff.. padding in the match cases).  Each check
   verifies both the ZF result and that RDI advanced by the operand size. */
void test_scas(void *mem)
{
	bool z;
	void *di;

	*(ulong *)mem = 0x77665544332211;

	di = mem;
	asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff11));
	report("scasb match", di == mem + 1 && z);

	di = mem;
	asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff54));
	report("scasb mismatch", di == mem + 1 && !z);

	di = mem;
	asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff2211));
	report("scasw match", di == mem + 2 && z);

	di = mem;
	asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xffdd11));
	report("scasw mismatch", di == mem + 2 && !z);

	di = mem;
	asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff44332211ul));
	report("scasd match", di == mem + 4 && z);

	di = mem;
	asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0x45332211));
	report("scasd mismatch", di == mem + 4 && !z);

	di = mem;
	asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(0x77665544332211ul));
	report("scasq match", di == mem + 8 && z);

	di = mem;
	asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(3));
	report("scasq mismatch", di == mem + 8 && !z);
}
175
/* Write CR8 (TPR) and read it back; both the written value and the
   source register must survive the round trip. */
void test_cr8(void)
{
	unsigned long src, dst;

	dst = 777;	/* sentinel, must be overwritten by the read */
	src = 3;
	asm volatile("mov %[src], %%cr8; mov %%cr8, %[dst]"
		     : [dst]"+r"(dst), [src]"+r"(src));
	report("mov %cr8", dst == 3 && src == 3);
}
186
/* Test the four push operand forms (imm8, reg, mem, imm32) onto a
   private stack carved out of mem, then inspect the stacked values.
   RSP is saved/restored around the sequence so the C runtime stack is
   untouched. */
void test_push(void *mem)
{
	unsigned long tmp;
	unsigned long *stack_top = mem + 4096;
	unsigned long *new_stack_top;
	unsigned long memw = 0x123456789abcdeful;

	memset(mem, 0x55, (void *)stack_top - mem);	/* poison the stack area */

	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq $-7 \n\t"
		     "pushq %[reg] \n\t"
		     "pushq (%[mem]) \n\t"
		     "pushq $-7070707 \n\t"
		     "mov %%rsp, %[new_stack_top] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [new_stack_top]"=r"(new_stack_top)
		     : [stack_top]"r"(stack_top),
		       [reg]"r"(-17l), [mem]"r"(&memw)
		     : "memory");

	/* pushes grow downwards: first push lands at stack_top[-1] */
	report("push $imm8", stack_top[-1] == -7ul);
	report("push %reg", stack_top[-2] == -17ul);
	report("push mem", stack_top[-3] == 0x123456789abcdeful);
	report("push $imm", stack_top[-4] == -7070707);
}
214
/* Test pop to memory and register, ret, leave and enter, again on a
   private stack inside mem.  tmp2 is static so the "popq %[tmp2]" memory
   form has a link-time address. */
void test_pop(void *mem)
{
	unsigned long tmp, tmp3, rsp, rbp;
	unsigned long *stack_top = mem + 4096;
	unsigned long memw = 0x123456789abcdeful;
	static unsigned long tmp2;

	memset(mem, 0x55, (void *)stack_top - mem);

	/* push from memory, pop to memory, on the regular stack */
	asm volatile("pushq %[val] \n\t"
		     "popq (%[mem])"
		     : : [val]"m"(memw), [mem]"r"(mem) : "memory");
	report("pop mem", *(unsigned long *)mem == memw);

	memw = 7 - memw;	/* new distinct value for the next case */
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=m"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report("pop mem (2)", tmp2 == memw);

	memw = 129443 - memw;
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=r"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report("pop reg", tmp2 == memw);

	/* ret must jump to the pushed address (label 1); the infinite loop
	   at label 2 is only reached if ret misbehaves */
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "push $1f \n\t"
		     "ret \n\t"
		     "2: jmp 2b \n\t"
		     "1: mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp) : [stack_top]"r"(stack_top)
		     : "memory");
	report("ret", 1);

	/* leave: RSP <- RBP, then pop RBP (loaded from stack_top[-1]) */
	stack_top[-1] = 0x778899;
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %%rbp, %[tmp3] \n\t"
		     "mov %[stack_top], %%rbp \n\t"
		     "leave \n\t"
		     "xchg %%rsp, %[tmp] \n\t"
		     "xchg %%rbp, %[tmp3]"
		     : [tmp]"=&r"(tmp), [tmp3]"=&r"(tmp3) : [stack_top]"r"(stack_top-1)
		     : "memory");
	report("leave", tmp == (ulong)stack_top && tmp3 == 0x778899);

	/* enter with frame size 0x1238, nesting 0: pushes old RBP, sets RBP
	   to the push location, allocates the frame */
	rbp = 0xaa55aa55bb66bb66ULL;
	rsp = (unsigned long)stack_top;
	asm volatile("xchg %%rsp, %[rsp] \n\t"
		     "xchg %%rbp, %[rbp] \n\t"
		     "enter $0x1238, $0 \n\t"
		     "xchg %%rsp, %[rsp] \n\t"
		     "xchg %%rbp, %[rbp]"
		     : [rsp]"+a"(rsp), [rbp]"+b"(rbp) : : "memory");
	report("enter",
	       rsp == (unsigned long)stack_top - 8 - 0x1238
	       && rbp == (unsigned long)stack_top - 8
	       && stack_top[-1] == 0xaa55aa55bb66bb66ULL);
}
285
/* Test far indirect jmp (rex64/ljmp *m) through an offset+selector pair
   built in mem.  A successful jump skips the "res = 0" line, so res
   stays 1. */
void test_ljmp(void *mem)
{
	unsigned char *m = mem;
	volatile int res = 1;

	/* 64-bit target offset followed by the current %cs selector */
	*(unsigned long**)m = &&jmpf;
	asm volatile ("data16/mov %%cs, %0":"=m"(*(m + sizeof(unsigned long))));
	asm volatile ("rex64/ljmp *%0"::"m"(*m));
	res = 0;	/* skipped when ljmp lands on jmpf */
jmpf:
	report("ljmp", res);
}
298
/* Test inc/dec (with and without lock prefix) and lock-prefixed not/neg
   on memory operands of byte and wider sizes. */
void test_incdecnotneg(void *mem)
{
	unsigned long *m = mem, v = 1234;
	unsigned char *mb = mem, vb = 66;

	*m = 0;

	asm volatile ("incl %0":"+m"(*m));
	report("incl", *m == 1);
	asm volatile ("decl %0":"+m"(*m));
	report("decl", *m == 0);
	asm volatile ("incb %0":"+m"(*m));
	report("incb", *m == 1);
	asm volatile ("decb %0":"+m"(*m));
	report("decb", *m == 0);

	asm volatile ("lock incl %0":"+m"(*m));
	report("lock incl", *m == 1);
	asm volatile ("lock decl %0":"+m"(*m));
	report("lock decl", *m == 0);
	asm volatile ("lock incb %0":"+m"(*m));
	report("lock incb", *m == 1);
	asm volatile ("lock decb %0":"+m"(*m));
	report("lock decb", *m == 0);

	*m = v;

	/* mirror each asm op on the C side and compare */
	asm ("lock negq %0" : "+m"(*m)); v = -v;
	report("lock negl", *m == v);
	asm ("lock notq %0" : "+m"(*m)); v = ~v;
	report("lock notl", *m == v);

	*mb = vb;

	asm ("lock negb %0" : "+m"(*mb)); vb = -vb;
	report("lock negb", *mb == vb);
	asm ("lock notb %0" : "+m"(*mb)); vb = ~vb;
	report("lock notb", *mb == vb);
}
338
/* Test smsw to a register and to memory.  The memory form must write
   only 16 bits: the surrounding words in the zeroed buffer must remain
   zero. */
void test_smsw(void)
{
	char mem[16];
	unsigned short msw, msw_orig, *pmsw;
	int i, zero;

	msw_orig = read_cr0();	/* low 16 bits of CR0 are the MSW */

	asm("smsw %0" : "=r"(msw));
	report("smsw (1)", msw == msw_orig);

	memset(mem, 0, 16);
	pmsw = (void *)mem;
	asm("smsw %0" : "=m"(pmsw[4]));
	zero = 1;
	for (i = 0; i < 8; ++i)
		if (i != 4 && pmsw[i])
			zero = 0;	/* smsw spilled outside its 16-bit slot */
	report("smsw (2)", msw == pmsw[4] && zero);
}
359
/* Test lmsw from register and memory, including the architectural rule
   that lmsw cannot clear CR0.PE.  CR0 is restored at the end. */
void test_lmsw(void)
{
	char mem[16];
	unsigned short msw, *pmsw;
	unsigned long cr0;

	cr0 = read_cr0();

	msw = cr0 ^ 8;	/* flip CR0.TS */
	asm("lmsw %0" : : "r"(msw));
	printf("before %lx after %lx\n", cr0, read_cr0());
	report("lmsw (1)", (cr0 ^ read_cr0()) == 8);

	pmsw = (void *)mem;
	*pmsw = cr0;	/* memory form restores the original MSW */
	asm("lmsw %0" : : "m"(*pmsw));
	printf("before %lx after %lx\n", cr0, read_cr0());
	report("lmsw (2)", cr0 == read_cr0());

	/* lmsw can't clear cr0.pe */
	msw = (cr0 & ~1ul) ^ 4;  /* change EM to force trap */
	asm("lmsw %0" : : "r"(msw));
	report("lmsw (3)", (cr0 ^ read_cr0()) == 4 && (cr0 & 1));

	/* back to normal */
	msw = cr0;
	asm("lmsw %0" : : "r"(msw));
}
388
/* Test xchg reg, mem for all four operand sizes.  Expected values follow
   the x86 partial-register rules: 8/16-bit writes merge into RAX, a
   32-bit write zero-extends, a 64-bit write replaces RAX entirely. */
void test_xchg(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	/* 8-bit: only AL and the low byte of *memq swap */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory");
	report("xchg reg, r/m (1)",
	       rax == 0xfedcba98765432ef && *memq == 0x123456789abcd10);

	/* 16-bit: AX and the low word swap */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory");
	report("xchg reg, r/m (2)",
	       rax == 0xfedcba987654cdef && *memq == 0x123456789ab3210);

	/* 32-bit: EAX swap zero-extends into the upper half of RAX */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory");
	report("xchg reg, r/m (3)",
	       rax == 0x89abcdef && *memq == 0x123456776543210);

	/* 64-bit: full swap */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory");
	report("xchg reg, r/m (4)",
	       rax == 0x123456789abcdef && *memq == 0xfedcba9876543210);
}
438
/* Test xadd reg, mem for all four operand sizes: memory receives
   old_mem + reg, the register receives old_mem (with the same partial-
   register merge/zero-extend rules as xchg). */
void test_xadd(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	/* 8-bit: 0xef + 0x10 = 0xff in the low byte of memory */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory");
	report("xadd reg, r/m (1)",
	       rax == 0xfedcba98765432ef && *memq == 0x123456789abcdff);

	/* 16-bit */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory");
	report("xadd reg, r/m (2)",
	       rax == 0xfedcba987654cdef && *memq == 0x123456789abffff);

	/* 32-bit: destination EAX zero-extends */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory");
	report("xadd reg, r/m (3)",
	       rax == 0x89abcdef && *memq == 0x1234567ffffffff);

	/* 64-bit: sum wraps to all-ones */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory");
	report("xadd reg, r/m (4)",
	       rax == 0x123456789abcdef && *memq == 0xffffffffffffffff);
}
488
/* Test btc with a memory operand: bit offsets beyond the operand width
   (32, 66) and negative register offsets must address the correct dword
   relative to the base. */
void test_btc(void *mem)
{
	unsigned int *a = mem;

	memset(mem, 0, 3 * sizeof(unsigned int));

	asm ("btcl $32, %0" :: "m"(a[0]) : "memory");	/* imm8 wraps mod 32 -> bit 0 of a[1]? no: bit 0 of a[0+1] slot per imm masking — lands in a[1] bit 0 per check below; TODO confirm against SDM */
	asm ("btcl $1, %0" :: "m"(a[1]) : "memory");
	asm ("btcl %1, %0" :: "m"(a[0]), "r"(66) : "memory");	/* reg offset 66 -> bit 2 of a[2] */
	report("btcl imm8, r/m", a[0] == 1 && a[1] == 2 && a[2] == 4);

	asm ("btcl %1, %0" :: "m"(a[3]), "r"(-1) : "memory");	/* negative offset reaches back into a[2] bit 31 */
	report("btcl reg, r/m", a[0] == 1 && a[1] == 2 && a[2] == 0x80000004);
}
503
/* Test bsf/bsr for 16/32/64-bit memory operands, plus the ZF=1 result
   when the source is zero.
   NOTE(review): the two zero-source checks reuse the labels
   "bsfq r/m, reg" / "bsrq r/m, reg" already used above — consider
   distinct names if the harness keys on them. */
void test_bsfbsr(void *mem)
{
	unsigned long rax, *memq = mem;
	unsigned eax, *meml = mem;
	unsigned short ax, *memw = mem;
	unsigned char z;

	/* bsf finds the lowest set bit */
	*memw = 0xc000;
	asm("bsfw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
	report("bsfw r/m, reg", ax == 14);

	*meml = 0xc0000000;
	asm("bsfl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
	report("bsfl r/m, reg", eax == 30);

	*memq = 0xc00000000000;
	asm("bsfq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
	report("bsfq r/m, reg", rax == 46);

	/* zero source: ZF must be set, destination undefined */
	*memq = 0;
	asm("bsfq %[mem], %[a]; setz %[z]"
	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
	report("bsfq r/m, reg", z == 1);

	/* bsr finds the highest set bit */
	*memw = 0xc000;
	asm("bsrw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
	report("bsrw r/m, reg", ax == 15);

	*meml = 0xc0000000;
	asm("bsrl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
	report("bsrl r/m, reg", eax == 31);

	*memq = 0xc00000000000;
	asm("bsrq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
	report("bsrq r/m, reg", rax == 47);

	*memq = 0;
	asm("bsrq %[mem], %[a]; setz %[z]"
	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
	report("bsrq r/m, reg", z == 1);
}
545
/* Test two- and three-operand imul with a memory source for all operand
   sizes.  16-bit forms merge into AX (upper RAX bits preserved), 32-bit
   forms zero-extend, imm8 and imm32 encodings are both covered. */
static void test_imul(ulong *mem)
{
	ulong a;

	*mem = 51; a = 0x1234567812345678UL;
	asm ("imulw %1, %%ax" : "+a"(a) : "m"(*mem));
	report("imul ax, mem", a == 0x12345678123439e8);

	*mem = 51; a = 0x1234567812345678UL;
	asm ("imull %1, %%eax" : "+a"(a) : "m"(*mem));
	report("imul eax, mem", a == 0xa06d39e8);

	*mem = 51; a = 0x1234567812345678UL;
	asm ("imulq %1, %%rax" : "+a"(a) : "m"(*mem));
	report("imul rax, mem", a == 0xA06D39EBA06D39E8UL);

	/* three-operand, imm8 encodings */
	*mem = 0x1234567812345678UL; a = 0x8765432187654321L;
	asm ("imulw $51, %1, %%ax" : "+a"(a) : "m"(*mem));
	report("imul ax, mem, imm8", a == 0x87654321876539e8);

	*mem = 0x1234567812345678UL;
	asm ("imull $51, %1, %%eax" : "+a"(a) : "m"(*mem));
	report("imul eax, mem, imm8", a == 0xa06d39e8);

	*mem = 0x1234567812345678UL;
	asm ("imulq $51, %1, %%rax" : "+a"(a) : "m"(*mem));
	report("imul rax, mem, imm8", a == 0xA06D39EBA06D39E8UL);

	/* three-operand, full immediate encodings (311 doesn't fit imm8) */
	*mem = 0x1234567812345678UL; a = 0x8765432187654321L;
	asm ("imulw $311, %1, %%ax" : "+a"(a) : "m"(*mem));
	report("imul ax, mem, imm", a == 0x8765432187650bc8);

	*mem = 0x1234567812345678UL;
	asm ("imull $311, %1, %%eax" : "+a"(a) : "m"(*mem));
	report("imul eax, mem, imm", a == 0x1d950bc8);

	*mem = 0x1234567812345678UL;
	asm ("imulq $311, %1, %%rax" : "+a"(a) : "m"(*mem));
	report("imul rax, mem, imm", a == 0x1D950BDE1D950BC8L);
}
586
/* Test divq (including the #DE fault path via ASM_TRY) and mul with a
   memory operand at every operand size.  On the fault path the movb that
   clears ex is skipped, so ex stays 1. */
static void test_muldiv(long *mem)
{
	long a, d, aa, dd;
	u8 ex = 1;

	/* divide by zero: must fault, leaving a, d and ex unchanged */
	*mem = 0; a = 1; d = 2;
	asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	     : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
	report("divq (fault)", a == 1 && d == 2 && ex);

	/* normal 128/64 division: quotient in rax, remainder in rdx */
	*mem = 987654321098765UL; a = 123456789012345UL; d = 123456789012345UL;
	asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	     : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
	report("divq (1)",
	       a == 0x1ffffffb1b963b33ul && d == 0x273ba4384ede2ul && !ex);

	/* mul: product widens into dx:ax / edx:eax / rdx:rax as applicable */
	aa = 0x1111111111111111; dd = 0x2222222222222222;
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulb %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report("mulb mem", a == 0x1111111111110363 && d == dd);
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulw %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report("mulw mem", a == 0x111111111111c963 && d == 0x2222222222220369);
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mull %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report("mull mem", a == 0x962fc963 && d == 0x369d036);
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulq %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report("mulq mem", a == 0x2fc962fc962fc963 && d == 0x369d0369d0369d0);
}
616
typedef unsigned __attribute__((vector_size(16))) sse128;

typedef union {
	sse128 sse;
	unsigned u[4];
} sse_union;

/* Lane-wise equality of two 128-bit values: true iff every one of the
   four 32-bit lanes matches. */
static bool sseeq(sse_union *v1, sse_union *v2)
{
	int lane;

	for (lane = 0; lane < 4; ++lane) {
		if (v1->u[lane] != v2->u[lane])
			return false;
	}
	return true;
}
635
/* Test movdqu and movaps between an XMM register and memory in both
   directions.  Clears CR0.EM/TS and sets CR4.OSFXSR so SSE is usable. */
static void test_sse(sse_union *mem)
{
	sse_union v;

	write_cr0(read_cr0() & ~6); /* EM, TS */
	write_cr4(read_cr4() | 0x200); /* OSFXSR */
	v.u[0] = 1; v.u[1] = 2; v.u[2] = 3; v.u[3] = 4;
	asm("movdqu %1, %0" : "=m"(*mem) : "x"(v.sse));
	report("movdqu (read)", sseeq(&v, mem));
	mem->u[0] = 5; mem->u[1] = 6; mem->u[2] = 7; mem->u[3] = 8;
	asm("movdqu %1, %0" : "=x"(v.sse) : "m"(*mem));
	report("movdqu (write)", sseeq(mem, &v));

	/* movaps requires 16-byte alignment of the memory operand */
	v.u[0] = 1; v.u[1] = 2; v.u[2] = 3; v.u[3] = 4;
	asm("movaps %1, %0" : "=m"(*mem) : "x"(v.sse));
	report("movaps (read)", sseeq(mem, &v));
	mem->u[0] = 5; mem->u[1] = 6; mem->u[2] = 7; mem->u[3] = 8;
	asm("movaps %1, %0" : "=x"(v.sse) : "m"(*mem));
	report("movaps (write)", sseeq(&v, mem));
}
656
/* Test movq between an MMX register and memory in both directions.
   fninit puts the FPU/MMX state into a known configuration first. */
static void test_mmx(uint64_t *mem)
{
	uint64_t v;

	write_cr0(read_cr0() & ~6); /* EM, TS */
	asm volatile("fninit");
	v = 0x0102030405060708ULL;
	asm("movq %1, %0" : "=m"(*mem) : "y"(v));
	report("movq (mmx, read)", v == *mem);
	*mem = 0x8070605040302010ull;
	asm("movq %1, %0" : "=y"(v) : "m"(*mem));
	report("movq (mmx, write)", v == *mem);
}
670
/* Assemble "movb $1, disp32(%rip); ret" by hand into insn_ram so the
   RIP-relative displacement resolves to byte 2 of *mem, call it, and
   check that exactly that byte became 1 (hence *mem == 0x10000 on a
   little-endian dword). */
static void test_rip_relative(unsigned *mem, char *insn_ram)
{
	/* movb $1, mem+2(%rip) */
	insn_ram[0] = 0xc6;
	insn_ram[1] = 0x05;
	/* disp32 is relative to the end of the 7-byte instruction */
	*(unsigned *)&insn_ram[2] = 2 + (char *)mem - (insn_ram + 7);
	insn_ram[6] = 0x01;
	/* ret */
	insn_ram[7] = 0xc3;

	*mem = 0;
	asm("callq *%1" : "+m"(*mem) : "r"(insn_ram));
	report("movb $imm, 0(%rip)", *mem == 0x10000);
}
Avi Kivityd7f3ee32011-03-29 14:44:49 +0200685
/* Test shld/shrd with a CL shift count on a 32-bit memory destination:
   the vacated bits must be filled from the second (register) operand. */
static void test_shld_shrd(u32 *mem)
{
	*mem = 0x12345678;
	/* top 3 bits of 0xaaaaaaaa (101) shift in at the bottom */
	asm("shld %2, %1, %0" : "+m"(*mem) : "r"(0xaaaaaaaaU), "c"((u8)3));
	report("shld (cl)", *mem == ((0x12345678 << 3) | 5));
	*mem = 0x12345678;
	/* bottom 3 bits of 0x55555555 (101) shift in at the top */
	asm("shrd %2, %1, %0" : "+m"(*mem) : "r"(0x55555555U), "c"((u8)3));
	report("shrd (cl)", *mem == ((0x12345678 >> 3) | (5u << 29)));
}
695
/* Exchange the entire GPR file (and, via the surrounding pushf/popf,
   RFLAGS) with the `save` area, so the instruction under test runs with
   a fully controlled register state. */
#define INSN_XCHG_ALL				\
	"xchg %rax, 0+save \n\t"		\
	"xchg %rbx, 8+save \n\t"		\
	"xchg %rcx, 16+save \n\t"		\
	"xchg %rdx, 24+save \n\t"		\
	"xchg %rsi, 32+save \n\t"		\
	"xchg %rdi, 40+save \n\t"		\
	"xchg %rsp, 48+save \n\t"		\
	"xchg %rbp, 56+save \n\t"		\
	"xchg %r8, 64+save \n\t"		\
	"xchg %r9, 72+save \n\t"		\
	"xchg %r10, 80+save \n\t"		\
	"xchg %r11, 88+save \n\t"		\
	"xchg %r12, 96+save \n\t"		\
	"xchg %r13, 104+save \n\t"		\
	"xchg %r14, 112+save \n\t"		\
	"xchg %r15, 120+save \n\t"

/* A page-aligned trampoline used by trap_emulator().  Entry at insn_page
   is a bare ret (used only to prime the code TLB); entry at insn_page+1
   loads RFLAGS and all GPRs from `save`, executes the 32-byte slot at
   test_insn (patched with the instruction under test in the alternate
   page), then writes the registers and flags back and returns. */
asm(
	".align 4096\n\t"
	"insn_page:\n\t"
	"ret\n\t"
	"pushf\n\t"
	"push 136+save \n\t"
	"popf \n\t"
	INSN_XCHG_ALL
	"test_insn:\n\t"
	"in  (%dx),%al\n\t"
	".skip 31, 0x90\n\t"
	"test_insn_end:\n\t"
	INSN_XCHG_ALL
	"pushf \n\t"
	"pop 136+save \n\t"
	"popf \n\t"
	"ret \n\t"
	"insn_page_end:\n\t"
	".align 4096\n\t"
);
734
/* Assemble `str` into .text.insn and emit a matching struct insn_desc
   named insn_<name> (pointer + length) into .data.insn, then declare it
   for C code.  Used to build snippets that trap_emulator() copies into
   the alternate instruction page. */
#define MK_INSN(name, str)				\
    asm (						\
	 ".pushsection .data.insn  \n\t"		\
	 "insn_" #name ": \n\t"				\
	 ".quad 1001f, 1002f - 1001f \n\t"		\
	 ".popsection \n\t"				\
	 ".pushsection .text.insn, \"ax\" \n\t"		\
	 "1001: \n\t"					\
	 "insn_code_" #name ": " str " \n\t"		\
	 "1002: \n\t"					\
	 ".popsection"					\
    );							\
    extern struct insn_desc insn_##name;
748
/* Run alt_insn with the register state from `inregs`, leaving the result
   in `outregs`.  The trick: the CPU's code TLB is primed with insn_page,
   then the page tables are switched to alt_insn_page (which has the test
   instruction patched into the test_insn slot).  The stale-ITLB fetch
   forces a trap that the hypervisor emulates against alt_insn_page. */
static void trap_emulator(uint64_t *mem, void *alt_insn_page,
			  struct insn_desc *alt_insn)
{
	ulong *cr3 = (ulong *)read_cr3();
	void *insn_ram;
	extern u8 insn_page[], test_insn[];

	insn_ram = vmap(virt_to_phys(insn_page), 4096);
	memcpy(alt_insn_page, insn_page, 4096);
	memcpy(alt_insn_page + (test_insn - insn_page),
			(void *)(alt_insn->ptr), alt_insn->len);
	save = inregs;

	/* Load the code TLB with insn_page, but point the page tables at
	   alt_insn_page (and keep the data TLB clear, for AMD decode assist).
	   This will make the CPU trap on the insn_page instruction but the
	   hypervisor will see alt_insn_page. */
	install_page(cr3, virt_to_phys(insn_page), insn_ram);
	invlpg(insn_ram);
	/* Load code TLB */
	asm volatile("call *%0" : : "r"(insn_ram));
	install_page(cr3, virt_to_phys(alt_insn_page), insn_ram);
	/* Trap, let hypervisor emulate at alt_insn_page */
	asm volatile("call *%0": : "r"(insn_ram+1));

	outregs = save;
}
776
/* Exception fixup: count the exception and skip the 3-byte faulting
   instruction (callers arm this only for instructions of that length). */
static void advance_rip_by_3_and_note_exception(struct ex_regs *regs)
{
	++exceptions;
	regs->rip += 3;
}
782
/* Check that an emulated "movq %mm0, (%rax)" delivers the pending x87
   #MF exception.  An unmasked divide error is queued with fdivp, then
   the movq is run through trap_emulator; exactly one #MF must arrive. */
static void test_mmx_movq_mf(uint64_t *mem, uint8_t *insn_page,
			     uint8_t *alt_insn_page, void *insn_ram)
{
	uint16_t fcw = 0;	/* all exceptions unmasked */
	/* movq %mm0, (%rax) */
	void *stack = alloc_page();	/* private stack for the trapped call */

	write_cr0(read_cr0() & ~6);  /* TS, EM */
	exceptions = 0;
	handle_exception(MF_VECTOR, advance_rip_by_3_and_note_exception);
	asm volatile("fninit; fldcw %0" : : "m"(fcw));
	asm volatile("fldz; fldz; fdivp"); /* generate exception */

	MK_INSN(mmx_movq_mf, "movq %mm0, (%rax) \n\t");
	inregs = (struct regs){ .rsp=(u64)stack+1024 };
	trap_emulator(mem, alt_insn_page, &insn_mmx_movq_mf);
	/* exit MMX mode */
	asm volatile("fnclex; emms");
	report("movq mmx generates #MF", exceptions == 1);
	handle_exception(MF_VECTOR, 0);	/* disarm */
}
804
/* Check emulation of the 10-byte movabs (mov imm64 to register) via
   trap_emulator: RCX in outregs must hold the full 64-bit immediate. */
static void test_movabs(uint64_t *mem, uint8_t *insn_page,
		       uint8_t *alt_insn_page, void *insn_ram)
{
	/* mov $0x9090909090909090, %rcx */
	MK_INSN(movabs, "mov $0x9090909090909090, %rcx\n\t");
	inregs = (struct regs){ 0 };
	trap_emulator(mem, alt_insn_page, &insn_movabs);
	report("64-bit mov imm2", outregs.rcx == 0x9090909090909090);
}
814
/* Test a 16-bit MMIO access that straddles a page boundary (main() maps
   the same physical MMIO page at mem and mem+4096). */
static void test_crosspage_mmio(volatile uint8_t *mem)
{
	volatile uint16_t w, *pw;

	pw = (volatile uint16_t *)&mem[4095];	/* bytes 4095 and 4096 */
	mem[4095] = 0x99;
	mem[4096] = 0x77;
	asm volatile("mov %1, %0" : "=r"(w) : "m"(*pw) : "memory");
	report("cross-page mmio read", w == 0x7799);	/* little endian */
	asm volatile("mov %1, %0" : "=m"(*pw) : "r"((uint16_t)0x88aa));
	report("cross-page mmio write", mem[4095] == 0xaa && mem[4096] == 0x88);
}
827
/* Test "rep insb" into an MMIO destination that crosses a page boundary.
   The test device repeats the last word written, so every byte read back
   is 0x99. */
static void test_string_io_mmio(volatile uint8_t *mem)
{
	/* Cross MMIO pages.*/
	volatile uint8_t *mmio = mem + 4032;

	asm volatile("outw %%ax, %%dx \n\t" : : "a"(0x9999), "d"(TESTDEV_IO_PORT));

	asm volatile ("cld; rep insb" : : "d" (TESTDEV_IO_PORT), "D" (mmio), "c" (1024));

	report("string_io_mmio", mmio[1023] == 0x99);
}
839
/* kvm doesn't allow lidt/lgdt from mmio, so the test is disabled */
#if 0
/* Load GDTR/IDTR from an MMIO memory operand, read the value back with
   sgdt/sidt, then restore the original table pointers.  Interrupts are
   disabled while the bogus tables are installed. */
static void test_lgdt_lidt(volatile uint8_t *mem)
{
	struct descriptor_table_ptr orig, fresh = {};

	sgdt(&orig);
	*(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
		.limit = 0xf234,
		.base = 0x12345678abcd,
	};
	cli();
	asm volatile("lgdt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
	sgdt(&fresh);
	lgdt(&orig);
	sti();
	report("lgdt (long address)", orig.limit == fresh.limit && orig.base == fresh.base);

	sidt(&orig);
	*(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
		.limit = 0x432f,
		.base = 0xdbca87654321,
	};
	cli();
	asm volatile("lidt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
	sidt(&fresh);
	lidt(&orig);
	sti();
	report("lidt (long address)", orig.limit == fresh.limit && orig.base == fresh.base);
}
#endif
Avi Kivity47c14612012-06-27 11:36:32 +0300871
/* #GP fixup for test_sreg(): count the fault and resume execution at the
   ss_bad_rpl_cont label placed just after the faulting mov-to-%ss. */
static void ss_bad_rpl(struct ex_regs *regs)
{
	extern char ss_bad_rpl_cont;

	++exceptions;
	regs->rip = (ulong)&ss_bad_rpl_cont;
}
879
/* Test segment-register loads into %ss: a null selector is legal, but a
   null selector whose RPL differs from CPL must #GP.  %ss is restored
   before returning. */
static void test_sreg(volatile uint16_t *mem)
{
	u16 ss = read_ss();

	// check for null segment load
	*mem = 0;
	asm volatile("mov %0, %%ss" : : "m"(*mem));
	report("mov null, %ss", read_ss() == 0);

	// check for exception when ss.rpl != cpl on null segment load
	exceptions = 0;
	handle_exception(GP_VECTOR, ss_bad_rpl);
	*mem = 3;	/* null selector with RPL 3, CPL is 0 */
	asm volatile("mov %0, %%ss; ss_bad_rpl_cont:" : : "m"(*mem));
	report("mov null, %ss (with ss.rpl != cpl)", exceptions == 1 && read_ss() == 0);
	handle_exception(GP_VECTOR, 0);
	write_ss(ss);
}
898
/* Test lldt with a memory operand: install a temporary GDT containing an
   LDT descriptor at selector 0x8, load it, and verify with sldt.  The
   original GDT is restored with interrupts disabled throughout. */
static void test_lldt(volatile uint16_t *mem)
{
	u64 gdt[] = { 0, 0x0000f82000000ffffull /* ldt descriptor */ };
	struct descriptor_table_ptr gdt_ptr = { .limit = 0xffff, .base = (ulong)&gdt };
	struct descriptor_table_ptr orig_gdt;

	cli();
	sgdt(&orig_gdt);
	lgdt(&gdt_ptr);
	*mem = 0x8;
	asm volatile("lldt %0" : : "m"(*mem));
	lgdt(&orig_gdt);
	sti();
	report("lldt", sldt() == *mem);
}
914
/* Test ltr with a memory operand: clear the busy bit of the current TSS
   descriptor (ltr faults on a busy TSS), reload the same selector, and
   verify both the selector and that ltr set the busy bit again. */
static void test_ltr(volatile uint16_t *mem)
{
	struct descriptor_table_ptr gdt_ptr;
	uint64_t *gdt, *trp;
	uint16_t tr = str();
	uint64_t busy_mask = (uint64_t)1 << 41;	/* busy bit in a TSS descriptor */

	sgdt(&gdt_ptr);
	gdt = (uint64_t *)gdt_ptr.base;
	trp = &gdt[tr >> 3];
	*trp &= ~busy_mask;
	*mem = tr;
	asm volatile("ltr %0" : : "m"(*mem) : "memory");
	report("ltr", str() == tr && (*trp & busy_mask));
}
930
Avi Kivity6cff92d2013-01-04 16:17:04 +0200931static void test_simplealu(u32 *mem)
932{
933 *mem = 0x1234;
934 asm("or %1, %0" : "+m"(*mem) : "r"(0x8001));
935 report("or", *mem == 0x9235);
936 asm("add %1, %0" : "+m"(*mem) : "r"(2));
937 report("add", *mem == 0x9237);
938 asm("xor %1, %0" : "+m"(*mem) : "r"(0x1111));
939 report("xor", *mem == 0x8326);
940 asm("sub %1, %0" : "+m"(*mem) : "r"(0x26));
941 report("sub", *mem == 0x8300);
942 asm("clc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
943 report("adc(0)", *mem == 0x8400);
944 asm("stc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
945 report("adc(0)", *mem == 0x8501);
946 asm("clc; sbb %1, %0" : "+m"(*mem) : "r"(0));
947 report("sbb(0)", *mem == 0x8501);
948 asm("stc; sbb %1, %0" : "+m"(*mem) : "r"(0));
949 report("sbb(1)", *mem == 0x8500);
950 asm("and %1, %0" : "+m"(*mem) : "r"(0xfe77));
951 report("and", *mem == 0x8400);
952 asm("test %1, %0" : "+m"(*mem) : "r"(0xf000));
953 report("test", *mem == 0x8400);
954}
955
/* Set up paging and the IDT, map the test-device MMIO page twice in a
   row (for the cross-page tests), allocate the trap_emulator pages, run
   every emulator test, and return the summary exit status. */
int main()
{
	void *mem;
	void *insn_page, *alt_insn_page;
	void *insn_ram;
	unsigned long t1, t2;

	setup_vm();
	setup_idt();
	mem = alloc_vpages(2);
	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem);
	// install the page twice to test cross-page mmio
	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem + 4096);
	insn_page = alloc_page();
	alt_insn_page = alloc_page();
	insn_ram = vmap(virt_to_phys(insn_page), 4096);

	// test mov reg, r/m and mov r/m, reg
	t1 = 0x123456789abcdef;
	asm volatile("mov %[t1], (%[mem]) \n\t"
		     "mov (%[mem]), %[t2]"
		     : [t2]"=r"(t2)
		     : [t1]"r"(t1), [mem]"r"(mem)
		     : "memory");
	report("mov reg, r/m (1)", t2 == 0x123456789abcdef);

	test_simplealu(mem);
	test_cmps(mem);
	test_scas(mem);

	test_push(mem);
	test_pop(mem);

	test_xchg(mem);
	test_xadd(mem);

	test_cr8();

	test_smsw();
	test_lmsw();
	test_ljmp(mem);
	test_stringio();
	test_incdecnotneg(mem);
	test_btc(mem);
	test_bsfbsr(mem);
	test_imul(mem);
	test_muldiv(mem);
	test_sse(mem);
	test_mmx(mem);
	test_rip_relative(mem, insn_ram);
	test_shld_shrd(mem);
	//test_lgdt_lidt(mem);	/* disabled: kvm forbids lgdt/lidt from mmio */
	test_sreg(mem);
	test_lldt(mem);
	test_ltr(mem);

	test_mmx_movq_mf(mem, insn_page, alt_insn_page, insn_ram);
	test_movabs(mem, insn_page, alt_insn_page, insn_ram);

	test_crosspage_mmio(mem);

	test_string_io_mmio(mem);

	return report_summary();
}