comparison gen_x86.c @ 49:d2e43d64e999

Add untested support for and, eor, or, swap, tst and nop instructions. Add call to m68k_save_result for add and sub so that they will properly save results for memory destinations
author Mike Pavone <pavone@retrodev.com>
date Wed, 12 Dec 2012 23:21:11 -0800
parents 3e7bfde7606e
children 937b47c9b79b
comparison
equal deleted inserted replaced
48:0bdda50c7364 49:d2e43d64e999
26 #define OP_MOV 0x88 26 #define OP_MOV 0x88
27 #define OP_PUSHF 0x9C 27 #define OP_PUSHF 0x9C
28 #define OP_POPF 0x9D 28 #define OP_POPF 0x9D
29 #define OP_MOV_I8R 0xB0 29 #define OP_MOV_I8R 0xB0
30 #define OP_MOV_IR 0xB8 30 #define OP_MOV_IR 0xB8
//Shift/rotate group: 0xC0 = immediate count, 0xD0 = count of 1, 0xD2 = count in CL
31 #define OP_SHIFTROT_IR 0xC0
31 #define OP_RETN 0xC3 32 #define OP_RETN 0xC3
32 #define OP_MOV_IEA 0xC6 33 #define OP_MOV_IEA 0xC6
34 #define OP_SHIFTROT_1 0xD0
35 #define OP_SHIRTROT_CL 0xD2 //NOTE(review): "SHIRTROT" looks like a typo for "SHIFTROT" — confirm no other code depends on this spelling before renaming
33 #define OP_CALL 0xE8 36 #define OP_CALL 0xE8
34 #define OP_JMP 0xE9 37 #define OP_JMP 0xE9
35 #define OP_JMP_BYTE 0xEB 38 #define OP_JMP_BYTE 0xEB
36 #define OP_CALL_EA 0xFF 39 #define OP_CALL_EA 0xFF
37 40
44 #define OP_EX_SBBI 0x3 47 #define OP_EX_SBBI 0x3
45 #define OP_EX_ANDI 0x4 48 #define OP_EX_ANDI 0x4
46 #define OP_EX_SUBI 0x5 49 #define OP_EX_SUBI 0x5
47 #define OP_EX_XORI 0x6 50 #define OP_EX_XORI 0x6
48 #define OP_EX_CMPI 0x7 51 #define OP_EX_CMPI 0x7
52
//Opcode extensions for the shift/rotate group: these values go in the
//reg field of the ModRM byte (shifted left 3) to select the operation
53 #define OP_EX_ROL 0x0
54 #define OP_EX_ROR 0x1
55 #define OP_EX_RCL 0x2
56 #define OP_EX_RCR 0x3
57 #define OP_EX_SHL 0x4
58 #define OP_EX_SHR 0x5
59 #define OP_EX_SAL 0x6 //identical to SHL
60 #define OP_EX_SAR 0x7
49 61
50 #define BIT_IMMED_RAX 0x4 62 #define BIT_IMMED_RAX 0x4
51 #define BIT_DIR 0x2 63 #define BIT_DIR 0x2
52 #define BIT_SIZE 0x1 64 #define BIT_SIZE 0x1
53 65
292 } 304 }
293 return out; 305 return out;
294 } 306 }
295 307
296 308
309 uint8_t * x86_shiftrot_ir(uint8_t * out, uint8_t op_ex, uint8_t val, uint8_t dst, uint8_t size)
310 {
311 if (size == SZ_W) {
312 *(out++) = PRE_SIZE;
313 }
314 if (size == SZ_Q || dst >= R8 || (size == SZ_B && dst >= RSP && dst <= RDI)) {
315 *out = PRE_REX;
316 if (size == SZ_Q) {
317 *out |= REX_QUAD;
318 }
319 if (dst >= R8) {
320 *out |= REX_RM_FIELD;
321 dst -= (R8 - X86_R8);
322 }
323 out++;
324 }
325 if (dst >= AH && dst <= BH) {
326 dst -= (AH-X86_AH);
327 }
328
329 *(out++) = (val == 1 ? OP_SHIFTROT_1: OP_SHIFTROT_IR) | (size == SZ_B ? 0 : BIT_SIZE);
330 *(out++) = MODE_REG_DIRECT | dst | (op_ex << 3);
331 if (val != 1) {
332 *(out++) = val;
333 }
334 return out;
335 }
336
337 uint8_t * x86_shiftrot_irdisp8(uint8_t * out, uint8_t op_ex, uint8_t val, uint8_t dst, int8_t disp, uint8_t size)
338 {
339 if (size == SZ_W) {
340 *(out++) = PRE_SIZE;
341 }
342 if (size == SZ_Q || dst >= R8 || (size == SZ_B && dst >= RSP && dst <= RDI)) {
343 *out = PRE_REX;
344 if (size == SZ_Q) {
345 *out |= REX_QUAD;
346 }
347 if (dst >= R8) {
348 *out |= REX_RM_FIELD;
349 dst -= (R8 - X86_R8);
350 }
351 out++;
352 }
353 if (dst >= AH && dst <= BH) {
354 dst -= (AH-X86_AH);
355 }
356
357 *(out++) = (val == 1 ? OP_SHIFTROT_1: OP_SHIFTROT_IR) | (size == SZ_B ? 0 : BIT_SIZE);
358 *(out++) = MODE_REG_DISPLACE8 | dst | (op_ex << 3);
359 *(out++) = disp;
360 if (val != 1) {
361 *(out++) = val;
362 }
363 return out;
364 }
365
366 uint8_t * rol_ir(uint8_t * out, uint8_t val, uint8_t dst, uint8_t size)
367 {
368 return x86_shiftrot_ir(out, OP_EX_ROL, val, dst, size);
369 }
370
371 uint8_t * ror_ir(uint8_t * out, uint8_t val, uint8_t dst, uint8_t size)
372 {
373 return x86_shiftrot_ir(out, OP_EX_ROR, val, dst, size);
374 }
375
376 uint8_t * rcl_ir(uint8_t * out, uint8_t val, uint8_t dst, uint8_t size)
377 {
378 return x86_shiftrot_ir(out, OP_EX_RCL, val, dst, size);
379 }
380
381 uint8_t * rcr_ir(uint8_t * out, uint8_t val, uint8_t dst, uint8_t size)
382 {
383 return x86_shiftrot_ir(out, OP_EX_RCR, val, dst, size);
384 }
385
386 uint8_t * shl_ir(uint8_t * out, uint8_t val, uint8_t dst, uint8_t size)
387 {
388 return x86_shiftrot_ir(out, OP_EX_SHL, val, dst, size);
389 }
390
391 uint8_t * shr_ir(uint8_t * out, uint8_t val, uint8_t dst, uint8_t size)
392 {
393 return x86_shiftrot_ir(out, OP_EX_SHR, val, dst, size);
394 }
395
396 uint8_t * sar_ir(uint8_t * out, uint8_t val, uint8_t dst, uint8_t size)
397 {
398 return x86_shiftrot_ir(out, OP_EX_SAR, val, dst, size);
399 }
400
401 uint8_t * rol_irdisp8(uint8_t * out, uint8_t val, uint8_t dst_base, int8_t disp, uint8_t size)
402 {
403 return x86_shiftrot_irdisp8(out, OP_EX_ROL, val, dst_base, disp, size);
404 }
405
406 uint8_t * ror_irdisp8(uint8_t * out, uint8_t val, uint8_t dst_base, int8_t disp, uint8_t size)
407 {
408 return x86_shiftrot_irdisp8(out, OP_EX_ROR, val, dst_base, disp, size);
409 }
410
411 uint8_t * rcl_irdisp8(uint8_t * out, uint8_t val, uint8_t dst_base, int8_t disp, uint8_t size)
412 {
413 return x86_shiftrot_irdisp8(out, OP_EX_RCL, val, dst_base, disp, size);
414 }
415
416 uint8_t * rcr_irdisp8(uint8_t * out, uint8_t val, uint8_t dst_base, int8_t disp, uint8_t size)
417 {
418 return x86_shiftrot_irdisp8(out, OP_EX_RCR, val, dst_base, disp, size);
419 }
420
421 uint8_t * shl_irdisp8(uint8_t * out, uint8_t val, uint8_t dst_base, int8_t disp, uint8_t size)
422 {
423 return x86_shiftrot_irdisp8(out, OP_EX_SHL, val, dst_base, disp, size);
424 }
425
426 uint8_t * shr_irdisp8(uint8_t * out, uint8_t val, uint8_t dst_base, int8_t disp, uint8_t size)
427 {
428 return x86_shiftrot_irdisp8(out, OP_EX_SHR, val, dst_base, disp, size);
429 }
430
431 uint8_t * sar_irdisp8(uint8_t * out, uint8_t val, uint8_t dst_base, int8_t disp, uint8_t size)
432 {
433 return x86_shiftrot_irdisp8(out, OP_EX_SAR, val, dst_base, disp, size);
434 }
435
297 uint8_t * add_rr(uint8_t * out, uint8_t src, uint8_t dst, uint8_t size) 436 uint8_t * add_rr(uint8_t * out, uint8_t src, uint8_t dst, uint8_t size)
298 { 437 {
299 return x86_rr_sizedir(out, OP_ADD, src, dst, size); 438 return x86_rr_sizedir(out, OP_ADD, src, dst, size);
300 } 439 }
301 440