comparison z80_to_x86.c @ 731:0835cd3dfc36

Z80 test cases that passed on 64-bit now pass on 32-bit
author Michael Pavone <pavone@retrodev.com>
date Sun, 24 May 2015 21:11:18 -0700
parents 38e9bee03749
children e21c274a008e
comparison
equal deleted inserted replaced
730:38e9bee03749 731:0835cd3dfc36
33 } 33 }
34 //TODO: Handle any necessary special cases 34 //TODO: Handle any necessary special cases
35 return SZ_B; 35 return SZ_B;
36 } 36 }
37 37
// zf_off: byte offset of the given flag slot inside z80_context's
// `flags` array (base offset of the array plus the flag index).
// NOTE(review): the leading numerals on these lines are revision line
// numbers from the VCS comparison view, not part of the code.
38 uint8_t zf_off(uint8_t flag)
39 {
40 return offsetof(z80_context, flags) + flag;
41 }
42
// zaf_off: like zf_off, but indexes into the shadow/alternate flag set
// (`alt_flags`) used by the Z80's EX AF,AF' register bank.
// NOTE(review): leading numerals are diff line numbers, not code.
43 uint8_t zaf_off(uint8_t flag)
44 {
45 return offsetof(z80_context, alt_flags) + flag;
46 }
47
// zr_off: byte offset of a Z80 register inside z80_context's `regs`
// array. Register ids above Z80_A denote 16-bit pairs; those are first
// mapped to their low half via z80_low_reg so the offset points at the
// start of the pair's storage.
// NOTE(review): leading numerals are diff line numbers, not code.
48 uint8_t zr_off(uint8_t reg)
49 {
50 if (reg > Z80_A) {
51 reg = z80_low_reg(reg);
52 }
53 return offsetof(z80_context, regs) + reg;
54 }
55
// zar_off: same mapping as zr_off (pairs reduced to their low half),
// but into the alternate register bank `alt_regs` (EXX shadow set).
// NOTE(review): leading numerals are diff line numbers, not code.
56 uint8_t zar_off(uint8_t reg)
57 {
58 if (reg > Z80_A) {
59 reg = z80_low_reg(reg);
60 }
61 return offsetof(z80_context, alt_regs) + reg;
62 }
63
// zreg_to_native: emit code copying Z80 register `reg` into host
// register `native_reg`. If opts->regs[reg] is non-negative — i.e. a
// native register appears to be allocated for it — a reg-to-reg move
// is emitted; otherwise the value is loaded from the context struct at
// zr_off(reg). Operand size is SZ_W for pair ids (reg > Z80_A), SZ_B
// for 8-bit registers.
// NOTE(review): leading numerals are diff line numbers, not code.
64 void zreg_to_native(z80_options *opts, uint8_t reg, uint8_t native_reg)
65 {
66 if (opts->regs[reg] >= 0) {
67 mov_rr(&opts->gen.code, opts->regs[reg], native_reg, reg > Z80_A ? SZ_W : SZ_B);
68 } else {
69 mov_rdispr(&opts->gen.code, opts->gen.context_reg, zr_off(reg), native_reg, reg > Z80_A ? SZ_W : SZ_B);
70 }
71 }
72
// native_to_zreg: inverse of zreg_to_native — emit code storing host
// register `native_reg` into Z80 register `reg`, either into its
// allocated native register (opts->regs[reg] >= 0) or into the context
// struct at zr_off(reg). Size selection matches zreg_to_native.
// NOTE(review): leading numerals are diff line numbers, not code.
73 void native_to_zreg(z80_options *opts, uint8_t native_reg, uint8_t reg)
74 {
75 if (opts->regs[reg] >= 0) {
76 mov_rr(&opts->gen.code, native_reg, opts->regs[reg], reg > Z80_A ? SZ_W : SZ_B);
77 } else {
78 mov_rrdisp(&opts->gen.code, native_reg, opts->gen.context_reg, zr_off(reg), reg > Z80_A ? SZ_W : SZ_B);
79 }
80 }
81
38 void translate_z80_reg(z80inst * inst, host_ea * ea, z80_options * opts) 82 void translate_z80_reg(z80inst * inst, host_ea * ea, z80_options * opts)
39 { 83 {
40 code_info *code = &opts->gen.code; 84 code_info *code = &opts->gen.code;
41 if (inst->reg == Z80_USE_IMMED) { 85 if (inst->reg == Z80_USE_IMMED) {
42 ea->mode = MODE_IMMED; 86 ea->mode = MODE_IMMED;
43 ea->disp = inst->immed; 87 ea->disp = inst->immed;
44 } else if ((inst->reg & 0x1F) == Z80_UNUSED) { 88 } else if ((inst->reg & 0x1F) == Z80_UNUSED) {
45 ea->mode = MODE_UNUSED; 89 ea->mode = MODE_UNUSED;
46 } else { 90 } else {
47 ea->mode = MODE_REG_DIRECT; 91 ea->mode = MODE_REG_DIRECT;
48 if (inst->reg == Z80_IYH) { 92 if (inst->reg == Z80_IYH && opts->regs[Z80_IYL] >= 0) {
49 if ((inst->addr_mode & 0x1F) == Z80_REG && inst->ea_reg == Z80_IYL) { 93 if ((inst->addr_mode & 0x1F) == Z80_REG && inst->ea_reg == Z80_IYL) {
50 mov_rr(code, opts->regs[Z80_IY], opts->gen.scratch1, SZ_W); 94 mov_rr(code, opts->regs[Z80_IY], opts->gen.scratch1, SZ_W);
51 ror_ir(code, 8, opts->gen.scratch1, SZ_W); 95 ror_ir(code, 8, opts->gen.scratch1, SZ_W);
52 ea->base = opts->gen.scratch1; 96 ea->base = opts->gen.scratch1;
53 } else { 97 } else {
71 } 115 }
72 } 116 }
73 } else { 117 } else {
74 ea->mode = MODE_REG_DISPLACE8; 118 ea->mode = MODE_REG_DISPLACE8;
75 ea->base = opts->gen.context_reg; 119 ea->base = opts->gen.context_reg;
76 ea->disp = offsetof(z80_context, regs) + inst->reg; 120 ea->disp = zr_off(inst->reg);
77 } 121 }
78 } 122 }
79 } 123 }
80 124
81 void z80_save_reg(z80inst * inst, z80_options * opts) 125 void z80_save_reg(z80inst * inst, z80_options * opts)
82 { 126 {
83 code_info *code = &opts->gen.code; 127 code_info *code = &opts->gen.code;
84 if (inst->reg == Z80_USE_IMMED || inst->reg == Z80_UNUSED) { 128 if (inst->reg == Z80_USE_IMMED || inst->reg == Z80_UNUSED) {
85 return; 129 return;
86 } 130 }
87 if (inst->reg == Z80_IYH) { 131 if (inst->reg == Z80_IYH && opts->regs[Z80_IYL] >= 0) {
88 if ((inst->addr_mode & 0x1F) == Z80_REG && inst->ea_reg == Z80_IYL) { 132 if ((inst->addr_mode & 0x1F) == Z80_REG && inst->ea_reg == Z80_IYL) {
89 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W); 133 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W);
90 mov_rr(code, opts->gen.scratch1, opts->regs[Z80_IYL], SZ_B); 134 mov_rr(code, opts->gen.scratch1, opts->regs[Z80_IYL], SZ_B);
91 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W); 135 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W);
92 } else { 136 } else {
114 ea->mode = MODE_REG_DIRECT; 158 ea->mode = MODE_REG_DIRECT;
115 areg = read ? opts->gen.scratch1 : opts->gen.scratch2; 159 areg = read ? opts->gen.scratch1 : opts->gen.scratch2;
116 switch(inst->addr_mode & 0x1F) 160 switch(inst->addr_mode & 0x1F)
117 { 161 {
118 case Z80_REG: 162 case Z80_REG:
119 if (inst->ea_reg == Z80_IYH && opts->regs[Z80_IY] >= 0) { 163 if (inst->ea_reg == Z80_IYH && opts->regs[Z80_IYL] >= 0) {
120 if (inst->reg == Z80_IYL) { 164 if (inst->reg == Z80_IYL) {
121 mov_rr(code, opts->regs[Z80_IY], opts->gen.scratch1, SZ_W); 165 mov_rr(code, opts->regs[Z80_IY], opts->gen.scratch1, SZ_W);
122 ror_ir(code, 8, opts->gen.scratch1, SZ_W); 166 ror_ir(code, 8, opts->gen.scratch1, SZ_W);
123 ea->base = opts->gen.scratch1; 167 ea->base = opts->gen.scratch1;
124 } else { 168 } else {
138 #endif 182 #endif
139 } 183 }
140 } else { 184 } else {
141 ea->mode = MODE_REG_DISPLACE8; 185 ea->mode = MODE_REG_DISPLACE8;
142 ea->base = opts->gen.context_reg; 186 ea->base = opts->gen.context_reg;
143 ea->disp = offsetof(z80_context, regs) + inst->ea_reg; 187 ea->disp = zr_off(inst->ea_reg);
144 } 188 }
145 break; 189 break;
146 case Z80_REG_INDIRECT: 190 case Z80_REG_INDIRECT:
147 if (opts->regs[inst->ea_reg] >= 0) { 191 zreg_to_native(opts, inst->ea_reg, areg);
148 mov_rr(code, opts->regs[inst->ea_reg], areg, SZ_W);
149 } else {
150 mov_rdispr(code, opts->gen.context_reg, offsetof(z80_context, regs) + z80_low_reg(inst->ea_reg), areg, SZ_W);
151 }
152 size = z80_size(inst); 192 size = z80_size(inst);
153 if (read) { 193 if (read) {
154 if (modify) { 194 if (modify) {
155 //push_r(code, opts->gen.scratch1); 195 //push_r(code, opts->gen.scratch1);
156 mov_rrdisp(code, opts->gen.scratch1, opts->gen.context_reg, offsetof(z80_context, scratch1), SZ_W); 196 mov_rrdisp(code, opts->gen.scratch1, opts->gen.context_reg, offsetof(z80_context, scratch1), SZ_W);
190 } 230 }
191 ea->base = opts->gen.scratch1; 231 ea->base = opts->gen.scratch1;
192 break; 232 break;
193 case Z80_IX_DISPLACE: 233 case Z80_IX_DISPLACE:
194 case Z80_IY_DISPLACE: 234 case Z80_IY_DISPLACE:
195 reg = opts->regs[(inst->addr_mode & 0x1F) == Z80_IX_DISPLACE ? Z80_IX : Z80_IY]; 235 zreg_to_native(opts, (inst->addr_mode & 0x1F) == Z80_IX_DISPLACE ? Z80_IX : Z80_IY, areg);
196 if (reg >= 0) {
197 mov_rr(code, reg, areg, SZ_W);
198 } else {
199 mov_rdispr(code, opts->gen.context_reg, offsetof(z80_context, regs) + (inst->addr_mode & 0x1F) == Z80_IX_DISPLACE ? Z80_IXL : Z80_IYL, areg, SZ_W);
200 }
201 add_ir(code, inst->ea_reg & 0x80 ? inst->ea_reg - 256 : inst->ea_reg, areg, SZ_W); 236 add_ir(code, inst->ea_reg & 0x80 ? inst->ea_reg - 256 : inst->ea_reg, areg, SZ_W);
202 size = z80_size(inst); 237 size = z80_size(inst);
203 if (read) { 238 if (read) {
204 if (modify) { 239 if (modify) {
205 //push_r(code, opts->gen.scratch1); 240 //push_r(code, opts->gen.scratch1);
227 } 262 }
228 263
229 void z80_save_ea(code_info *code, z80inst * inst, z80_options * opts) 264 void z80_save_ea(code_info *code, z80inst * inst, z80_options * opts)
230 { 265 {
231 if ((inst->addr_mode & 0x1F) == Z80_REG) { 266 if ((inst->addr_mode & 0x1F) == Z80_REG) {
232 if (inst->ea_reg == Z80_IYH) { 267 if (inst->ea_reg == Z80_IYH && opts->regs[Z80_IYL] >= 0) {
233 if (inst->reg == Z80_IYL) { 268 if (inst->reg == Z80_IYL) {
234 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W); 269 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W);
235 mov_rr(code, opts->gen.scratch1, opts->regs[Z80_IYL], SZ_B); 270 mov_rr(code, opts->gen.scratch1, opts->regs[Z80_IYL], SZ_B);
236 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W); 271 ror_ir(code, 8, opts->regs[Z80_IY], SZ_W);
237 } else { 272 } else {
272 307
273 enum { 308 enum {
274 DONT_MODIFY=0, 309 DONT_MODIFY=0,
275 MODIFY 310 MODIFY
276 }; 311 };
277
// NOTE(review): this hunk is the OLD-revision (left column, lines
// 278–320) copy of the six helper functions above — the changeset
// moves them earlier in the file, so they appear here as deletions.
// Bodies are textually identical to the new-side definitions; see the
// comments on those for semantics. Leading numerals are diff line
// numbers, not code.
278 uint8_t zf_off(uint8_t flag)
279 {
280 return offsetof(z80_context, flags) + flag;
281 }
282
283 uint8_t zaf_off(uint8_t flag)
284 {
285 return offsetof(z80_context, alt_flags) + flag;
286 }
287
288 uint8_t zr_off(uint8_t reg)
289 {
290 if (reg > Z80_A) {
291 reg = z80_low_reg(reg);
292 }
293 return offsetof(z80_context, regs) + reg;
294 }
295
296 uint8_t zar_off(uint8_t reg)
297 {
298 if (reg > Z80_A) {
299 reg = z80_low_reg(reg);
300 }
301 return offsetof(z80_context, alt_regs) + reg;
302 }
303
304 void zreg_to_native(z80_options *opts, uint8_t reg, uint8_t native_reg)
305 {
306 if (opts->regs[reg] >= 0) {
307 mov_rr(&opts->gen.code, opts->regs[reg], native_reg, reg > Z80_A ? SZ_W : SZ_B);
308 } else {
309 mov_rdispr(&opts->gen.code, opts->gen.context_reg, zr_off(reg), native_reg, reg > Z80_A ? SZ_W : SZ_B);
310 }
311 }
312
313 void native_to_zreg(z80_options *opts, uint8_t native_reg, uint8_t reg)
314 {
315 if (opts->regs[reg] >= 0) {
316 mov_rr(&opts->gen.code, native_reg, opts->regs[reg], reg > Z80_A ? SZ_W : SZ_B);
317 } else {
318 mov_rrdisp(&opts->gen.code, native_reg, opts->gen.context_reg, zr_off(reg), reg > Z80_A ? SZ_W : SZ_B);
319 }
320 }
321 312
322 void z80_print_regs_exit(z80_context * context) 313 void z80_print_regs_exit(z80_context * context)
323 { 314 {
324 printf("A: %X\nB: %X\nC: %X\nD: %X\nE: %X\nHL: %X\nIX: %X\nIY: %X\nSP: %X\n\nIM: %d, IFF1: %d, IFF2: %d\n", 315 printf("A: %X\nB: %X\nC: %X\nD: %X\nE: %X\nHL: %X\nIX: %X\nIY: %X\nSP: %X\n\nIM: %d, IFF1: %d, IFF2: %d\n",
325 context->regs[Z80_A], context->regs[Z80_B], context->regs[Z80_C], 316 context->regs[Z80_A], context->regs[Z80_B], context->regs[Z80_C],
443 shl_ir(code, 1, opts->gen.scratch1, SZ_B); 434 shl_ir(code, 1, opts->gen.scratch1, SZ_B);
444 or_rdispr(code, opts->gen.context_reg, zf_off(ZF_N), opts->gen.scratch1, SZ_B); 435 or_rdispr(code, opts->gen.context_reg, zf_off(ZF_N), opts->gen.scratch1, SZ_B);
445 shl_ir(code, 1, opts->gen.scratch1, SZ_B); 436 shl_ir(code, 1, opts->gen.scratch1, SZ_B);
446 or_rdispr(code, opts->gen.context_reg, zf_off(ZF_C), opts->gen.scratch1, SZ_B); 437 or_rdispr(code, opts->gen.context_reg, zf_off(ZF_C), opts->gen.scratch1, SZ_B);
447 } else { 438 } else {
448 translate_z80_reg(inst, &src_op, opts); 439 zreg_to_native(opts, inst->reg, opts->gen.scratch1);
449 mov_rr(code, src_op.base, opts->gen.scratch1, SZ_W);
450 } 440 }
451 mov_rr(code, opts->regs[Z80_SP], opts->gen.scratch2, SZ_W); 441 mov_rr(code, opts->regs[Z80_SP], opts->gen.scratch2, SZ_W);
452 call(code, opts->write_16_highfirst); 442 call(code, opts->write_16_highfirst);
453 //no call to save_z80_reg needed since there's no chance we'll use the only 443 //no call to save_z80_reg needed since there's no chance we'll use the only
454 //the upper half of a register pair 444 //the upper half of a register pair
473 bt_ir(code, 7, opts->gen.scratch1, SZ_W); 463 bt_ir(code, 7, opts->gen.scratch1, SZ_W);
474 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_S)); 464 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_S));
475 shr_ir(code, 8, opts->gen.scratch1, SZ_W); 465 shr_ir(code, 8, opts->gen.scratch1, SZ_W);
476 native_to_zreg(opts, opts->gen.scratch1, Z80_A); 466 native_to_zreg(opts, opts->gen.scratch1, Z80_A);
477 } else { 467 } else {
478 translate_z80_reg(inst, &src_op, opts); 468 native_to_zreg(opts, opts->gen.scratch1, inst->reg);
479 mov_rr(code, opts->gen.scratch1, src_op.base, SZ_W);
480 } 469 }
481 //no call to save_z80_reg needed since there's no chance we'll use the only 470 //no call to save_z80_reg needed since there's no chance we'll use the only
482 //the upper half of a register pair 471 //the upper half of a register pair
483 break; 472 break;
484 case Z80_EX: 473 case Z80_EX:
1121 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C)); 1110 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C));
1122 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B); 1111 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B);
1123 //TODO: Implement half-carry flag 1112 //TODO: Implement half-carry flag
1124 if (inst->immed) { 1113 if (inst->immed) {
1125 //rlca does not set these flags 1114 //rlca does not set these flags
1126 cmp_ir(code, 0, dst_op.base, SZ_B); 1115 if (dst_op.mode == MODE_REG_DIRECT) {
1116 cmp_ir(code, 0, dst_op.base, SZ_B);
1117 } else {
1118 cmp_irdisp(code, 0, dst_op.base, dst_op.disp, SZ_B);
1119 }
1127 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV)); 1120 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV));
1128 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z)); 1121 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z));
1129 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S)); 1122 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S));
1130 } 1123 }
1131 if (inst->addr_mode != Z80_UNUSED) { 1124 if (inst->addr_mode != Z80_UNUSED) {
1162 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C)); 1155 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C));
1163 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B); 1156 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B);
1164 //TODO: Implement half-carry flag 1157 //TODO: Implement half-carry flag
1165 if (inst->immed) { 1158 if (inst->immed) {
1166 //rla does not set these flags 1159 //rla does not set these flags
1167 cmp_ir(code, 0, dst_op.base, SZ_B); 1160 if (dst_op.mode == MODE_REG_DIRECT) {
1161 cmp_ir(code, 0, dst_op.base, SZ_B);
1162 } else {
1163 cmp_irdisp(code, 0, dst_op.base, dst_op.disp, SZ_B);
1164 }
1168 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV)); 1165 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV));
1169 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z)); 1166 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z));
1170 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S)); 1167 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S));
1171 } 1168 }
1172 if (inst->addr_mode != Z80_UNUSED) { 1169 if (inst->addr_mode != Z80_UNUSED) {
1202 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C)); 1199 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C));
1203 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B); 1200 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B);
1204 //TODO: Implement half-carry flag 1201 //TODO: Implement half-carry flag
1205 if (inst->immed) { 1202 if (inst->immed) {
1206 //rrca does not set these flags 1203 //rrca does not set these flags
1207 cmp_ir(code, 0, dst_op.base, SZ_B); 1204 if (dst_op.mode == MODE_REG_DIRECT) {
1205 cmp_ir(code, 0, dst_op.base, SZ_B);
1206 } else {
1207 cmp_irdisp(code, 0, dst_op.base, dst_op.disp, SZ_B);
1208 }
1208 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV)); 1209 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV));
1209 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z)); 1210 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z));
1210 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S)); 1211 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S));
1211 } 1212 }
1212 if (inst->addr_mode != Z80_UNUSED) { 1213 if (inst->addr_mode != Z80_UNUSED) {
1243 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C)); 1244 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C));
1244 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B); 1245 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B);
1245 //TODO: Implement half-carry flag 1246 //TODO: Implement half-carry flag
1246 if (inst->immed) { 1247 if (inst->immed) {
1247 //rra does not set these flags 1248 //rra does not set these flags
1248 cmp_ir(code, 0, dst_op.base, SZ_B); 1249 if (dst_op.mode == MODE_REG_DIRECT) {
1250 cmp_ir(code, 0, dst_op.base, SZ_B);
1251 } else {
1252 cmp_irdisp(code, 0, dst_op.base, dst_op.disp, SZ_B);
1253 }
1249 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV)); 1254 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV));
1250 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z)); 1255 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z));
1251 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S)); 1256 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S));
1252 } 1257 }
1253 if (inst->addr_mode != Z80_UNUSED) { 1258 if (inst->addr_mode != Z80_UNUSED) {
1276 } else { 1281 } else {
1277 shl_irdisp(code, 1, dst_op.base, dst_op.disp, SZ_B); 1282 shl_irdisp(code, 1, dst_op.base, dst_op.disp, SZ_B);
1278 } 1283 }
1279 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C)); 1284 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C));
1280 if (inst->op == Z80_SLL) { 1285 if (inst->op == Z80_SLL) {
1281 or_ir(code, 1, dst_op.base, SZ_B); 1286 if (dst_op.mode == MODE_REG_DIRECT) {
1287 or_ir(code, 1, dst_op.base, SZ_B);
1288 } else {
1289 or_irdisp(code, 1, dst_op.base, dst_op.disp, SZ_B);
1290 }
1282 } 1291 }
1283 if (src_op.mode == MODE_REG_DIRECT) { 1292 if (src_op.mode == MODE_REG_DIRECT) {
1284 mov_rr(code, dst_op.base, src_op.base, SZ_B); 1293 mov_rr(code, dst_op.base, src_op.base, SZ_B);
1285 } else if(src_op.mode == MODE_REG_DISPLACE8) { 1294 } else if(src_op.mode == MODE_REG_DISPLACE8) {
1286 mov_rrdisp(code, dst_op.base, src_op.base, src_op.disp, SZ_B); 1295 mov_rrdisp(code, dst_op.base, src_op.base, src_op.disp, SZ_B);
1287 } 1296 }
1288 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B); 1297 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B);
1289 //TODO: Implement half-carry flag 1298 //TODO: Implement half-carry flag
1290 cmp_ir(code, 0, dst_op.base, SZ_B); 1299 if (dst_op.mode == MODE_REG_DIRECT) {
1300 cmp_ir(code, 0, dst_op.base, SZ_B);
1301 } else {
1302 cmp_irdisp(code, 0, dst_op.base, dst_op.disp, SZ_B);
1303 }
1291 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV)); 1304 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV));
1292 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z)); 1305 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z));
1293 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S)); 1306 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S));
1294 if (inst->addr_mode != Z80_UNUSED) { 1307 if (inst->addr_mode != Z80_UNUSED) {
1295 z80_save_result(opts, inst); 1308 z80_save_result(opts, inst);
1322 mov_rrdisp(code, dst_op.base, src_op.base, src_op.disp, SZ_B); 1335 mov_rrdisp(code, dst_op.base, src_op.base, src_op.disp, SZ_B);
1323 } 1336 }
1324 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C)); 1337 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C));
1325 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B); 1338 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B);
1326 //TODO: Implement half-carry flag 1339 //TODO: Implement half-carry flag
1327 cmp_ir(code, 0, dst_op.base, SZ_B); 1340 if (dst_op.mode == MODE_REG_DIRECT) {
1341 cmp_ir(code, 0, dst_op.base, SZ_B);
1342 } else {
1343 cmp_irdisp(code, 0, dst_op.base, dst_op.disp, SZ_B);
1344 }
1328 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV)); 1345 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV));
1329 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z)); 1346 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z));
1330 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S)); 1347 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S));
1331 if (inst->addr_mode != Z80_UNUSED) { 1348 if (inst->addr_mode != Z80_UNUSED) {
1332 z80_save_result(opts, inst); 1349 z80_save_result(opts, inst);
1359 mov_rrdisp(code, dst_op.base, src_op.base, src_op.disp, SZ_B); 1376 mov_rrdisp(code, dst_op.base, src_op.base, src_op.disp, SZ_B);
1360 } 1377 }
1361 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C)); 1378 setcc_rdisp(code, CC_C, opts->gen.context_reg, zf_off(ZF_C));
1362 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B); 1379 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B);
1363 //TODO: Implement half-carry flag 1380 //TODO: Implement half-carry flag
1364 cmp_ir(code, 0, dst_op.base, SZ_B); 1381 if (dst_op.mode == MODE_REG_DIRECT) {
1382 cmp_ir(code, 0, dst_op.base, SZ_B);
1383 } else {
1384 cmp_irdisp(code, 0, dst_op.base, dst_op.disp, SZ_B);
1385 }
1365 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV)); 1386 setcc_rdisp(code, CC_P, opts->gen.context_reg, zf_off(ZF_PV));
1366 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z)); 1387 setcc_rdisp(code, CC_Z, opts->gen.context_reg, zf_off(ZF_Z));
1367 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S)); 1388 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S));
1368 if (inst->addr_mode != Z80_UNUSED) { 1389 if (inst->addr_mode != Z80_UNUSED) {
1369 z80_save_result(opts, inst); 1390 z80_save_result(opts, inst);
1446 } 1467 }
1447 if (inst->addr_mode != Z80_REG) { 1468 if (inst->addr_mode != Z80_REG) {
1448 //Reads normally take 3 cycles, but the read at the end of a bit instruction takes 4 1469 //Reads normally take 3 cycles, but the read at the end of a bit instruction takes 4
1449 cycles(&opts->gen, 1); 1470 cycles(&opts->gen, 1);
1450 } 1471 }
1451 bt_ir(code, bit, src_op.base, size); 1472 if (src_op.mode == MODE_REG_DIRECT) {
1473 bt_ir(code, bit, src_op.base, size);
1474 } else {
1475 bt_irdisp(code, bit, src_op.base, src_op.disp, size);
1476 }
1452 setcc_rdisp(code, CC_NC, opts->gen.context_reg, zf_off(ZF_Z)); 1477 setcc_rdisp(code, CC_NC, opts->gen.context_reg, zf_off(ZF_Z));
1453 setcc_rdisp(code, CC_NC, opts->gen.context_reg, zf_off(ZF_PV)); 1478 setcc_rdisp(code, CC_NC, opts->gen.context_reg, zf_off(ZF_PV));
1454 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B); 1479 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_N), SZ_B);
1455 if (inst->immed == 7) { 1480 if (inst->immed == 7) {
1456 cmp_ir(code, 0, src_op.base, size); 1481 if (src_op.mode == MODE_REG_DIRECT) {
1482 cmp_ir(code, 0, src_op.base, size);
1483 } else {
1484 cmp_irdisp(code, 0, src_op.base, src_op.disp, size);
1485 }
1457 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S)); 1486 setcc_rdisp(code, CC_S, opts->gen.context_reg, zf_off(ZF_S));
1458 } else { 1487 } else {
1459 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_S), SZ_B); 1488 mov_irdisp(code, 0, opts->gen.context_reg, zf_off(ZF_S), SZ_B);
1460 } 1489 }
1461 break; 1490 break;
1478 } 1507 }
1479 if (inst->addr_mode != Z80_REG) { 1508 if (inst->addr_mode != Z80_REG) {
1480 //Reads normally take 3 cycles, but the read in the middle of a set instruction takes 4 1509 //Reads normally take 3 cycles, but the read in the middle of a set instruction takes 4
1481 cycles(&opts->gen, 1); 1510 cycles(&opts->gen, 1);
1482 } 1511 }
1483 bts_ir(code, bit, src_op.base, size); 1512 if (src_op.mode == MODE_REG_DIRECT) {
1513 bts_ir(code, bit, src_op.base, size);
1514 } else {
1515 bts_irdisp(code, bit, src_op.base, src_op.disp, size);
1516 }
1484 if (inst->reg != Z80_USE_IMMED) { 1517 if (inst->reg != Z80_USE_IMMED) {
1485 if (size == SZ_W) { 1518 if (size == SZ_W) {
1486 #ifdef X86_64 1519 #ifdef X86_64
1487 if (dst_op.base >= R8) { 1520 if (dst_op.base >= R8) {
1488 ror_ir(code, 8, src_op.base, SZ_W); 1521 ror_ir(code, 8, src_op.base, SZ_W);
1489 mov_rr(code, opts->regs[z80_low_reg(inst->ea_reg)], dst_op.base, SZ_B); 1522 mov_rr(code, opts->regs[z80_low_reg(inst->ea_reg)], dst_op.base, SZ_B);
1490 ror_ir(code, 8, src_op.base, SZ_W); 1523 ror_ir(code, 8, src_op.base, SZ_W);
1491 } else { 1524 } else {
1492 #endif 1525 #endif
1493 mov_rr(code, opts->regs[inst->ea_reg], dst_op.base, SZ_B); 1526 if (dst_op.mode == MODE_REG_DIRECT) {
1527 zreg_to_native(opts, inst->ea_reg, dst_op.base);
1528 } else {
1529 zreg_to_native(opts, inst->ea_reg, opts->gen.scratch1);
1530 mov_rrdisp(code, opts->gen.scratch1, dst_op.base, dst_op.disp, SZ_B);
1531 }
1494 #ifdef X86_64 1532 #ifdef X86_64
1495 } 1533 }
1496 #endif 1534 #endif
1497 } else { 1535 } else {
1498 mov_rr(code, src_op.base, dst_op.base, SZ_B); 1536 if (dst_op.mode == MODE_REG_DIRECT) {
1537 if (src_op.mode == MODE_REG_DIRECT) {
1538 mov_rr(code, src_op.base, dst_op.base, SZ_B);
1539 } else {
1540 mov_rdispr(code, src_op.base, src_op.disp, dst_op.base, SZ_B);
1541 }
1542 } else if (src_op.mode == MODE_REG_DIRECT) {
1543 mov_rrdisp(code, src_op.base, dst_op.base, dst_op.disp, SZ_B);
1544 } else {
1545 mov_rdispr(code, src_op.base, src_op.disp, opts->gen.scratch1, SZ_B);
1546 mov_rrdisp(code, opts->gen.scratch1, dst_op.base, dst_op.disp, SZ_B);
1547 }
1499 } 1548 }
1500 } 1549 }
1501 if ((inst->addr_mode & 0x1F) != Z80_REG) { 1550 if ((inst->addr_mode & 0x1F) != Z80_REG) {
1502 z80_save_result(opts, inst); 1551 z80_save_result(opts, inst);
1503 if (inst->reg != Z80_USE_IMMED) { 1552 if (inst->reg != Z80_USE_IMMED) {
1524 } 1573 }
1525 if (inst->addr_mode != Z80_REG) { 1574 if (inst->addr_mode != Z80_REG) {
1526 //Reads normally take 3 cycles, but the read in the middle of a set instruction takes 4 1575 //Reads normally take 3 cycles, but the read in the middle of a set instruction takes 4
1527 cycles(&opts->gen, 1); 1576 cycles(&opts->gen, 1);
1528 } 1577 }
1529 btr_ir(code, bit, src_op.base, size); 1578 if (src_op.mode == MODE_REG_DIRECT) {
1579 btr_ir(code, bit, src_op.base, size);
1580 } else {
1581 btr_irdisp(code, bit, src_op.base, src_op.disp, size);
1582 }
1530 if (inst->reg != Z80_USE_IMMED) { 1583 if (inst->reg != Z80_USE_IMMED) {
1531 if (size == SZ_W) { 1584 if (size == SZ_W) {
1532 #ifdef X86_64 1585 #ifdef X86_64
1533 if (dst_op.base >= R8) { 1586 if (dst_op.base >= R8) {
1534 ror_ir(code, 8, src_op.base, SZ_W); 1587 ror_ir(code, 8, src_op.base, SZ_W);
1535 mov_rr(code, opts->regs[z80_low_reg(inst->ea_reg)], dst_op.base, SZ_B); 1588 mov_rr(code, opts->regs[z80_low_reg(inst->ea_reg)], dst_op.base, SZ_B);
1536 ror_ir(code, 8, src_op.base, SZ_W); 1589 ror_ir(code, 8, src_op.base, SZ_W);
1537 } else { 1590 } else {
1538 #endif 1591 #endif
1539 mov_rr(code, opts->regs[inst->ea_reg], dst_op.base, SZ_B); 1592 if (dst_op.mode == MODE_REG_DIRECT) {
1593 zreg_to_native(opts, inst->ea_reg, dst_op.base);
1594 } else {
1595 zreg_to_native(opts, inst->ea_reg, opts->gen.scratch1);
1596 mov_rrdisp(code, opts->gen.scratch1, dst_op.base, dst_op.disp, SZ_B);
1597 }
1540 #ifdef X86_64 1598 #ifdef X86_64
1541 } 1599 }
1542 #endif 1600 #endif
1543 } else { 1601 } else {
1544 mov_rr(code, src_op.base, dst_op.base, SZ_B); 1602 if (dst_op.mode == MODE_REG_DIRECT) {
1603 if (src_op.mode == MODE_REG_DIRECT) {
1604 mov_rr(code, src_op.base, dst_op.base, SZ_B);
1605 } else {
1606 mov_rdispr(code, src_op.base, src_op.disp, dst_op.base, SZ_B);
1607 }
1608 } else if (src_op.mode == MODE_REG_DIRECT) {
1609 mov_rrdisp(code, src_op.base, dst_op.base, dst_op.disp, SZ_B);
1610 } else {
1611 mov_rdispr(code, src_op.base, src_op.disp, opts->gen.scratch1, SZ_B);
1612 mov_rrdisp(code, opts->gen.scratch1, dst_op.base, dst_op.disp, SZ_B);
1613 }
1545 } 1614 }
1546 } 1615 }
1547 if (inst->addr_mode != Z80_REG) { 1616 if (inst->addr_mode != Z80_REG) {
1548 z80_save_result(opts, inst); 1617 z80_save_result(opts, inst);
1549 if (inst->reg != Z80_USE_IMMED) { 1618 if (inst->reg != Z80_USE_IMMED) {