comparison z80_to_x86.c @ 235:d9bf8e61c33c

Get Z80 core working for simple programs
author Mike Pavone <pavone@retrodev.com>
date Thu, 25 Apr 2013 21:01:11 -0700
parents 4d4559b04c59
children 19fb3523a9e5
234:f456ee23d372 235:d9bf8e61c33c
1 #include "z80inst.h"
1 #include "z80_to_x86.h" 2 #include "z80_to_x86.h"
2 #include "gen_x86.h" 3 #include "gen_x86.h"
4 #include "mem.h"
5 #include <stdio.h>
6 #include <stdlib.h>
7 #include <stddef.h>
8 #include <string.h>
3 9
4 #define MODE_UNUSED (MODE_IMMED-1) 10 #define MODE_UNUSED (MODE_IMMED-1)
5 11
6 #define ZCYCLES RBP 12 #define ZCYCLES RBP
7 #define SCRATCH1 R13 13 #define SCRATCH1 R13
8 #define SCRATCH2 R14 14 #define SCRATCH2 R14
15 #define CONTEXT RSI
16
17 //TODO: Find out the actual value for this
18 #define MAX_NATIVE_SIZE 128
9 19
10 void z80_read_byte(); 20 void z80_read_byte();
11 void z80_read_word(); 21 void z80_read_word();
12 22 void z80_write_byte();
13 uint8_t z80_size(z80_inst * inst) 23 void z80_write_word_highfirst();
24 void z80_write_word_lowfirst();
25 void z80_save_context();
26 void z80_native_addr();
27
28 uint8_t z80_size(z80inst * inst)
14 { 29 {
15 uint8_t reg = (inst->reg & 0x1F); 30 uint8_t reg = (inst->reg & 0x1F);
16 if (reg != Z80_UNUSED &&) { 31 if (reg != Z80_UNUSED && reg != Z80_USE_IMMED) {
17 return reg < Z80_BC ? SZ_B : SZ_W; 32 return reg < Z80_BC ? SZ_B : SZ_W;
18 } 33 }
19 //TODO: Handle any necessary special cases 34 //TODO: Handle any necessary special cases
20 return SZ_B; 35 return SZ_B;
21 } 36 }
22 37
23 uint8_t * zcylces(dst, uint32_t num_cycles) 38 uint8_t * zcycles(uint8_t * dst, uint32_t num_cycles)
24 { 39 {
25 return add_ir(dst, num_cycles, ZCYCLES, SZ_D); 40 return add_ir(dst, num_cycles, ZCYCLES, SZ_D);
26 } 41 }
27 42
28 uint8_t * translate_z80_reg(z80_inst * inst, x86_ea * ea, uint8_t * dst, x86_z80_options * opts) 43 uint8_t * translate_z80_reg(z80inst * inst, x86_ea * ea, uint8_t * dst, x86_z80_options * opts)
29 { 44 {
30 if (inst->reg == Z80_USE_IMMED) { 45 if (inst->reg == Z80_USE_IMMED) {
31 ea->mode = MODE_IMMED; 46 ea->mode = MODE_IMMED;
32 ea->disp = inst->immed; 47 ea->disp = inst->immed;
33 } else if ((inst->reg & 0x1F) == Z80_UNUSED) { 48 } else if ((inst->reg & 0x1F) == Z80_UNUSED) {
34 ea->mode = MODE_UNUSED; 49 ea->mode = MODE_UNUSED;
35 } else { 50 } else {
36 ea->mode = MODE_REG; 51 ea->mode = MODE_REG_DIRECT;
37 if (inst->reg == Z80_IYH) { 52 if (inst->reg == Z80_IYH) {
38 ea->base = opts->regs[Z80_IYL]; 53 ea->base = opts->regs[Z80_IYL];
39 dst = ror_ir(dst, 8, opts->regs[Z80_IY], SZ_W); 54 dst = ror_ir(dst, 8, opts->regs[Z80_IY], SZ_W);
40 } else { 55 } else {
41 ea->base = opts->regs[inst->reg] 56 ea->base = opts->regs[inst->reg];
42 } 57 }
43 } 58 }
44 return dst; 59 return dst;
45 } 60 }
46 61
47 uint8_t * save_z80_reg(uint8_t * dst, z80_inst * inst, x86_z80_options * opts) 62 uint8_t * z80_save_reg(uint8_t * dst, z80inst * inst, x86_z80_options * opts)
48 { 63 {
49 if (inst->reg == Z80_IYH) { 64 if (inst->reg == Z80_IYH) {
50 dst = ror_ir(dst, 8, opts->regs[Z80_IY], SZ_W); 65 dst = ror_ir(dst, 8, opts->regs[Z80_IY], SZ_W);
51 } 66 }
52 return dst; 67 return dst;
53 } 68 }
54 69
55 uint8_t * translate_z80_ea(z80_inst * inst, x86_ea * ea, uint8_t * dst, x86_z80_options * opts, uint8_t read, uint8_t modify) 70 uint8_t * translate_z80_ea(z80inst * inst, x86_ea * ea, uint8_t * dst, x86_z80_options * opts, uint8_t read, uint8_t modify)
56 { 71 {
57 uint8_t size, reg, areg; 72 uint8_t size, reg, areg;
58 ea->mode = MODE_REG; 73 ea->mode = MODE_REG_DIRECT;
59 areg = read ? SCRATCH1 : SCRATCH2; 74 areg = read ? SCRATCH1 : SCRATCH2;
60 switch(inst->addr_mode & 0x1F) 75 switch(inst->addr_mode & 0x1F)
61 { 76 {
62 case Z80_REG: 77 case Z80_REG:
63 if (inst->ea_reg == Z80_IYH) { 78 if (inst->ea_reg == Z80_IYH) {
105 dst = pop_r(dst, SCRATCH2); 120 dst = pop_r(dst, SCRATCH2);
106 } 121 }
107 } 122 }
108 ea->base = SCRATCH1; 123 ea->base = SCRATCH1;
109 break; 124 break;
110 case Z80_IX_INDEXED: 125 case Z80_IX_DISPLACE:
111 case Z80_IY_INDEXED: 126 case Z80_IY_DISPLACE:
112 reg = opts->regs[inst->addr_mode == Z80_IX_INDEXED ? Z80_IX : Z80_IY]; 127 reg = opts->regs[inst->addr_mode == Z80_IX_DISPLACE ? Z80_IX : Z80_IY];
113 dst = mov_rr(dst, reg, areg, SZ_W); 128 dst = mov_rr(dst, reg, areg, SZ_W);
114 dst = add_ir(dst, inst->immed, areg, SZ_W); 129 dst = add_ir(dst, inst->immed, areg, SZ_W);
115 size = z80_size(inst); 130 size = z80_size(inst);
116 if (read) { 131 if (read) {
117 if (modify) { 132 if (modify) {
126 dst = pop_r(dst, SCRATCH2); 141 dst = pop_r(dst, SCRATCH2);
127 } 142 }
128 } 143 }
129 break; 144 break;
130 case Z80_UNUSED: 145 case Z80_UNUSED:
131 ea->mode = MODE_UNUSED: 146 ea->mode = MODE_UNUSED;
132 break; 147 break;
133 default: 148 default:
134 fprintf(stderr, "Unrecognized Z80 addressing mode %d\n", inst->addr_mode); 149 fprintf(stderr, "Unrecognized Z80 addressing mode %d\n", inst->addr_mode);
135 exit(1); 150 exit(1);
136 } 151 }
137 return dst; 152 return dst;
138 } 153 }
139 154
140 uint8_t * z80_save_ea(uint8_t * dst, z80_inst * inst, x86_z80_options * opts) 155 uint8_t * z80_save_ea(uint8_t * dst, z80inst * inst, x86_z80_options * opts)
141 { 156 {
142 if (inst->addr_mode == Z80_REG_DIRECT && inst->ea_reg == Z80_IYH) { 157 if (inst->addr_mode == Z80_REG && inst->ea_reg == Z80_IYH) {
143 dst = ror_ir(dst, 8, opts->regs[Z80_IY], SZ_W); 158 dst = ror_ir(dst, 8, opts->regs[Z80_IY], SZ_W);
144 } 159 }
145 return dst; 160 return dst;
146 } 161 }
147 162
148 uint8_t * z80_save_result(uint8_t * dst, z80_inst * inst) 163 uint8_t * z80_save_result(uint8_t * dst, z80inst * inst)
149 { 164 {
150 if (z80_size(inst). == SZ_B) { 165 if (z80_size(inst) == SZ_B) {
151 dst = call(dst, (uint8_t *)z80_write_byte); 166 dst = call(dst, (uint8_t *)z80_write_byte);
152 } else { 167 } else {
153 dst = call(dst, (uint8_t *)z80_write_word); 168 dst = call(dst, (uint8_t *)z80_write_word_lowfirst);
154 } 169 }
155 return dst; 170 return dst;
156 } 171 }
157 172
158 enum { 173 enum {
168 uint8_t zf_off(uint8_t flag) 183 uint8_t zf_off(uint8_t flag)
169 { 184 {
170 return offsetof(z80_context, flags) + flag; 185 return offsetof(z80_context, flags) + flag;
171 } 186 }
172 187
173 uint8_t * translate_z80_inst(z80_inst * inst, uint8_t * dst, x86_z80_options * opts) 188 void z80_print_regs_exit(z80_context * context)
189 {
190 printf("A: %X\nB: %X\nC: %X\nD: %X\nE: %X\nHL: %X\nIX: %X\nIY: %X\nSP: %X\n",
191 context->regs[Z80_A], context->regs[Z80_B], context->regs[Z80_C],
192 context->regs[Z80_D], context->regs[Z80_E],
193 (context->regs[Z80_H] << 8) | context->regs[Z80_L],
194 (context->regs[Z80_IXH] << 8) | context->regs[Z80_IXL],
195 (context->regs[Z80_IYH] << 8) | context->regs[Z80_IYL],
196 context->sp);
197 exit(0);
198 }
199
200 uint8_t * translate_z80inst(z80inst * inst, uint8_t * dst, z80_context * context, uint16_t address)
174 { 201 {
175 uint32_t cycles; 202 uint32_t cycles;
176 x86_ea src_op, dst_op; 203 x86_ea src_op, dst_op;
204 uint8_t size;
205 x86_z80_options *opts = context->options;
177 switch(inst->op) 206 switch(inst->op)
178 { 207 {
179 case Z80_LD: 208 case Z80_LD:
209 size = z80_size(inst);
210 switch (inst->addr_mode & 0x1F)
211 {
212 case Z80_REG:
213 case Z80_REG_INDIRECT:
214 cycles = size == SZ_B ? 4 : 6;
215 if (inst->ea_reg == Z80_IX || inst->ea_reg == Z80_IY) {
216 cycles += 4;
217 }
218 break;
219 case Z80_IMMED:
220 cycles = size == SZ_B ? 7 : 10;
221 break;
222 case Z80_IMMED_INDIRECT:
223 cycles = 10;
224 break;
225 case Z80_IX_DISPLACE:
226 case Z80_IY_DISPLACE:
227 cycles = 12;
228 break;
229 }
230 if ((inst->reg >= Z80_IXL && inst->reg <= Z80_IYH) || inst->reg == Z80_IX || inst->reg == Z80_IY) {
231 cycles += 4;
232 }
233 dst = zcycles(dst, cycles);
180 if (inst->addr_mode & Z80_DIR) { 234 if (inst->addr_mode & Z80_DIR) {
235 dst = translate_z80_reg(inst, &src_op, dst, opts);
236 dst = translate_z80_ea(inst, &dst_op, dst, opts, DONT_READ, MODIFY);
237 } else {
181 dst = translate_z80_ea(inst, &src_op, dst, opts, READ, DONT_MODIFY); 238 dst = translate_z80_ea(inst, &src_op, dst, opts, READ, DONT_MODIFY);
182 dst = translate_z80_reg(inst, &dst_op, dst, opts); 239 dst = translate_z80_reg(inst, &dst_op, dst, opts);
183 } else { 240 }
184 dst = translate_z80_reg(inst, &src_op, dst, opts); 241 if (src_op.mode == MODE_REG_DIRECT) {
185 dst = translate_z80_ea(inst, &dst_op, dst, opts, DONT_READ, MODIFY); 242 dst = mov_rr(dst, src_op.base, dst_op.base, size);
186 } 243 } else {
187 if (ea_op.mode == MODE_REG_DIRECT) { 244 dst = mov_ir(dst, src_op.disp, dst_op.base, size);
188 dst = mov_rr(dst, ea_op.base, reg_op.base, z80_size(inst));
189 } else {
190 dst = mov_ir(dst, ea_op.disp, reg_op.base, z80_size(inst));
191 } 245 }
192 dst = z80_save_reg(dst, inst, opts); 246 dst = z80_save_reg(dst, inst, opts);
193 dst = z80_save_ea(dst, inst, opts); 247 dst = z80_save_ea(dst, inst, opts);
194 if (!(inst->addr_mode & Z80_DIR)) { 248 if (inst->addr_mode & Z80_DIR) {
195 dst = z80_save_result(dst, inst, opts); 249 dst = z80_save_result(dst, inst);
196 } 250 }
197 break; 251 break;
198 case Z80_PUSH: 252 case Z80_PUSH:
199 dst = zcycles(dst, (inst->reg == Z80_IX || inst->reg == Z80_IY) ? 9 : 5); 253 dst = zcycles(dst, (inst->reg == Z80_IX || inst->reg == Z80_IY) ? 9 : 5);
200 dst = sub_ir(dst, opts->regs[Z80_SP], SZ_W); 254 dst = sub_ir(dst, 2, opts->regs[Z80_SP], SZ_W);
201 dst = translate_z80_reg(inst, &src_op, dst, opts); 255 if (inst->reg == Z80_AF) {
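//build the F register image in SCRATCH2 from the separate flag bytes: S, Z, H, P/V, N and C end up in bits 7, 6, 4, 2, 1 and 0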
202 dst = mov_rr(dst, src_op.base, SCRATCH1, SZ_W); 256 dst = mov_rdisp8r(dst, CONTEXT, zf_off(ZF_S), SCRATCH2, SZ_B);
203 dst = call(dst, z80_write_word); 257 dst = shl_ir(dst, 1, SCRATCH2, SZ_B);
258 dst = or_rdisp8r(dst, CONTEXT, zf_off(ZF_Z), SCRATCH2, SZ_B);
259 dst = shl_ir(dst, 2, SCRATCH2, SZ_B);
260 dst = or_rdisp8r(dst, CONTEXT, zf_off(ZF_H), SCRATCH2, SZ_B);
261 dst = shl_ir(dst, 2, SCRATCH2, SZ_B);
262 dst = or_rdisp8r(dst, CONTEXT, zf_off(ZF_PV), SCRATCH2, SZ_B);
263 dst = shl_ir(dst, 1, SCRATCH2, SZ_B);
264 dst = or_rdisp8r(dst, CONTEXT, zf_off(ZF_N), SCRATCH2, SZ_B);
265 dst = shl_ir(dst, 1, SCRATCH2, SZ_B);
266 dst = or_rdisp8r(dst, CONTEXT, zf_off(ZF_C), SCRATCH2, SZ_B);
267 dst = shl_ir(dst, 8, SCRATCH2, SZ_W);
268 dst = mov_rr(dst, opts->regs[Z80_A], SCRATCH2, SZ_B);
269 } else {
270 dst = translate_z80_reg(inst, &src_op, dst, opts);
271 dst = mov_rr(dst, src_op.base, SCRATCH2, SZ_W);
272 }
273 dst = mov_rr(dst, opts->regs[Z80_SP], SCRATCH1, SZ_W);
274 dst = call(dst, (uint8_t *)z80_write_word_highfirst);
275 //no call to z80_save_reg needed since there's no chance we'll use only
276 //the upper half of a register pair
204 break; 277 break;
205 case Z80_POP: 278 case Z80_POP:
206 dst = zcycles(dst, (inst->reg == Z80_IX || inst->reg == Z80_IY) ? 8 : 4); 279 dst = zcycles(dst, (inst->reg == Z80_IX || inst->reg == Z80_IY) ? 8 : 4);
207 dst = sub_ir(dst, opts->regs[Z80_SP], SZ_W); 280 dst = mov_rr(dst, opts->regs[Z80_SP], SCRATCH1, SZ_W);
208 dst = translate_z80_reg(inst, &src_op, dst, opts); 281 dst = call(dst, (uint8_t *)z80_read_word);
209 dst = mov_rr(dst, src_op.base, SCRATCH1, SZ_W); 282 dst = add_ir(dst, 2, opts->regs[Z80_SP], SZ_W);
210 dst = call(dst, z80_write_word); 283 if (inst->reg == Z80_AF) {
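//for POP AF, A comes from the low byte of the popped word; the flag bytes are unpacked from bits 8-15 (C, N, P/V, H, Z, S)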
284 dst = mov_rr(dst, SCRATCH1, opts->regs[Z80_A], SZ_B);
285 dst = bt_ir(dst, 8, SCRATCH1, SZ_W);
286 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_C));
287 dst = bt_ir(dst, 9, SCRATCH1, SZ_W);
288 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_N));
289 dst = bt_ir(dst, 10, SCRATCH1, SZ_W);
290 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_PV));
291 dst = bt_ir(dst, 12, SCRATCH1, SZ_W);
292 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_H));
293 dst = bt_ir(dst, 14, SCRATCH1, SZ_W);
294 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_Z));
295 dst = bt_ir(dst, 15, SCRATCH1, SZ_W);
296 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_S));
297 } else {
298 dst = translate_z80_reg(inst, &src_op, dst, opts);
299 dst = mov_rr(dst, SCRATCH1, src_op.base, SZ_W);
300 }
301 //no call to z80_save_reg needed since there's no chance we'll use only
302 //the upper half of a register pair
211 break; 303 break;
212 /*case Z80_EX: 304 /*case Z80_EX:
213 case Z80_EXX: 305 case Z80_EXX:
214 case Z80_LDI: 306 case Z80_LDI:
215 case Z80_LDIR: 307 case Z80_LDIR:
220 case Z80_CPD: 312 case Z80_CPD:
221 case Z80_CPDR: 313 case Z80_CPDR:
222 break;*/ 314 break;*/
223 case Z80_ADD: 315 case Z80_ADD:
224 cycles = 4; 316 cycles = 4;
225 if (inst->addr_mode == Z80_IX_INDIRECT || inst->addr_mdoe == Z80_IY_INDIRECT) { 317 if (inst->addr_mode == Z80_IX_DISPLACE || inst->addr_mode == Z80_IY_DISPLACE) {
226 cycles += 12; 318 cycles += 12;
227 } else if(inst->addr_mode == Z80_IMMED) { 319 } else if(inst->addr_mode == Z80_IMMED) {
228 cycles += 3; 320 cycles += 3;
229 } else if(z80_size(inst) == SZ_W) { 321 } else if(z80_size(inst) == SZ_W) {
230 cycles += 4; 322 cycles += 4;
236 dst = add_rr(dst, src_op.base, dst_op.base, z80_size(inst)); 328 dst = add_rr(dst, src_op.base, dst_op.base, z80_size(inst));
237 } else { 329 } else {
238 dst = add_ir(dst, src_op.disp, dst_op.base, z80_size(inst)); 330 dst = add_ir(dst, src_op.disp, dst_op.base, z80_size(inst));
239 } 331 }
240 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_C)); 332 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_C));
241 dst = mov_irdisp8(dst, 0, CONTEXT, zf_off(ZF_N)); 333 dst = mov_irdisp8(dst, 0, CONTEXT, zf_off(ZF_N), SZ_B);
242 //TODO: Implement half-carry flag 334 //TODO: Implement half-carry flag
243 if (z80_size(inst) == SZ_B) { 335 if (z80_size(inst) == SZ_B) {
244 dst = setcc_rdisp8(dst, CC_O, zf_off(ZF_PV)); 336 dst = setcc_rdisp8(dst, CC_O, CONTEXT, zf_off(ZF_PV));
245 dst = setcc_rdisp8(dst, CC_Z, zf_off(ZF_Z)); 337 dst = setcc_rdisp8(dst, CC_Z, CONTEXT, zf_off(ZF_Z));
246 dst = setcc_rdisp8(dst, CC_S, zf_off(ZF_S)); 338 dst = setcc_rdisp8(dst, CC_S, CONTEXT, zf_off(ZF_S));
247 } 339 }
248 dst = z80_save_reg(dst, inst, opts); 340 dst = z80_save_reg(dst, inst, opts);
249 dst = z80_save_ea(dst, inst, opts); 341 dst = z80_save_ea(dst, inst, opts);
250 break; 342 break;
251 /*case Z80_ADC: 343 /*case Z80_ADC:
252 break;*/ 344 break;*/
253 case Z80_SUB: 345 case Z80_SUB:
254 cycles = 4; 346 cycles = 4;
255 if (inst->addr_mode == Z80_IX_INDIRECT || inst->addr_mdoe == Z80_IY_INDIRECT) { 347 if (inst->addr_mode == Z80_IX_DISPLACE || inst->addr_mode == Z80_IY_DISPLACE) {
256 cycles += 12; 348 cycles += 12;
257 } else if(inst->addr_mode == Z80_IMMED) { 349 } else if(inst->addr_mode == Z80_IMMED) {
258 cycles += 3; 350 cycles += 3;
259 } 351 }
260 dst = zcycles(dst, cycles); 352 dst = zcycles(dst, cycles);
264 dst = sub_rr(dst, src_op.base, dst_op.base, z80_size(inst)); 356 dst = sub_rr(dst, src_op.base, dst_op.base, z80_size(inst));
265 } else { 357 } else {
266 dst = sub_ir(dst, src_op.disp, dst_op.base, z80_size(inst)); 358 dst = sub_ir(dst, src_op.disp, dst_op.base, z80_size(inst));
267 } 359 }
268 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_C)); 360 dst = setcc_rdisp8(dst, CC_C, CONTEXT, zf_off(ZF_C));
269 dst = mov_irdisp8(dst, 1, CONTEXT, zf_off(ZF_N)); 361 dst = mov_irdisp8(dst, 1, CONTEXT, zf_off(ZF_N), SZ_B);
270 dst = setcc_rdisp8(dst, CC_O, zf_off(ZF_PV)); 362 dst = setcc_rdisp8(dst, CC_O, CONTEXT, zf_off(ZF_PV));
271 //TODO: Implement half-carry flag 363 //TODO: Implement half-carry flag
272 dst = setcc_rdisp8(dst, CC_Z, zf_off(ZF_Z)); 364 dst = setcc_rdisp8(dst, CC_Z, CONTEXT, zf_off(ZF_Z));
273 dst = setcc_rdisp8(dst, CC_S, zf_off(ZF_S) 365 dst = setcc_rdisp8(dst, CC_S, CONTEXT, zf_off(ZF_S));
274 dst = z80_save_reg(dst, inst, opts); 366 dst = z80_save_reg(dst, inst, opts);
275 dst = z80_save_ea(dst, inst, opts); 367 dst = z80_save_ea(dst, inst, opts);
276 break; 368 break;
277 /*case Z80_SBC: 369 /*case Z80_SBC:
278 case Z80_AND: 370 case Z80_AND:
292 if (dst_op.mode == MODE_UNUSED) { 384 if (dst_op.mode == MODE_UNUSED) {
293 dst = translate_z80_ea(inst, &dst_op, dst, opts, READ, MODIFY); 385 dst = translate_z80_ea(inst, &dst_op, dst, opts, READ, MODIFY);
294 } 386 }
295 dst = add_ir(dst, 1, dst_op.base, z80_size(inst)); 387 dst = add_ir(dst, 1, dst_op.base, z80_size(inst));
296 if (z80_size(inst) == SZ_B) { 388 if (z80_size(inst) == SZ_B) {
297 dst = mov_irdisp8(dst, 0, CONTEXT, zf_off(ZF_N)); 389 dst = mov_irdisp8(dst, 0, CONTEXT, zf_off(ZF_N), SZ_B);
298 //TODO: Implement half-carry flag 390 //TODO: Implement half-carry flag
299 dst = setcc_rdisp8(dst, CC_O, zf_off(ZF_PV)); 391 dst = setcc_rdisp8(dst, CC_O, CONTEXT, zf_off(ZF_PV));
300 dst = setcc_rdisp8(dst, CC_Z, zf_off(ZF_Z)); 392 dst = setcc_rdisp8(dst, CC_Z, CONTEXT, zf_off(ZF_Z));
301 dst = setcc_rdisp8(dst, CC_S, zf_off(ZF_S)); 393 dst = setcc_rdisp8(dst, CC_S, CONTEXT, zf_off(ZF_S));
302 } 394 }
303 dst = z80_save_reg(dst, inst, opts); 395 dst = z80_save_reg(dst, inst, opts);
304 dst = z80_save_ea(dst, inst, opts); 396 dst = z80_save_ea(dst, inst, opts);
305 break; 397 break;
306 /*case Z80_DEC: 398 /*case Z80_DEC:
336 case Z80_BIT: 428 case Z80_BIT:
337 case Z80_SET: 429 case Z80_SET:
338 case Z80_RES: 430 case Z80_RES:
339 case Z80_JP: 431 case Z80_JP:
340 case Z80_JPCC: 432 case Z80_JPCC:
341 case Z80_JR: 433 case Z80_JR:*/
342 case Z80_JRCC: 434 case Z80_JRCC: {
343 case Z80_DJNZ: 435 dst = zcycles(dst, 7);//T States: 4,3
344 case Z80_CALL: 436 uint8_t cond = CC_Z;
345 case Z80_CALLCC: 437 switch (inst->reg)
438 {
439 case Z80_CC_NZ:
440 cond = CC_NZ;
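//fall through: Z and NZ both test the saved Z flag, only the x86 condition differs (same pattern for C/NC below)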
441 case Z80_CC_Z:
442 dst = cmp_irdisp8(dst, 0, CONTEXT, zf_off(ZF_Z), SZ_B);
443 break;
444 case Z80_CC_NC:
445 cond = CC_NZ;
446 case Z80_CC_C:
447 dst = cmp_irdisp8(dst, 0, CONTEXT, zf_off(ZF_C), SZ_B);
448 break;
449 }
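//jcc to dst+2 emits a short conditional jump with a zero displacement byte at no_jump_off; that byte is patched below once the length of the taken path is known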
450 uint8_t *no_jump_off = dst+1;
451 dst = jcc(dst, cond, dst+2);
452 dst = zcycles(dst, 5);//T States: 5
453 uint16_t dest_addr = address + inst->immed + 2;
454 if (dest_addr < 0x4000) {
455 uint8_t * call_dst = z80_get_native_address(context, dest_addr);
456 if (!call_dst) {
457 opts->deferred = defer_address(opts->deferred, dest_addr, dst + 1);
458 //fake address to force large displacement
459 call_dst = dst + 256;
460 }
461 dst = jmp(dst, call_dst);
462 } else {
463 dst = mov_ir(dst, dest_addr, SCRATCH1, SZ_W);
464 dst = call(dst, (uint8_t *)z80_native_addr);
465 dst = jmp_r(dst, SCRATCH1);
466 }
467 *no_jump_off = dst - (no_jump_off+1);
468 break;
469 }
470 //case Z80_DJNZ:*/
471 case Z80_CALL: {
472 dst = zcycles(dst, 11);//T States: 4,3,4
473 dst = sub_ir(dst, 2, opts->regs[Z80_SP], SZ_W);
474 dst = mov_ir(dst, address + 3, SCRATCH2, SZ_W);
475 dst = mov_rr(dst, opts->regs[Z80_SP], SCRATCH1, SZ_W);
476 dst = call(dst, (uint8_t *)z80_write_word_highfirst);//T States: 3, 3
477 if (inst->immed < 0x4000) {
478 uint8_t * call_dst = z80_get_native_address(context, inst->immed);
479 if (!call_dst) {
480 opts->deferred = defer_address(opts->deferred, inst->immed, dst + 1);
481 //fake address to force large displacement
482 call_dst = dst + 256;
483 }
484 dst = jmp(dst, call_dst);
485 } else {
486 dst = mov_ir(dst, inst->immed, SCRATCH1, SZ_W);
487 dst = call(dst, (uint8_t *)z80_native_addr);
488 dst = jmp_r(dst, SCRATCH1);
489 }
490 break;
491 }
492 //case Z80_CALLCC:
346 case Z80_RET: 493 case Z80_RET:
347 case Z80_RETCC: 494 dst = zcycles(dst, 4);//T States: 4
495 dst = mov_rr(dst, opts->regs[Z80_SP], SCRATCH1, SZ_W);
496 dst = call(dst, (uint8_t *)z80_read_word);//T States: 3, 3
497 dst = add_ir(dst, 2, opts->regs[Z80_SP], SZ_W);
498 dst = call(dst, (uint8_t *)z80_native_addr);
499 dst = jmp_r(dst, SCRATCH1);
500 break;
501 /*case Z80_RETCC:
348 case Z80_RETI: 502 case Z80_RETI:
349 case Z80_RETN: 503 case Z80_RETN:
350 case Z80_RST: 504 case Z80_RST:
351 case Z80_IN: 505 case Z80_IN:
352 case Z80_INI: 506 case Z80_INI:
356 case Z80_OUT: 510 case Z80_OUT:
357 case Z80_OUTI: 511 case Z80_OUTI:
358 case Z80_OTIR: 512 case Z80_OTIR:
359 case Z80_OUTD: 513 case Z80_OUTD:
360 case Z80_OTDR:*/ 514 case Z80_OTDR:*/
361 default: 515 default: {
362 fprintf(stderr, "unimplemented instruction: %d\n", inst->op); 516 char disbuf[80];
517 z80_disasm(inst, disbuf);
518 fprintf(stderr, "unimplemented instruction: %s\n", disbuf);
363 exit(1); 519 exit(1);
364 } 520 }
365 } 521 }
366 522 return dst;
367 void translate_z80_stream(z80_context * context, uint16_t address) 523 }
368 { 524
369 } 525 uint8_t * z80_get_native_address(z80_context * context, uint32_t address)
526 {
527 native_map_slot *map;
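//addresses below 0x4000 are looked up in the static code map (masked to 8KB), 0x8000 and up in the map for the current bank_reg value; anything else has no native code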
528 if (address < 0x4000) {
529 address &= 0x1FFF;
530 map = context->static_code_map;
531 } else if (address >= 0x8000) {
532 address &= 0x7FFF;
533 map = context->banked_code_map + context->bank_reg;
534 } else {
535 return NULL;
536 }
537 if (!map->base || !map->offsets || map->offsets[address] == INVALID_OFFSET) {
538 return NULL;
539 }
540 return map->base + map->offsets[address];
541 }
542
543 //TODO: Record z80 instruction size and code size for addresses to support modification of translated code
544 void z80_map_native_address(z80_context * context, uint32_t address, uint8_t * native_address)
545 {
546 native_map_slot *map;
547 if (address < 0x4000) {
548 address &= 0x1FFF;
549 map = context->static_code_map;
550 } else if (address >= 0x8000) {
551 address &= 0x7FFF;
552 map = context->banked_code_map + context->bank_reg;
553 if (!map->offsets) {
554 map->offsets = malloc(sizeof(int32_t) * 0x8000);
555 memset(map->offsets, 0xFF, sizeof(int32_t) * 0x8000);
556 }
557 } else {
558 return;
559 }
560 if (!map->base) {
561 map->base = native_address;
562 }
563 map->offsets[address] = native_address - map->base;
564 }
565
566 uint8_t * z80_get_native_address_trans(z80_context * context, uint32_t address)
567 {
568 uint8_t * addr = z80_get_native_address(context, address);
569 if (!addr) {
570 translate_z80_stream(context, address);
571 addr = z80_get_native_address(context, address);
572 }
573 return addr;
574 }
575
576 void translate_z80_stream(z80_context * context, uint32_t address)
577 {
578 char disbuf[80];
579 if (z80_get_native_address(context, address)) {
580 return;
581 }
582 x86_z80_options * opts = context->options;
583 uint8_t * encoded = NULL, *next;
584 if (address < 0x4000) {
585 encoded = context->mem_pointers[0] + (address & 0x1FFF);
586 } else if(address >= 0x8000 && context->mem_pointers[1]) {
587 encoded = context->mem_pointers[1] + (address & 0x7FFF);
588 }
589 while (encoded != NULL)
590 {
591 z80inst inst;
592 printf("translating Z80 code at address %X\n", address);
593 do {
594 if (opts->code_end-opts->cur_code < MAX_NATIVE_SIZE) {
595 if (opts->code_end-opts->cur_code < 5) {
596 puts("out of code memory, not enough space for jmp to next chunk");
597 exit(1);
598 }
599 size_t size = 1024*1024;
600 opts->cur_code = alloc_code(&size);
601 opts->code_end = opts->cur_code + size;
602 jmp(opts->cur_code, opts->cur_code);
603 }
604 if (address > 0x4000 & address < 0x8000) {
605 opts->cur_code = xor_rr(opts->cur_code, RDI, RDI, SZ_D);
606 opts->cur_code = call(opts->cur_code, (uint8_t *)exit);
607 break;
608 }
609 uint8_t * existing = z80_get_native_address(context, address);
610 if (existing) {
611 opts->cur_code = jmp(opts->cur_code, existing);
612 break;
613 }
614 next = z80_decode(encoded, &inst);
615 z80_disasm(&inst, disbuf);
616 if (inst.op == Z80_NOP) {
617 printf("%X\t%s(%d)\n", address, disbuf, inst.immed);
618 } else {
619 printf("%X\t%s\n", address, disbuf);
620 }
621 z80_map_native_address(context, address, opts->cur_code);
622 opts->cur_code = translate_z80inst(&inst, opts->cur_code, context, address);
623 address += next-encoded;
624 encoded = next;
625 } while (!(inst.op == Z80_RET || inst.op == Z80_RETI || inst.op == Z80_RETN || (inst.op = Z80_NOP && inst.immed == 42)));
626 process_deferred(&opts->deferred, context, (native_addr_func)z80_get_native_address);
627 if (opts->deferred) {
628 address = opts->deferred->address;
629 printf("defferred address: %X\n", address);
630 if (address < 0x4000) {
631 encoded = context->mem_pointers[0] + (address & 0x1FFF);
632 } else if (address > 0x8000 && context->mem_pointers[1]) {
633 encoded = context->mem_pointers[1] + (address & 0x7FFF);
634 } else {
635 printf("attempt to translate non-memory address: %X\n", address);
636 exit(1);
637 }
638 } else {
639 encoded = NULL;
640 }
641 }
642 }
643
644 void init_x86_z80_opts(x86_z80_options * options)
645 {
646 options->flags = 0;
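//8-bit registers share x86 registers with their 16-bit pairs: B in BH and C in the low byte of RBX make Z80_BC the low word of RBX (likewise DE/RCX, HL/RAX, IX/RDX); IY lives in R8, which has no separately addressable high byte, hence the ror trick used for IYH accesses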
647 options->regs[Z80_B] = BH;
648 options->regs[Z80_C] = RBX;
649 options->regs[Z80_D] = CH;
650 options->regs[Z80_E] = RCX;
651 options->regs[Z80_H] = AH;
652 options->regs[Z80_L] = RAX;
653 options->regs[Z80_IXH] = DH;
654 options->regs[Z80_IXL] = RDX;
655 options->regs[Z80_IYH] = -1;
656 options->regs[Z80_IYL] = -1;
657 options->regs[Z80_I] = -1;
658 options->regs[Z80_R] = -1;
659 options->regs[Z80_A] = R10;
660 options->regs[Z80_BC] = RBX;
661 options->regs[Z80_DE] = RCX;
662 options->regs[Z80_HL] = RAX;
663 options->regs[Z80_SP] = R9;
664 options->regs[Z80_AF] = -1;
665 options->regs[Z80_IX] = RDX;
666 options->regs[Z80_IY] = R8;
667 size_t size = 1024 * 1024;
668 options->cur_code = alloc_code(&size);
669 options->code_end = options->cur_code + size;
670 options->deferred = NULL;
671 }
672
673 void init_z80_context(z80_context * context, x86_z80_options * options)
674 {
675 memset(context, 0, sizeof(*context));
676 context->static_code_map = malloc(sizeof(context->static_code_map));
677 context->static_code_map->offsets = malloc(sizeof(int32_t) * 0x2000);
678 memset(context->static_code_map->offsets, 0xFF, sizeof(int32_t) * 0x2000);
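//one native_map_slot per possible bank_reg value (1 << 9 = 512); per-bank offset arrays are allocated lazily in z80_map_native_address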
679 context->banked_code_map = malloc(sizeof(native_map_slot) * (1 << 9));
680 context->options = options;
681 }
682
683 void z80_reset(z80_context * context)
684 {
685 context->native_pc = z80_get_native_address_trans(context, 0);
686 }
687
688