comparison m68k_to_x86.c @ 550:96489fb27dbf

Apart from the Z80 core, BlastEm now supports 32-bit x86
author Michael Pavone <pavone@retrodev.com>
date Wed, 19 Feb 2014 00:22:27 -0800
parents a3afee2271ce
children 5af986d2b9da
comparison: 549:32da1e0d5e55 -> 550:96489fb27dbf
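The same pattern repeats throughout the hunks below: wherever the generated code calls out to a C helper, the 64-bit build keeps the System V AMD64 convention (first integer arguments in RDI, RSI, RDX), while the new 32-bit build pushes the arguments on the stack in cdecl order and has the caller pop them after the call; at the same time SZ_Q becomes SZ_PTR so pointer-sized moves shrink to 32 bits on the smaller target. A minimal sketch of that pattern, using the emitter names that appear in this file but a hypothetical emit_call_with_context helper that is not part of the changeset:

//Hypothetical illustration only: wrap a one-argument C call so the 68K
//context pointer reaches the callee under either calling convention.
uint8_t * emit_call_with_context(uint8_t * dst, uint8_t * save_context,
                                 uint8_t * load_context, uint8_t * fun)
{
	dst = call(dst, save_context);
#ifdef X86_64
	//System V AMD64 ABI: first integer argument travels in RDI
	dst = mov_rr(dst, CONTEXT, RDI, SZ_PTR);
	dst = call(dst, fun);
#else
	//cdecl: the argument is pushed, then the caller discards it after the call
	dst = push_r(dst, CONTEXT);
	dst = call(dst, fun);
	dst = add_ir(dst, 4, RSP, SZ_D);
#endif
	//helpers that mutate the context hand the (possibly moved) pointer back in RAX/EAX
	dst = mov_rr(dst, RAX, CONTEXT, SZ_PTR);
	return call(dst, load_context);
}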
@@ -3369,11 +3369,15 @@
 }
 }
 break;
 case M68K_ILLEGAL:
 dst = call(dst, opts->save_context);
-dst = mov_rr(dst, CONTEXT, RDI, SZ_Q);
+#ifdef X86_64
+dst = mov_rr(dst, CONTEXT, RDI, SZ_PTR);
+#else
+dst = push_r(dst, CONTEXT);
+#endif
 dst = call(dst, (uint8_t *)print_regs_exit);
 break;
 case M68K_MOVE_FROM_SR:
 //TODO: Trap if not in system mode
 dst = call(dst, opts->get_sr);
@@ -3604,11 +3608,15 @@
 }
 }
 break;
 case M68K_RESET:
 dst = call(dst, opts->save_context);
-dst = mov_rr(dst, CONTEXT, RDI, SZ_Q);
+#ifdef X86_64
+dst = mov_rr(dst, CONTEXT, RDI, SZ_PTR);
+#else
+dst = push_r(dst, CONTEXT);
+#endif
 dst = call(dst, (uint8_t *)print_regs_exit);
 break;
 case M68K_ROL:
 case M68K_ROR:
 dst = set_flag(dst, 0, FLAG_V, opts);
@@ -4106,10 +4114,13 @@
 dst = opts->cur_code;
 dst_end = opts->code_end;
 }
 if (address >= 0x400000 && address < 0xE00000) {
 dst = xor_rr(dst, RDI, RDI, SZ_D);
+#ifdef X86_32
+dst = push_r(dst, RDI);
+#endif
 dst = call(dst, (uint8_t *)exit);
 break;
 }
 uint8_t * existing = get_native_address(opts->native_code_map, address);
 if (existing) {
@@ -4235,13 +4246,20 @@
 options->code_end = options->cur_code + size;
 }
 uint8_t * rdst = options->retrans_stub = options->cur_code;
 rdst = call(rdst, options->save_context);
 rdst = push_r(rdst, CONTEXT);
+#ifdef X86_32
+rdst = push_r(rdst, CONTEXT);
+rdst = push_r(rdst, SCRATCH2);
+#endif
 rdst = call(rdst, (uint8_t *)m68k_retranslate_inst);
+#ifdef X86_32
+rdst = add_ir(rdst, 8, RSP, SZ_D);
+#endif
 rdst = pop_r(rdst, CONTEXT);
-rdst = mov_rr(rdst, RAX, SCRATCH1, SZ_Q);
+rdst = mov_rr(rdst, RAX, SCRATCH1, SZ_PTR);
 rdst = call(rdst, options->load_context);
 rdst = jmp_r(rdst, SCRATCH1);
 options->cur_code = rdst;
 }
 dst = jmp(dst, options->retrans_stub);
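In the retranslation stub above, the first push of CONTEXT exists on both targets and is the copy restored by the later pop_r; the two extra 32-bit pushes are the cdecl arguments, ordered so that the value on top (SCRATCH2, holding the 68K address) becomes the first parameter, and add_ir(rdst, 8, RSP, SZ_D) then drops the two 4-byte arguments after the call. That ordering, together with the RAX -> SCRATCH1 -> jmp_r sequence, suggests a signature along the lines of the following sketch (an assumption for illustration; the real declaration lives elsewhere in the source):

//Assumed shape, inferred from the push order above: retranslate the 68K
//instruction at `address` and return the new native entry point for it.
uint8_t * m68k_retranslate_inst(uint32_t address, m68k_context * context);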
@@ -4273,14 +4291,22 @@
 dst = bp_stub;

 //Save context and call breakpoint handler
 dst = call(dst, opts->save_context);
 dst = push_r(dst, SCRATCH1);
-dst = mov_rr(dst, CONTEXT, RDI, SZ_Q);
+#ifdef X86_64
+dst = mov_rr(dst, CONTEXT, RDI, SZ_PTR);
 dst = mov_rr(dst, SCRATCH1, RSI, SZ_D);
+#else
+dst = push_r(dst, SCRATCH1);
+dst = push_r(dst, CONTEXT);
+#endif
 dst = call(dst, bp_handler);
-dst = mov_rr(dst, RAX, CONTEXT, SZ_Q);
+#ifdef X86_32
+dst = add_ir(dst, 8, RSP, SZ_D);
+#endif
+dst = mov_rr(dst, RAX, CONTEXT, SZ_PTR);
 //Restore context
 dst = call(dst, opts->load_context);
 dst = pop_r(dst, SCRATCH1);
 //do prologue stuff
 dst = cmp_rr(dst, CYCLES, LIMIT, SZ_D);
@@ -4288,11 +4314,11 @@
 dst = jcc(dst, CC_NC, dst + 7);
 dst = call(dst, opts->handle_cycle_limit_int);
 *jmp_off = dst - (jmp_off+1);
 //jump back to body of translated instruction
 dst = pop_r(dst, SCRATCH1);
-dst = add_ir(dst, check_int_size - (native-start_native), SCRATCH1, SZ_Q);
+dst = add_ir(dst, check_int_size - (native-start_native), SCRATCH1, SZ_PTR);
 dst = jmp_r(dst, SCRATCH1);
 opts->cur_code = dst;
 } else {
 native = call(native, bp_stub);
 }
@@ -4373,14 +4399,15 @@
 cfun = NULL;
 }
 if(memmap[chunk].buffer && memmap[chunk].flags & access_flag) {
 if (memmap[chunk].flags & MMAP_PTR_IDX) {
 if (memmap[chunk].flags & MMAP_FUNC_NULL) {
-dst = cmp_irdisp8(dst, 0, CONTEXT, offsetof(m68k_context, mem_pointers) + sizeof(void*) * memmap[chunk].ptr_index, SZ_Q);
+dst = cmp_irdisp8(dst, 0, CONTEXT, offsetof(m68k_context, mem_pointers) + sizeof(void*) * memmap[chunk].ptr_index, SZ_PTR);
 uint8_t * not_null = dst+1;
 dst = jcc(dst, CC_NZ, dst+2);
 dst = call(dst, opts->save_context);
+#ifdef X86_64
 if (is_write) {
 if (SCRATCH2 != RDI) {
 dst = mov_rr(dst, SCRATCH2, RDI, SZ_D);
 }
 dst = mov_rr(dst, SCRATCH1, RDX, size);
@@ -4393,16 +4420,27 @@
 dst = jcc(dst, CC_NZ, dst+2);
 dst = call(dst, cfun);
 uint8_t *no_adjust = dst+1;
 dst = jmp(dst, dst+2);
 *adjust_rsp = dst - (adjust_rsp + 1);
-dst = sub_ir(dst, 8, RSP, SZ_Q);
+dst = sub_ir(dst, 8, RSP, SZ_PTR);
 dst = call(dst, cfun);
-dst = add_ir(dst, 8, RSP, SZ_Q);
+dst = add_ir(dst, 8, RSP, SZ_PTR);
 *no_adjust = dst - (no_adjust + 1);
+#else
 if (is_write) {
-dst = mov_rr(dst, RAX, CONTEXT, SZ_Q);
+dst = push_r(dst, SCRATCH1);
+} else {
+dst = push_r(dst, CONTEXT);//save CONTEXT for later
+}
+dst = push_r(dst, CONTEXT);
+dst = push_r(dst, is_write ? SCRATCH2 : SCRATCH1);
+dst = call(dst, cfun);
+dst = add_ir(dst, is_write ? 12 : 8, RSP, SZ_D);
+#endif
+if (is_write) {
+dst = mov_rr(dst, RAX, CONTEXT, SZ_PTR);
 } else {
 dst = pop_r(dst, CONTEXT);
 dst = mov_rr(dst, RAX, SCRATCH1, size);
 }
 dst = jmp(dst, opts->load_context);
@@ -4410,11 +4448,11 @@
 *not_null = dst - (not_null + 1);
 }
 if (size == SZ_B) {
 dst = xor_ir(dst, 1, adr_reg, SZ_D);
 }
-dst = add_rdisp8r(dst, CONTEXT, offsetof(m68k_context, mem_pointers) + sizeof(void*) * memmap[chunk].ptr_index, adr_reg, SZ_Q);
+dst = add_rdisp8r(dst, CONTEXT, offsetof(m68k_context, mem_pointers) + sizeof(void*) * memmap[chunk].ptr_index, adr_reg, SZ_PTR);
 if (is_write) {
 dst = mov_rrind(dst, SCRATCH1, SCRATCH2, size);

 } else {
 dst = mov_rindr(dst, SCRATCH1, SCRATCH1, size);
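The 32-bit cleanup constant in the handler-call path above follows directly from the cdecl argument counts: a write handler receives (address, context, value) and a read handler (address, context), so the caller pops 3 * 4 = 12 or 2 * 4 = 8 bytes respectively, and for reads the extra CONTEXT push made before the arguments is what pop_r restores afterwards. A hedged sketch of the handler shapes this implies (illustrative typedefs for a 16-bit handler, not the declarations BlastEm actually uses):

//Illustrative shapes only: handlers take the 68K address first and the context
//second; a write handler also takes the value and returns the (possibly updated)
//context, while a read handler returns the data that the generated code moves
//from RAX/EAX into SCRATCH1.
typedef uint16_t       (*illustrative_read_16_fun) (uint32_t address, m68k_context * context);
typedef m68k_context * (*illustrative_write_16_fun)(uint32_t address, m68k_context * context, uint16_t value);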
@@ -4440,25 +4478,25 @@
 dst = shr_ir(dst, 1, adr_reg, SZ_D);
 if ((memmap[chunk].flags & MMAP_ONLY_EVEN) && is_write) {
 dst = shr_ir(dst, 8, SCRATCH1, SZ_W);
 }
 }
-if ((int64_t)memmap[chunk].buffer <= 0x7FFFFFFF && (int64_t)memmap[chunk].buffer >= -2147483648) {
+if ((intptr_t)memmap[chunk].buffer <= 0x7FFFFFFF && (intptr_t)memmap[chunk].buffer >= -2147483648) {
 if (is_write) {
-dst = mov_rrdisp32(dst, SCRATCH1, SCRATCH2, (int64_t)memmap[chunk].buffer, tmp_size);
+dst = mov_rrdisp32(dst, SCRATCH1, SCRATCH2, (intptr_t)memmap[chunk].buffer, tmp_size);
 } else {
-dst = mov_rdisp32r(dst, SCRATCH1, (int64_t)memmap[chunk].buffer, SCRATCH1, tmp_size);
+dst = mov_rdisp32r(dst, SCRATCH1, (intptr_t)memmap[chunk].buffer, SCRATCH1, tmp_size);
 }
 } else {
 if (is_write) {
 dst = push_r(dst, SCRATCH1);
-dst = mov_ir(dst, (int64_t)memmap[chunk].buffer, SCRATCH1, SZ_Q);
-dst = add_rr(dst, SCRATCH1, SCRATCH2, SZ_Q);
+dst = mov_ir(dst, (intptr_t)memmap[chunk].buffer, SCRATCH1, SZ_PTR);
+dst = add_rr(dst, SCRATCH1, SCRATCH2, SZ_PTR);
 dst = pop_r(dst, SCRATCH1);
 dst = mov_rrind(dst, SCRATCH1, SCRATCH2, tmp_size);
 } else {
-dst = mov_ir(dst, (int64_t)memmap[chunk].buffer, SCRATCH2, SZ_Q);
+dst = mov_ir(dst, (intptr_t)memmap[chunk].buffer, SCRATCH2, SZ_PTR);
 dst = mov_rindexr(dst, SCRATCH2, SCRATCH1, 1, SCRATCH1, tmp_size);
 }
 }
 if (size != tmp_size && !is_write) {
 if (memmap[chunk].flags & MMAP_ONLY_EVEN) {
@@ -4474,18 +4512,26 @@
 dst = shr_ir(dst, 11, SCRATCH1, SZ_D);
 dst = bt_rrdisp32(dst, SCRATCH1, CONTEXT, offsetof(m68k_context, ram_code_flags), SZ_D);
 uint8_t * not_code = dst+1;
 dst = jcc(dst, CC_NC, dst+2);
 dst = call(dst, opts->save_context);
+#ifdef X86_32
+dst = push_r(dst, CONTEXT);
+dst = push_r(dst, SCRATCH2);
+#endif
 dst = call(dst, (uint8_t *)m68k_handle_code_write);
-dst = mov_rr(dst, RAX, CONTEXT, SZ_Q);
+#ifdef X86_32
+dst = add_ir(dst, 8, RSP, SZ_D);
+#endif
+dst = mov_rr(dst, RAX, CONTEXT, SZ_PTR);
 dst = call(dst, opts->load_context);
 *not_code = dst - (not_code+1);
 }
 dst = retn(dst);
 } else if (cfun) {
 dst = call(dst, opts->save_context);
+#ifdef X86_64
 if (is_write) {
 if (SCRATCH2 != RDI) {
 dst = mov_rr(dst, SCRATCH2, RDI, SZ_D);
 }
 dst = mov_rr(dst, SCRATCH1, RDX, size);
@@ -4498,16 +4544,27 @@
 dst = jcc(dst, CC_NZ, dst+2);
 dst = call(dst, cfun);
 uint8_t *no_adjust = dst+1;
 dst = jmp(dst, dst+2);
 *adjust_rsp = dst - (adjust_rsp + 1);
-dst = sub_ir(dst, 8, RSP, SZ_Q);
+dst = sub_ir(dst, 8, RSP, SZ_PTR);
 dst = call(dst, cfun);
-dst = add_ir(dst, 8, RSP, SZ_Q);
+dst = add_ir(dst, 8, RSP, SZ_PTR);
 *no_adjust = dst - (no_adjust+1);
+#else
 if (is_write) {
-dst = mov_rr(dst, RAX, CONTEXT, SZ_Q);
+dst = push_r(dst, SCRATCH1);
+} else {
+dst = push_r(dst, CONTEXT);//save CONTEXT for later
+}
+dst = push_r(dst, CONTEXT);
+dst = push_r(dst, is_write ? SCRATCH2 : SCRATCH1);
+dst = call(dst, cfun);
+dst = add_ir(dst, is_write ? 12 : 8, RSP, SZ_D);
+#endif
+if (is_write) {
+dst = mov_rr(dst, RAX, CONTEXT, SZ_PTR);
 } else {
 dst = pop_r(dst, CONTEXT);
 dst = mov_rr(dst, RAX, SCRATCH1, size);
 }
 dst = jmp(dst, opts->load_context);
@@ -4609,62 +4666,104 @@
 dst = mov_rdisp8r(dst, CONTEXT, offsetof(m68k_context, current_cycle), CYCLES, SZ_D);
 dst = mov_rdisp8r(dst, CONTEXT, offsetof(m68k_context, target_cycle), LIMIT, SZ_D);
 dst = retn(dst);

 opts->start_context = (start_fun)dst;
+#ifdef X86_64
 if (SCRATCH2 != RDI) {
-dst = mov_rr(dst, RDI, SCRATCH2, SZ_Q);
+dst = mov_rr(dst, RDI, SCRATCH2, SZ_PTR);
 }
 //save callee save registers
 dst = push_r(dst, RBP);
 dst = push_r(dst, R12);
 dst = push_r(dst, R13);
 dst = push_r(dst, R14);
 dst = push_r(dst, R15);
+#else
+//save callee save registers
+dst = push_r(dst, RBP);
+dst = push_r(dst, RBX);
+dst = push_r(dst, RSI);
+dst = push_r(dst, RDI);
+
+dst = mov_rdisp8r(dst, RSP, 20, SCRATCH2, SZ_D);
+dst = mov_rdisp8r(dst, RSP, 24, CONTEXT, SZ_D);
+#endif
 dst = call(dst, opts->load_context);
 dst = call_r(dst, SCRATCH2);
 dst = call(dst, opts->save_context);
+#ifdef X86_64
 //restore callee save registers
 dst = pop_r(dst, R15);
 dst = pop_r(dst, R14);
 dst = pop_r(dst, R13);
 dst = pop_r(dst, R12);
 dst = pop_r(dst, RBP);
+#else
+dst = pop_r(dst, RDI);
+dst = pop_r(dst, RSI);
+dst = pop_r(dst, RBX);
+dst = pop_r(dst, RBP);
+#endif
 dst = retn(dst);

 opts->native_addr = dst;
 dst = call(dst, opts->save_context);
 dst = push_r(dst, CONTEXT);
-dst = mov_rr(dst, CONTEXT, RDI, SZ_Q); //move context to 1st arg reg
+#ifdef X86_64
+dst = mov_rr(dst, CONTEXT, RDI, SZ_PTR); //move context to 1st arg reg
 dst = mov_rr(dst, SCRATCH1, RSI, SZ_D); //move address to 2nd arg reg
+#else
+dst = push_r(dst, SCRATCH1);
+dst = push_r(dst, CONTEXT);
+#endif
 dst = call(dst, (uint8_t *)get_native_address_trans);
-dst = mov_rr(dst, RAX, SCRATCH1, SZ_Q); //move result to scratch reg
+#ifdef X86_32
+dst = add_ir(dst, 8, RSP, SZ_D);
+#endif
+dst = mov_rr(dst, RAX, SCRATCH1, SZ_PTR); //move result to scratch reg
 dst = pop_r(dst, CONTEXT);
 dst = call(dst, opts->load_context);
 dst = retn(dst);

 opts->native_addr_and_sync = dst;
 dst = call(dst, opts->save_context);
 dst = push_r(dst, SCRATCH1);
-dst = mov_rr(dst, CONTEXT, RDI, SZ_Q);
+#ifdef X86_64
+dst = mov_rr(dst, CONTEXT, RDI, SZ_PTR);
 dst = xor_rr(dst, RSI, RSI, SZ_D);
-dst = test_ir(dst, 8, RSP, SZ_Q); //check stack alignment
+dst = test_ir(dst, 8, RSP, SZ_PTR); //check stack alignment
 uint8_t * do_adjust_rsp = dst+1;
 dst = jcc(dst, CC_NZ, dst+2);
 dst = call(dst, (uint8_t *)sync_components);
 uint8_t * no_adjust_rsp = dst+1;
 dst = jmp(dst, dst+2);
 *do_adjust_rsp = dst - (do_adjust_rsp+1);
-dst = sub_ir(dst, 8, RSP, SZ_Q);
+dst = sub_ir(dst, 8, RSP, SZ_PTR);
 dst = call(dst, (uint8_t *)sync_components);
-dst = add_ir(dst, 8, RSP, SZ_Q);
+dst = add_ir(dst, 8, RSP, SZ_PTR);
 *no_adjust_rsp = dst - (no_adjust_rsp+1);
 dst = pop_r(dst, RSI);
 dst = push_r(dst, RAX);
-dst = mov_rr(dst, RAX, RDI, SZ_Q);
+dst = mov_rr(dst, RAX, RDI, SZ_PTR);
 dst = call(dst, (uint8_t *)get_native_address_trans);
-dst = mov_rr(dst, RAX, SCRATCH1, SZ_Q); //move result to scratch reg
+#else
+//TODO: Add support for pushing a constant in gen_x86
+dst = xor_rr(dst, RAX, RAX, SZ_D);
+dst = push_r(dst, RAX);
+dst = push_r(dst, CONTEXT);
+dst = call(dst, (uint8_t *)sync_components);
+dst = add_ir(dst, 8, RSP, SZ_D);
+dst = pop_r(dst, RSI); //restore saved address from SCRATCH1
+dst = push_r(dst, RAX); //save context pointer for later
+dst = push_r(dst, RSI); //2nd arg -- address
+dst = push_r(dst, RAX); //1st arg -- context pointer
+dst = call(dst, (uint8_t *)get_native_address_trans);
+dst = add_ir(dst, 8, RSP, SZ_D);
+#endif
+
+dst = mov_rr(dst, RAX, SCRATCH1, SZ_PTR); //move result to scratch reg
 dst = pop_r(dst, CONTEXT);
 dst = call(dst, opts->load_context);
 dst = retn(dst);

 opts->handle_cycle_limit = dst;
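The displacements 20 and 24 in the 32-bit start_context entry sequence above fall out of the cdecl stack layout: on entry the return address sits at [ESP] with the two arguments above it, and the four callee-saved pushes lower ESP by another 16 bytes, leaving the first argument (the native code pointer loaded into SCRATCH2 and then call_r'd) at ESP+20 and the second (the context pointer loaded into CONTEXT) at ESP+24. A worked layout, with offsets taken from ESP after the pushes:

ESP + 0 .. 15   saved RBP, RBX, RSI, RDI (four 4-byte pushes)
ESP + 16        return address
ESP + 20        argument 1 -> SCRATCH2 (native code to run)
ESP + 24        argument 2 -> CONTEXT (m68k_context pointer)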
@@ -4673,24 +4772,33 @@
 dst = jcc(dst, CC_C, dst+2);
 opts->do_sync = dst;
 dst = push_r(dst, SCRATCH1);
 dst = push_r(dst, SCRATCH2);
 dst = call(dst, opts->save_context);
-dst = mov_rr(dst, CONTEXT, RDI, SZ_Q);
+#ifdef X86_64
+dst = mov_rr(dst, CONTEXT, RDI, SZ_PTR);
 dst = xor_rr(dst, RSI, RSI, SZ_D);
 dst = test_ir(dst, 8, RSP, SZ_D);
 uint8_t *adjust_rsp = dst+1;
 dst = jcc(dst, CC_NZ, dst+2);
 dst = call(dst, (uint8_t *)sync_components);
 uint8_t *no_adjust = dst+1;
 dst = jmp(dst, dst+2);
 *adjust_rsp = dst - (adjust_rsp + 1);
-dst = sub_ir(dst, 8, RSP, SZ_Q);
+dst = sub_ir(dst, 8, RSP, SZ_PTR);
 dst = call(dst, (uint8_t *)sync_components);
-dst = add_ir(dst, 8, RSP, SZ_Q);
+dst = add_ir(dst, 8, RSP, SZ_PTR);
 *no_adjust = dst - (no_adjust+1);
-dst = mov_rr(dst, RAX, CONTEXT, SZ_Q);
+#else
+//TODO: Add support for pushing a constant in gen_x86
+dst = xor_rr(dst, RAX, RAX, SZ_D);
+dst = push_r(dst, RAX);
+dst = push_r(dst, CONTEXT);
+dst = call(dst, (uint8_t *)sync_components);
+dst = add_ir(dst, 8, RSP, SZ_D);
+#endif
+dst = mov_rr(dst, RAX, CONTEXT, SZ_PTR);
 dst = call(dst, opts->load_context);
 dst = pop_r(dst, SCRATCH2);
 dst = pop_r(dst, SCRATCH1);
 *skip_sync = dst - (skip_sync+1);
 dst = retn(dst);
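The test_ir(dst, 8, RSP, ...) dance kept on the 64-bit paths above (and dropped on the 32-bit paths) exists because the System V AMD64 ABI requires RSP to be 16-byte aligned at every call into C, and do_sync can be entered with RSP either aligned or 8 bytes off. A minimal sketch of that idiom with explanatory comments, built from the emitter calls in this file; the helper name is made up for illustration and is not part of the changeset:

//Illustration only: call `fun` with RSP 16-byte aligned, padding by 8 bytes
//when bit 3 of RSP is set (i.e. RSP % 16 == 8 on entry).
uint8_t * emit_aligned_call(uint8_t * dst, uint8_t * fun)
{
	dst = test_ir(dst, 8, RSP, SZ_PTR);
	uint8_t * do_adjust = dst+1;         //offset byte of the short jcc, patched below
	dst = jcc(dst, CC_NZ, dst+2);        //misaligned -> take the padded path
	dst = call(dst, fun);                //already aligned, call directly
	uint8_t * no_adjust = dst+1;
	dst = jmp(dst, dst+2);
	*do_adjust = dst - (do_adjust+1);
	dst = sub_ir(dst, 8, RSP, SZ_PTR);   //8-byte pad restores 16-byte alignment
	dst = call(dst, fun);
	dst = add_ir(dst, 8, RSP, SZ_PTR);   //drop the pad
	*no_adjust = dst - (no_adjust+1);
	return dst;
}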
@@ -4804,24 +4912,31 @@
 dst = jcc(dst, CC_NC, dst+2);
 dst = cmp_rdisp8r(dst, CONTEXT, offsetof(m68k_context, sync_cycle), CYCLES, SZ_D);
 skip_sync = dst+1;
 dst = jcc(dst, CC_C, dst+2);
 dst = call(dst, opts->save_context);
-dst = mov_rr(dst, CONTEXT, RDI, SZ_Q);
+#ifdef X86_64
+dst = mov_rr(dst, CONTEXT, RDI, SZ_PTR);
 dst = mov_rr(dst, SCRATCH1, RSI, SZ_D);
 dst = test_ir(dst, 8, RSP, SZ_D);
 adjust_rsp = dst+1;
 dst = jcc(dst, CC_NZ, dst+2);
 dst = call(dst, (uint8_t *)sync_components);
 no_adjust = dst+1;
 dst = jmp(dst, dst+2);
 *adjust_rsp = dst - (adjust_rsp + 1);
-dst = sub_ir(dst, 8, RSP, SZ_Q);
+dst = sub_ir(dst, 8, RSP, SZ_PTR);
 dst = call(dst, (uint8_t *)sync_components);
-dst = add_ir(dst, 8, RSP, SZ_Q);
+dst = add_ir(dst, 8, RSP, SZ_PTR);
 *no_adjust = dst - (no_adjust+1);
-dst = mov_rr(dst, RAX, CONTEXT, SZ_Q);
+#else
+dst = push_r(dst, SCRATCH1);
+dst = push_r(dst, CONTEXT);
+dst = call(dst, (uint8_t *)sync_components);
+dst = add_ir(dst, 8, RSP, SZ_D);
+#endif
+dst = mov_rr(dst, RAX, CONTEXT, SZ_PTR);
 dst = jmp(dst, opts->load_context);
 *skip_sync = dst - (skip_sync+1);
 dst = retn(dst);
 *do_int = dst - (do_int+1);
 //set target cycle to sync cycle