comparison z80_util.c @ 2053:3414a4423de1 segacd

Merge from default
author Michael Pavone <pavone@retrodev.com>
date Sat, 15 Jan 2022 13:15:21 -0800
parents 72540af9c90a
children dbff641a33df
comparison
equal deleted inserted replaced
1692:5dacaef602a7 2053:3414a4423de1
1 #include <string.h>
2
3 void z80_read_8(z80_context *context)
4 {
5 context->cycles += 3 * context->opts->gen.clock_divider;
6 uint8_t *fast = context->fastread[context->scratch1 >> 10];
7 if (fast) {
8 context->scratch1 = fast[context->scratch1 & 0x3FF];
9 } else {
10 context->scratch1 = read_byte(context->scratch1, (void **)context->mem_pointers, &context->opts->gen, context);
11 }
12 }
13
14 void z80_write_8(z80_context *context)
15 {
16 context->cycles += 3 * context->opts->gen.clock_divider;
17 uint8_t *fast = context->fastwrite[context->scratch2 >> 10];
18 if (fast) {
19 fast[context->scratch2 & 0x3FF] = context->scratch1;
20 } else {
21 write_byte(context->scratch2, context->scratch1, (void **)context->mem_pointers, &context->opts->gen, context);
22 }
23 }
24
25 void z80_io_read8(z80_context *context)
26 {
27 uint32_t tmp_mask = context->opts->gen.address_mask;
28 memmap_chunk const *tmp_map = context->opts->gen.memmap;
29 uint32_t tmp_chunks = context->opts->gen.memmap_chunks;
30
31 context->opts->gen.address_mask = context->io_mask;
32 context->opts->gen.memmap = context->io_map;
33 context->opts->gen.memmap_chunks = context->io_chunks;
34
35 context->cycles += 4 * context->opts->gen.clock_divider;
36 context->scratch1 = read_byte(context->scratch1, (void **)context->mem_pointers, &context->opts->gen, context);
37
38 context->opts->gen.address_mask = tmp_mask;
39 context->opts->gen.memmap = tmp_map;
40 context->opts->gen.memmap_chunks = tmp_chunks;
41 }
42
43 void z80_io_write8(z80_context *context)
44 {
45 uint32_t tmp_mask = context->opts->gen.address_mask;
46 memmap_chunk const *tmp_map = context->opts->gen.memmap;
47 uint32_t tmp_chunks = context->opts->gen.memmap_chunks;
48
49 context->opts->gen.address_mask = context->io_mask;
50 context->opts->gen.memmap = context->io_map;
51 context->opts->gen.memmap_chunks = context->io_chunks;
52
53 context->cycles += 4 * context->opts->gen.clock_divider;
54 write_byte(context->scratch2, context->scratch1, (void **)context->mem_pointers, &context->opts->gen, context);
55
56 context->opts->gen.address_mask = tmp_mask;
57 context->opts->gen.memmap = tmp_map;
58 context->opts->gen.memmap_chunks = tmp_chunks;
59 }
60
//quick hack until I get a chance to change which init method these get passed to
//(stashes the I/O map arguments here so init_z80_context can pick them up later;
//this means init_z80_opts must be called before the matching init_z80_context)
static memmap_chunk const * tmp_io_chunks;
static uint32_t tmp_num_io_chunks, tmp_io_mask;
//Populates a z80_options struct for a CPU with the given memory map, I/O map and clock divider.
//The chunk arrays are referenced, not copied, so they must outlive the options struct.
void init_z80_opts(z80_options * options, memmap_chunk const * chunks, uint32_t num_chunks, memmap_chunk const * io_chunks, uint32_t num_io_chunks, uint32_t clock_divider, uint32_t io_address_mask)
{
	memset(options, 0, sizeof(*options));
	options->gen.memmap = chunks;
	options->gen.memmap_chunks = num_chunks;
	//the Z80 has a 16-bit address bus
	options->gen.address_mask = 0xFFFF;
	options->gen.max_address = 0xFFFF;
	options->gen.clock_divider = clock_divider;
	//stored in the file-scope statics rather than options, see hack note above
	tmp_io_chunks = io_chunks;
	tmp_num_io_chunks = num_io_chunks;
	tmp_io_mask = io_address_mask;
}
76
//Frees an options struct allocated by the caller.
//Only the struct itself is freed; the memmap chunk arrays it points at are not.
void z80_options_free(z80_options *opts)
{
	free(opts);
}
81
//Allocates and initializes a z80_context bound to the given options.
//Must be called after init_z80_opts since it consumes the I/O map values
//stashed in the file-scope statics (see hack note above init_z80_opts).
z80_context * init_z80_context(z80_options *options)
{
	z80_context *context = calloc(1, sizeof(z80_context));
	context->opts = options;
	context->io_map = (memmap_chunk *)tmp_io_chunks;
	context->io_chunks = tmp_num_io_chunks;
	context->io_mask = tmp_io_mask;
	//0xFFFFFFFF is the sentinel for "no event scheduled"
	context->int_cycle = context->int_end_cycle = context->nmi_cycle = 0xFFFFFFFFU;
	//populate the fastread/fastwrite page tables for the whole address space
	z80_invalidate_code_range(context, 0, 0xFFFF);
	return context;
}
93
94 void z80_sync_cycle(z80_context *context, uint32_t target_cycle)
95 {
96 if (context->iff1 && context->int_cycle < target_cycle) {
97 if (context->cycles > context->int_end_cycle) {
98 context->int_cycle = 0xFFFFFFFFU;
99 } else {
100 target_cycle = context->int_cycle;
101 }
102 };
103 if (context->nmi_cycle < target_cycle) {
104 target_cycle = context->nmi_cycle;
105 }
106 context->sync_cycle = target_cycle;
107 }
108
109 void z80_run(z80_context *context, uint32_t target_cycle)
110 {
111 if (context->reset || context->busack) {
112 context->cycles = target_cycle;
113 } else if (target_cycle > context->cycles) {
114 if (context->busreq) {
115 //busreq is sampled at the end of an m-cycle
116 //we can approximate that by running for a single m-cycle after a bus request
117 target_cycle = context->cycles + 4 * context->opts->gen.clock_divider;
118 }
119 z80_execute(context, target_cycle);
120 if (context->busreq) {
121 context->busack = 1;
122 }
123 }
124 }
125
//Runs the Z80 up to the given cycle, then asserts the RESET line.
//Actual reset side effects are applied when the line is released (z80_clear_reset).
void z80_assert_reset(z80_context * context, uint32_t cycle)
{
	z80_run(context, cycle);
	context->reset = 1;
}
131
//Runs the Z80 up to the given cycle, then releases the RESET line and
//applies the internal state changes that a reset triggers.
void z80_clear_reset(z80_context * context, uint32_t cycle)
{
	z80_run(context, cycle);
	if (context->reset) {
		//reset clears the interrupt mode, both interrupt enable flip-flops and PC
		context->imode = 0;
		context->iff1 = context->iff2 = 0;
		context->pc = 0;
		context->reset = 0;
		if (context->busreq) {
			//TODO: Figure out appropriate delay
			context->busack = 1;
		}
	}
}
146
//Longest m-cycle in tstates, used to estimate when busack should be granted
#define MAX_MCYCLE_LENGTH 6
//Runs the Z80 up to the given cycle, then asserts the bus request line.
void z80_assert_busreq(z80_context * context, uint32_t cycle)
{
	z80_run(context, cycle);
	context->busreq = 1;
	//this is an imperfect approximation since most M-cycles take less tstates than the max
	//and a short 3-tstate m-cycle can take an unbounded number due to wait states
	if (context->cycles - cycle > MAX_MCYCLE_LENGTH * context->opts->gen.clock_divider) {
		context->busack = 1;
	}
}
158
//Runs the Z80 up to the given cycle, then releases the bus request line
//and drops the acknowledge.
void z80_clear_busreq(z80_context * context, uint32_t cycle)
{
	z80_run(context, cycle);
	context->busreq = 0;
	context->busack = 0;
	//there appears to be at least a 1 Z80 cycle delay between busreq
	//being released and resumption of execution
	context->cycles += context->opts->gen.clock_divider;
}
168
//Schedules a non-maskable interrupt to fire at the given cycle
//(0xFFFFFFFF would mean no NMI pending)
void z80_assert_nmi(z80_context *context, uint32_t cycle)
{
	context->nmi_cycle = cycle;
}
173
//Runs the Z80 up to the given cycle and returns the current state of
//the bus acknowledge line (non-zero when the bus has been granted)
uint8_t z80_get_busack(z80_context * context, uint32_t cycle)
{
	z80_run(context, cycle);
	return context->busack;
}
179
180 void z80_invalidate_code_range(z80_context *context, uint32_t startA, uint32_t endA)
181 {
182 for(startA &= ~0x3FF; startA < endA; startA += 1024)
183 {
184 uint8_t *start = get_native_pointer(startA, (void**)context->mem_pointers, &context->opts->gen);
185 if (start) {
186 uint8_t *end = get_native_pointer(startA + 1023, (void**)context->mem_pointers, &context->opts->gen);
187 if (!end || end - start != 1023) {
188 start = NULL;
189 }
190 }
191 context->fastread[startA >> 10] = start;
192 start = get_native_write_pointer(startA, (void**)context->mem_pointers, &context->opts->gen);
193 if (start) {
194 uint8_t *end = get_native_write_pointer(startA + 1023, (void**)context->mem_pointers, &context->opts->gen);
195 if (!end || end - start != 1023) {
196 start = NULL;
197 }
198 }
199 context->fastwrite[startA >> 10] = start;
200 }
201 }
202
203 void z80_adjust_cycles(z80_context * context, uint32_t deduction)
204 {
205 context->cycles -= deduction;
206 if (context->int_cycle != 0xFFFFFFFFU) {
207 if (context->int_cycle > deduction) {
208 context->int_cycle -= deduction;
209 } else {
210 context->int_cycle = 0;
211 }
212 }
213 if (context->int_end_cycle != 0xFFFFFFFFU) {
214 if (context->int_end_cycle > deduction) {
215 context->int_end_cycle -= deduction;
216 } else {
217 context->int_end_cycle = 0;
218 }
219 }
220 if (context->nmi_cycle != 0xFFFFFFFFU) {
221 if (context->nmi_cycle > deduction) {
222 context->nmi_cycle -= deduction;
223 } else {
224 context->nmi_cycle = 0;
225 }
226 }
227 }
228
//Writes the CPU state to buf in a fixed register order.
//Internally the flags live in separate fields (last_flag_result, zflag, chflags,
//pvflag, nflag) and are reassembled into a single F byte here. Several slots are
//written as placeholders to keep the layout compatible with cores that store
//shadow copies of IX/IY/I/R.
void z80_serialize(z80_context *context, serialize_buffer *buf)
{
	//main register file, stored in C,B,E,D,L,H order
	save_int8(buf, context->main[1]);//C
	save_int8(buf, context->main[0]);//B
	save_int8(buf, context->main[3]);//E
	save_int8(buf, context->main[2]);//D
	save_int8(buf, context->main[5]);//L
	save_int8(buf, context->main[4]);//H
	save_int8(buf, context->ix);//IXL
	save_int8(buf, context->ix >> 8);//IXH
	save_int8(buf, context->iy);//IYL
	save_int8(buf, context->iy >> 8);//IYH
	save_int8(buf, context->i);
	//R is tracked as a 7-bit counter plus a separately preserved high bit
	save_int8(buf, (context->rhigh & 0x80) | (context->r & 0x7F));
	save_int8(buf, context->main[7]);//A
	//reassemble F: bits 7,5,3 (S and the undocumented flags) from last_flag_result,
	//Z, H, P/V, N and C from their dedicated fields
	uint8_t f = context->last_flag_result & 0xA8
		| (context->zflag ? 0x40 : 0)
		| (context->chflags & 8 ? 0x10 : 0)
		| (context->pvflag ? 4 : 0)
		| (context->nflag ? 2 : 0)
		| (context->chflags & 0x80 ? 1 : 0);
	save_int8(buf, f);
	//alternate register file in the same order
	save_int8(buf, context->alt[1]);//C
	save_int8(buf, context->alt[0]);//B
	save_int8(buf, context->alt[3]);//E
	save_int8(buf, context->alt[2]);//D
	save_int8(buf, context->alt[5]);//L
	save_int8(buf, context->alt[4]);//H
	save_int8(buf, 0);//non-existant alt ixl
	save_int8(buf, 0);//non-existant alt ixh
	save_int8(buf, 0);//non-existant alt iyl
	save_int8(buf, 0);//non-existant alt iyh
	save_int8(buf, 0);//non-existant alt i
	save_int8(buf, 0);//non-existant alt r
	save_int8(buf, context->alt[7]);//A
	save_int8(buf, context->alt[6]);//F

	save_int16(buf, context->pc);
	save_int16(buf, context->sp);
	save_int8(buf, context->imode);
	save_int8(buf, context->iff1);
	save_int8(buf, context->iff2);
	//an NMI is recorded as the pending interrupt when one is scheduled ahead of
	//(or instead of) a maskable interrupt
	uint8_t is_nmi = context->nmi_cycle != 0xFFFFFFFF && (context->nmi_cycle < context->int_cycle || !context->iff1);
	save_int8(buf, is_nmi);//int_is_nmi
	save_int8(buf, context->busack);
	save_int32(buf, context->cycles);
	save_int32(buf, is_nmi ? context->nmi_cycle : context->int_cycle);//int_cycle
	save_int32(buf, 0);//int_enable_cycle
	save_int32(buf, context->int_cycle);
	save_int32(buf, context->int_end_cycle);
	save_int32(buf, context->nmi_cycle);
}
281
//Restores CPU state from buf; the read order mirrors z80_serialize exactly.
//The packed F byte is decomposed back into the separate flag fields, and
//placeholder slots written by z80_serialize are read and discarded.
void z80_deserialize(deserialize_buffer *buf, void *vcontext)
{
	z80_context *context = vcontext;
	//main register file in C,B,E,D,L,H order
	context->main[1] = load_int8(buf);//C
	context->main[0] = load_int8(buf);//B
	context->main[3] = load_int8(buf);//E
	context->main[2] = load_int8(buf);//D
	context->main[5] = load_int8(buf);//L
	context->main[4] = load_int8(buf);//H
	context->ix = load_int8(buf);//IXL
	context->ix |= load_int8(buf) << 8;//IXH
	context->iy = load_int8(buf);//IYL
	context->iy |= load_int8(buf) << 8;//IYH
	context->i = load_int8(buf);
	context->r = load_int8(buf);
	//split R back into the 7-bit counter and the preserved high bit
	context->rhigh = context->r & 0x80;
	context->main[7] = load_int8(buf);//A
	//decompose F into the separate flag fields (inverse of z80_serialize)
	context->last_flag_result = load_int8(buf);
	context->zflag = context->last_flag_result & 0x40;
	context->chflags = context->last_flag_result & 0x10 ? 8 : 0;
	context->pvflag = context->last_flag_result & 4;
	context->nflag = context->last_flag_result & 2;
	context->chflags |= context->last_flag_result & 1 ? 0x80 : 0;
	//alternate register file in the same order
	context->alt[1] = load_int8(buf);//C
	context->alt[0] = load_int8(buf);//B
	context->alt[3] = load_int8(buf);//E
	context->alt[2] = load_int8(buf);//D
	context->alt[5] = load_int8(buf);//L
	context->alt[4] = load_int8(buf);//H
	load_int8(buf);//non-existant alt ixl
	load_int8(buf);//non-existant alt ixh
	load_int8(buf);//non-existant alt iyl
	load_int8(buf);//non-existant alt iyh
	load_int8(buf);//non-existant alt i
	load_int8(buf);//non-existant alt r
	context->alt[7] = load_int8(buf);//A
	context->alt[6] = load_int8(buf);//F

	context->pc = load_int16(buf);
	context->sp = load_int16(buf);
	context->imode = load_int8(buf);
	context->iff1 = load_int8(buf);
	context->iff2 = load_int8(buf);
	load_int8(buf);//int_is_nmi
	context->busack = load_int8(buf);
	context->cycles = load_int32(buf);
	load_int32(buf);//int_cycle
	load_int32(buf);//int_enable_cycle
	context->int_cycle = load_int32(buf);
	context->int_end_cycle = load_int32(buf);
	context->nmi_cycle = load_int32(buf);
}
334
//Intentionally empty stub; breakpoints are not implemented here
//NOTE(review): presumably kept so callers sharing this interface still link — confirm
void zinsert_breakpoint(z80_context * context, uint16_t address, uint8_t * bp_handler)
{
}
338
//Intentionally empty stub; matches the no-op zinsert_breakpoint
void zremove_breakpoint(z80_context * context, uint16_t address)
{
}