comparison: backend.c @ 2053:3414a4423de1 (branch segacd)

Merge from default
author Michael Pavone <pavone@retrodev.com>
date Sat, 15 Jan 2022 13:15:21 -0800
parents 45c4b74e7676
children 638eb2d25696 0013362c320c
comparing 1692:5dacaef602a7 with 2053:3414a4423de1

diff -r 5dacaef602a7 -r 3414a4423de1 backend.c
@@ -54,17 +54,27 @@
 memmap_chunk const *find_map_chunk(uint32_t address, cpu_options *opts, uint16_t flags, uint32_t *size_sum)
 {
 	if (size_sum) {
 		*size_sum = 0;
 	}
+	uint32_t minsize;
+	if (flags == MMAP_CODE) {
+		minsize = 1 << (opts->ram_flags_shift + 3);
+	} else {
+		minsize = 0;
+	}
 	address &= opts->address_mask;
 	for (memmap_chunk const *cur = opts->memmap, *end = opts->memmap + opts->memmap_chunks; cur != end; cur++)
 	{
 		if (address >= cur->start && address < cur->end) {
 			return cur;
 		} else if (size_sum && (cur->flags & flags) == flags) {
-			*size_sum += chunk_size(opts, cur);
+			uint32_t size = chunk_size(opts, cur);
+			if (size < minsize) {
+				size = minsize;
+			}
+			*size_sum += size;
 		}
 	}
 	return NULL;
 }
 
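The new minsize floor only applies to MMAP_CODE lookups. It appears to exist because RAM code flags are stored eight blocks to a byte: with a block size of 1 << ram_flags_shift, one flags byte describes 1 << (ram_flags_shift + 3) bytes, so every chunk counted into size_sum must account for at least one whole flags byte. A minimal standalone sketch, using a made-up ram_flags_shift of 11:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* made-up value: one flag bit per 2KB block */
	uint32_t ram_flags_shift = 11;
	/* one flags byte covers 8 blocks, so the floor is 8 * 2KB = 16KB */
	uint32_t minsize = 1 << (ram_flags_shift + 3);
	printf("minsize = %u bytes\n", (unsigned)minsize); /* prints 16384 */
	return 0;
}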
@@ -81,15 +91,15 @@
 			uint8_t * base = memmap[chunk].flags & MMAP_PTR_IDX
 				? mem_pointers[memmap[chunk].ptr_index]
 				: memmap[chunk].buffer;
 			if (!base) {
 				if (memmap[chunk].flags & MMAP_AUX_BUFF) {
-					return memmap[chunk].buffer + (address & memmap[chunk].aux_mask);
+					return ((uint8_t *)memmap[chunk].buffer) + (address & memmap[chunk].aux_mask);
 				}
 				return NULL;
 			}
 			return base + (address & memmap[chunk].mask);
 		}
 	}
 	return NULL;
 }
 
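The only substantive change in this hunk is the cast: buffer is presumably declared void *, and pointer arithmetic on void * is a GNU extension rather than standard C, so the explicit uint8_t * cast makes the byte offset portable. A minimal sketch of the pattern (aux_buf_ptr is a hypothetical helper, not part of backend.c):

#include <stdint.h>

/* Hypothetical helper; buf stands in for memmap[chunk].buffer,
 * which is assumed to have type void *. */
static uint8_t *aux_buf_ptr(void *buf, uint32_t address, uint32_t aux_mask)
{
	/* buf + offset only compiles as a GNU extension on void *;
	 * casting to uint8_t * first gives well-defined byte addressing */
	return ((uint8_t *)buf) + (address & aux_mask);
}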
@@ -95,0 +106,25 @@
+void * get_native_write_pointer(uint32_t address, void ** mem_pointers, cpu_options * opts)
+{
+	memmap_chunk const * memmap = opts->memmap;
+	address &= opts->address_mask;
+	for (uint32_t chunk = 0; chunk < opts->memmap_chunks; chunk++)
+	{
+		if (address >= memmap[chunk].start && address < memmap[chunk].end) {
+			if (!(memmap[chunk].flags & MMAP_WRITE)) {
+				return NULL;
+			}
+			uint8_t * base = memmap[chunk].flags & MMAP_PTR_IDX
+				? mem_pointers[memmap[chunk].ptr_index]
+				: memmap[chunk].buffer;
+			if (!base) {
+				if (memmap[chunk].flags & MMAP_AUX_BUFF) {
+					return ((uint8_t *)memmap[chunk].buffer) + (address & memmap[chunk].aux_mask);
+				}
+				return NULL;
+			}
+			return base + (address & memmap[chunk].mask);
+		}
+	}
+	return NULL;
+}
+
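The new get_native_write_pointer mirrors the existing read-side lookup but returns NULL for any chunk lacking MMAP_WRITE, so callers cannot scribble over ROM or handler-backed regions. A hypothetical caller (poke_ram is not part of backend.c, and assumes backend.h is included for the types) might patch emulated RAM in place like this:

/* Hypothetical helper, assuming an initialized cpu_options and the
 * mem_pointers array from the owning CPU context. */
static void poke_ram(uint32_t address, uint8_t value, void **mem_pointers, cpu_options *opts)
{
	uint8_t *dest = get_native_write_pointer(address, mem_pointers, opts);
	if (dest) {
		/* only plain writeable memory reaches this point;
		 * note this simple sketch ignores opts->byte_swap */
		*dest = value;
	}
}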
@@ -96,12 +131,12 @@
 uint16_t read_word(uint32_t address, void **mem_pointers, cpu_options *opts, void *context)
 {
 	memmap_chunk const *chunk = find_map_chunk(address, opts, 0, NULL);
 	if (!chunk) {
 		return 0xFFFF;
 	}
-	uint32_t offset = (address - chunk->start) & chunk->mask;
+	uint32_t offset = address & chunk->mask;
 	if (chunk->flags & MMAP_READ) {
 		uint8_t *base;
 		if (chunk->flags & MMAP_PTR_IDX) {
 			base = mem_pointers[chunk->ptr_index];
 		} else {
@@ -127,5 +162,5 @@
 		return chunk->read_16(offset, context);
 	}
 	return 0xFFFF;
 }
 
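The offset computation changes from (address - chunk->start) & chunk->mask to address & chunk->mask. The two agree whenever a chunk's start address is aligned to its mask, and the new form matches how get_native_pointer indexes the same buffers above. Worked with made-up numbers:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* made-up chunk: a 64KB region at a mask-aligned start address */
	uint32_t start = 0xE00000, mask = 0xFFFF, address = 0xE01234;
	uint32_t old_offset = (address - start) & mask; /* 0x1234 */
	uint32_t new_offset = address & mask;           /* 0x1234, same result */
	printf("%X %X\n", (unsigned)old_offset, (unsigned)new_offset);
	return 0;
}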
@@ -131,0 +167,32 @@
+void write_word(uint32_t address, uint16_t value, void **mem_pointers, cpu_options *opts, void *context)
+{
+	memmap_chunk const *chunk = find_map_chunk(address, opts, 0, NULL);
+	if (!chunk) {
+		return;
+	}
+	uint32_t offset = address & chunk->mask;
+	if (chunk->flags & MMAP_WRITE) {
+		uint8_t *base;
+		if (chunk->flags & MMAP_PTR_IDX) {
+			base = mem_pointers[chunk->ptr_index];
+		} else {
+			base = chunk->buffer;
+		}
+		if (base) {
+			if ((chunk->flags & MMAP_ONLY_ODD) || (chunk->flags & MMAP_ONLY_EVEN)) {
+				offset /= 2;
+				if (chunk->flags & MMAP_ONLY_EVEN) {
+					value >>= 8;
+				}
+				base[offset] = value;
+			} else {
+				*(uint16_t *)(base + offset) = value;
+			}
+			return;
+		}
+	}
+	if ((!(chunk->flags & MMAP_WRITE) || (chunk->flags & MMAP_FUNC_NULL)) && chunk->write_16) {
+		chunk->write_16(offset, context, value);
+	}
+}
+
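In write_word, chunks flagged MMAP_ONLY_ODD or MMAP_ONLY_EVEN seem to model 8-bit devices wired to one byte lane of the 16-bit bus, so a word write stores only the mapped byte at offset/2. A standalone sketch of the even-lane case (all values made up):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint8_t lane_ram[0x100] = {0}; /* stand-in for an ONLY_EVEN chunk's buffer */
	uint16_t value = 0xABCD;
	uint32_t offset = 0x46;        /* word-aligned byte offset into the chunk */
	/* the even lane carries the high byte of a big-endian word */
	lane_ram[offset / 2] = value >> 8;
	printf("stored 0x%02X at %u\n", (unsigned)lane_ram[offset / 2], (unsigned)(offset / 2));
	return 0;
}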
@@ -131,0 +199,36 @@
+uint8_t read_byte(uint32_t address, void **mem_pointers, cpu_options *opts, void *context)
+{
+	memmap_chunk const *chunk = find_map_chunk(address, opts, 0, NULL);
+	if (!chunk) {
+		return 0xFF;
+	}
+	uint32_t offset = address & chunk->mask;
+	if (chunk->flags & MMAP_READ) {
+		uint8_t *base;
+		if (chunk->flags & MMAP_PTR_IDX) {
+			base = mem_pointers[chunk->ptr_index];
+		} else {
+			base = chunk->buffer;
+		}
+		if (base) {
+			if ((chunk->flags & MMAP_ONLY_ODD) || (chunk->flags & MMAP_ONLY_EVEN)) {
+				if (address & 1) {
+					if (chunk->flags & MMAP_ONLY_EVEN) {
+						return 0xFF;
+					}
+				} else if (chunk->flags & MMAP_ONLY_ODD) {
+					return 0xFF;
+				}
+				offset /= 2;
+			} else if (opts->byte_swap) {
+				offset ^= 1;
+			}
+			return base[offset];
+		}
+	}
+	if ((!(chunk->flags & MMAP_READ) || (chunk->flags & MMAP_FUNC_NULL)) && chunk->read_8) {
+		return chunk->read_8(offset, context);
+	}
+	return 0xFF;
+}
+
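The offset ^= 1 in read_byte handles hosts that keep 16-bit guest words in native order: when opts->byte_swap is set, each big-endian guest word is stored as a host uint16_t, so byte accesses must flip the low address bit to land on the intended lane. Illustrated on a little-endian host:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* a big-endian guest word stored as one host uint16_t (little-endian host assumed) */
	uint16_t word = 0x1234;            /* guest sees 0x12 at the even address, 0x34 at the odd */
	uint8_t *bytes = (uint8_t *)&word; /* host layout: bytes[0] = 0x34, bytes[1] = 0x12 */
	uint32_t offset = 0;               /* guest reads its even byte */
	printf("0x%02X\n", bytes[offset ^ 1]); /* 0x12, matching the guest's view */
	return 0;
}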
@@ -131,0 +235,35 @@
+void write_byte(uint32_t address, uint8_t value, void **mem_pointers, cpu_options *opts, void *context)
+{
+	memmap_chunk const *chunk = find_map_chunk(address, opts, 0, NULL);
+	if (!chunk) {
+		return;
+	}
+	uint32_t offset = address & chunk->mask;
+	if (chunk->flags & MMAP_WRITE) {
+		uint8_t *base;
+		if (chunk->flags & MMAP_PTR_IDX) {
+			base = mem_pointers[chunk->ptr_index];
+		} else {
+			base = chunk->buffer;
+		}
+		if (base) {
+			if ((chunk->flags & MMAP_ONLY_ODD) || (chunk->flags & MMAP_ONLY_EVEN)) {
+				if (address & 1) {
+					if (chunk->flags & MMAP_ONLY_EVEN) {
+						return;
+					}
+				} else if (chunk->flags & MMAP_ONLY_ODD) {
+					return;
+				}
+				offset /= 2;
+			} else if (opts->byte_swap) {
+				offset ^= 1;
+			}
+			base[offset] = value;
+		}
+	}
+	if ((!(chunk->flags & MMAP_WRITE) || (chunk->flags & MMAP_FUNC_NULL)) && chunk->write_8) {
+		chunk->write_8(offset, context, value);
+	}
+}
+
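Both byte-wide paths end with the same fall-through: if a chunk has no MMAP_WRITE backing (or is marked MMAP_FUNC_NULL) and provides a write_8 handler, the access is delegated instead of stored. The call site fixes the argument order as (offset, context, value); the true handler typedef lives in backend.h, so the return type below is an assumption. A hypothetical register-style handler:

#include <stdint.h>

/* Hypothetical handler; the void * return (the possibly-updated
 * context pointer) is assumed, not confirmed by this file. */
static void *status_reg_write(uint32_t offset, void *context, uint8_t value)
{
	uint8_t *status = context; /* pretend the context holds a register latch */
	*status = value & 0x7F;    /* made-up behavior: bit 7 always reads back 0 */
	return context;
}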
@@ -132,5 +270,5 @@
 uint32_t chunk_size(cpu_options *opts, memmap_chunk const *chunk)
 {
 	if (chunk->mask == opts->address_mask) {
 		return chunk->end - chunk->start;
 	} else {
@@ -139,17 +277,19 @@
 }
 
 uint32_t ram_size(cpu_options *opts)
 {
 	uint32_t size = 0;
+	uint32_t minsize = 1 << (opts->ram_flags_shift + 3);
 	for (int i = 0; i < opts->memmap_chunks; i++)
 	{
 		if (opts->memmap[i].flags & MMAP_CODE) {
-			if (opts->memmap[i].mask == opts->address_mask) {
-				size += opts->memmap[i].end - opts->memmap[i].start;
+			uint32_t cursize = chunk_size(opts, opts->memmap + i);
+			if (cursize < minsize) {
+				size += minsize;
 			} else {
-				size += opts->memmap[i].mask + 1;
+				size += cursize;
 			}
 		}
 	}
 	return size;
 }
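With the shared chunk_size helper and the minsize floor, ram_size now rounds every MMAP_CODE chunk up to the smallest region one code-flags byte can describe, keeping it consistent with find_map_chunk's size_sum above. A worked example with made-up chunk sizes:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t minsize = 1 << (11 + 3);      /* hypothetical ram_flags_shift = 11 -> 16KB floor */
	uint32_t chunks[] = {0x10000, 0x2000}; /* made-up MMAP_CODE chunks: 64KB and 8KB */
	uint32_t size = 0;
	for (int i = 0; i < 2; i++) {
		size += chunks[i] < minsize ? minsize : chunks[i];
	}
	/* the 8KB chunk is counted as 16KB, so the total is 0x14000 */
	printf("ram_size = 0x%X\n", (unsigned)size);
	return 0;
}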