1 /*-
2 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
3 *
4 * Copyright (c) 2006 Semihalf, Rafal Jaworowski <raj@semihalf.com>
5 * Copyright (c) 1996, 1997, 1998 The NetBSD Foundation, Inc.
6 * All rights reserved.
7 *
8 * This code is derived from software contributed to The NetBSD Foundation
9 * by Jason R. Thorpe of the Numerical Aerospace Simulation Facility,
10 * NASA Ames Research Center.
11 *
12 * Redistribution and use in source and binary forms, with or without
13 * modification, are permitted provided that the following conditions
14 * are met:
15 * 1. Redistributions of source code must retain the above copyright
16 * notice, this list of conditions and the following disclaimer.
17 * 2. Redistributions in binary form must reproduce the above copyright
18 * notice, this list of conditions and the following disclaimer in the
19 * documentation and/or other materials provided with the distribution.
20 *
21 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
22 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
23 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
24 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
25 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
27 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
28 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
29 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
31 * POSSIBILITY OF SUCH DAMAGE.
32 */
33
34 #include <sys/cdefs.h>
35 __FBSDID("$FreeBSD$");
36
37 #define KTR_BE_IO 0
38 #define KTR_LE_IO 0
39
40 #include <sys/param.h>
41 #include <sys/systm.h>
42 #include <sys/bus.h>
43 #include <sys/ktr.h>
44 #include <vm/vm.h>
45 #include <vm/pmap.h>
46 #include <sys/endian.h>
47
48 #include <machine/bus.h>
49 #include <machine/pio.h>
50 #include <machine/md_var.h>
51
52 #define TODO panic("%s: not implemented", __func__)
53
54 #define MAX_EARLYBOOT_MAPPINGS 6
55
/*
 * Bookkeeping for bus_space mappings requested before the MMU/pmap is
 * bootstrapped.  bs_remap_earlyboot() walks this table once the pmap
 * layer is up and re-establishes each mapping with proper attributes.
 */
static struct {
	vm_offset_t virt;	/* VA handed back to the early caller */
	bus_addr_t addr;	/* physical bus address */
	bus_size_t size;	/* length of the mapping */
	int flags;		/* BUS_SPACE_MAP_* flags requested */
} earlyboot_mappings[MAX_EARLYBOOT_MAPPINGS];
static int earlyboot_map_idx = 0;	/* number of entries recorded so far */

void bs_remap_earlyboot(void);
65
/*
 * Convert a bus space handle plus offset into a CPU pointer.  Handles
 * in this implementation are simply kernel virtual addresses.
 */
static __inline void *
__ppc_ba(bus_space_handle_t bsh, bus_size_t ofs)
{
	return ((void *)(bsh + ofs));
}
71
/*
 * Generic map method: translate a physical bus address range into a bus
 * space handle (a kernel virtual address).
 *
 * Before pmap is bootstrapped the mapping is made with
 * pmap_early_io_map() and recorded in earlyboot_mappings[] so that
 * bs_remap_earlyboot() can redo it with the requested memory attributes
 * later; afterwards pmap_mapdev_attr() is used directly.
 * Always returns 0.
 */
static int
bs_gen_map(bus_addr_t addr, bus_size_t size, int flags,
    bus_space_handle_t *bshp)
{
	vm_memattr_t ma;

	/*
	 * Record what we did if we haven't enabled the MMU yet. We
	 * will need to remap it as soon as the MMU comes up.
	 */
	if (!pmap_bootstrapped) {
		KASSERT(earlyboot_map_idx < MAX_EARLYBOOT_MAPPINGS,
		    ("%s: too many early boot mapping requests", __func__));
		earlyboot_mappings[earlyboot_map_idx].addr = addr;
		earlyboot_mappings[earlyboot_map_idx].virt =
		    pmap_early_io_map(addr, size);
		earlyboot_mappings[earlyboot_map_idx].size = size;
		earlyboot_mappings[earlyboot_map_idx].flags = flags;
		*bshp = earlyboot_mappings[earlyboot_map_idx].virt;
		earlyboot_map_idx++;
	} else {
		ma = VM_MEMATTR_DEFAULT;
		/*
		 * NOTE(review): this switch matches only when exactly one
		 * BUS_SPACE_MAP_* flag is set; a combination of flags falls
		 * through to VM_MEMATTR_DEFAULT — confirm that is intended.
		 */
		switch (flags) {
		case BUS_SPACE_MAP_CACHEABLE:
			ma = VM_MEMATTR_CACHEABLE;
			break;
		case BUS_SPACE_MAP_PREFETCHABLE:
			ma = VM_MEMATTR_PREFETCHABLE;
			break;
		}
		*bshp = (bus_space_handle_t)pmap_mapdev_attr(addr, size, ma);
	}

	return (0);
}
107
/*
 * Re-establish every mapping created before the MMU came up, now that
 * pmap is bootstrapped.  Entries already served by the direct map are
 * skipped; the rest are entered page by page with pmap_kenter_attr()
 * at the same virtual address the early caller was given.
 */
void
bs_remap_earlyboot(void)
{
	vm_paddr_t pa, spa;
	vm_offset_t va;
	int i;
	vm_memattr_t ma;

	for (i = 0; i < earlyboot_map_idx; i++) {
		spa = earlyboot_mappings[i].addr;

		/* Nothing to do if the direct map already covers it. */
		if (hw_direct_map &&
		    PHYS_TO_DMAP(spa) == earlyboot_mappings[i].virt &&
		    pmap_dev_direct_mapped(spa, earlyboot_mappings[i].size) == 0)
			continue;

		/* Same single-flag translation as bs_gen_map(). */
		ma = VM_MEMATTR_DEFAULT;
		switch (earlyboot_mappings[i].flags) {
		case BUS_SPACE_MAP_CACHEABLE:
			ma = VM_MEMATTR_CACHEABLE;
			break;
		case BUS_SPACE_MAP_PREFETCHABLE:
			ma = VM_MEMATTR_PREFETCHABLE;
			break;
		}

		/* Map whole pages, starting at the page base of each address. */
		pa = trunc_page(spa);
		va = trunc_page(earlyboot_mappings[i].virt);
		while (pa < spa + earlyboot_mappings[i].size) {
			pmap_kenter_attr(va, pa, ma);
			va += PAGE_SIZE;
			pa += PAGE_SIZE;
		}
	}
}
143
/*
 * Generic unmap method: intentionally a no-op; mappings are not torn
 * down here.
 */
static void
bs_gen_unmap(bus_size_t size __unused)
{
}

/*
 * Generic subregion method: a subregion is simply the parent handle
 * plus an offset.  Always returns 0.
 */
static int
bs_gen_subregion(bus_space_handle_t bsh, bus_size_t ofs,
    bus_size_t size __unused, bus_space_handle_t *nbshp)
{
	*nbshp = bsh + ofs;
	return (0);
}
156
/* Resource allocation: unimplemented, panics via TODO. */
static int
bs_gen_alloc(bus_addr_t rstart __unused, bus_addr_t rend __unused,
    bus_size_t size __unused, bus_size_t alignment __unused,
    bus_size_t boundary __unused, int flags __unused,
    bus_addr_t *bpap __unused, bus_space_handle_t *bshp __unused)
{
	TODO;
}

/* Resource release: unimplemented, panics via TODO. */
static void
bs_gen_free(bus_space_handle_t bsh __unused, bus_size_t size __unused)
{
	TODO;
}
171
/*
 * Bus space barrier: issue a full I/O memory barrier regardless of the
 * requested range or flags.
 */
static void
bs_gen_barrier(bus_space_handle_t bsh __unused, bus_size_t ofs __unused,
    bus_size_t size __unused, int flags __unused)
{

	powerpc_iomb();
}
179
180 /*
181 * Native-endian access functions
182 */
/* Read a single 1-byte datum; barrier after the access, then trace. */
static uint8_t
native_bs_rs_1(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint8_t *addr;
	uint8_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

/* Read a single 2-byte datum in native byte order. */
static uint16_t
native_bs_rs_2(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint16_t *addr;
	uint16_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

/* Read a single 4-byte datum in native byte order. */
static uint32_t
native_bs_rs_4(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint32_t *addr;
	uint32_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

/*
 * Read a single 8-byte datum in native byte order.
 * NOTE(review): unlike the narrower reads, no KTR trace here.
 */
static uint64_t
native_bs_rs_8(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint64_t *addr;
	uint64_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	return (res);
}
233
/* Read cnt 1-byte data from a single location into the buffer at addr. */
static void
native_bs_rm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
{
	ins8(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Read cnt 2-byte data from a single location into the buffer at addr. */
static void
native_bs_rm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
{
	ins16(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Read cnt 4-byte data from a single location into the buffer at addr. */
static void
native_bs_rm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
{
	ins32(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Read cnt 8-byte data from a single location into the buffer at addr. */
static void
native_bs_rm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
{
	ins64(__ppc_ba(bsh, ofs), addr, cnt);
}
257
258 static void
259 native_bs_rr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
260 {
261 volatile uint8_t *s = __ppc_ba(bsh, ofs);
262
263 while (cnt--)
264 *addr++ = *s++;
265 powerpc_iomb();
266 }
267
268 static void
269 native_bs_rr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
270 {
271 volatile uint16_t *s = __ppc_ba(bsh, ofs);
272
273 while (cnt--)
274 *addr++ = *s++;
275 powerpc_iomb();
276 }
277
278 static void
279 native_bs_rr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
280 {
281 volatile uint32_t *s = __ppc_ba(bsh, ofs);
282
283 while (cnt--)
284 *addr++ = *s++;
285 powerpc_iomb();
286 }
287
288 static void
289 native_bs_rr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
290 {
291 volatile uint64_t *s = __ppc_ba(bsh, ofs);
292
293 while (cnt--)
294 *addr++ = *s++;
295 powerpc_iomb();
296 }
297
/* Write a single 1-byte datum; barrier after the access, then trace. */
static void
native_bs_ws_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val)
{
	volatile uint8_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

/* Write a single 2-byte datum in native byte order. */
static void
native_bs_ws_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val)
{
	volatile uint16_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

/* Write a single 4-byte datum in native byte order. */
static void
native_bs_ws_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val)
{
	volatile uint32_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

/* Write a single 8-byte datum in native byte order. */
static void
native_bs_ws_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val)
{
	volatile uint64_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}
341
/* Write cnt 1-byte data from the buffer at addr to a single location. */
static void
native_bs_wm_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
    bus_size_t cnt)
{
	outsb(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Write cnt 2-byte data from the buffer at addr to a single location. */
static void
native_bs_wm_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
    bus_size_t cnt)
{
	outsw(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Write cnt 4-byte data from the buffer at addr to a single location. */
static void
native_bs_wm_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
    bus_size_t cnt)
{
	outsl(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Write cnt 8-byte data from the buffer at addr to a single location. */
static void
native_bs_wm_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
    bus_size_t cnt)
{
	outsll(__ppc_ba(bsh, ofs), addr, cnt);
}
369
370 static void
371 native_bs_wr_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
372 size_t cnt)
373 {
374 volatile uint8_t *d = __ppc_ba(bsh, ofs);
375
376 while (cnt--)
377 *d++ = *addr++;
378 powerpc_iomb();
379 }
380
381 static void
382 native_bs_wr_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
383 size_t cnt)
384 {
385 volatile uint16_t *d = __ppc_ba(bsh, ofs);
386
387 while (cnt--)
388 *d++ = *addr++;
389 powerpc_iomb();
390 }
391
392 static void
393 native_bs_wr_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
394 size_t cnt)
395 {
396 volatile uint32_t *d = __ppc_ba(bsh, ofs);
397
398 while (cnt--)
399 *d++ = *addr++;
400 powerpc_iomb();
401 }
402
403 static void
404 native_bs_wr_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
405 size_t cnt)
406 {
407 volatile uint64_t *d = __ppc_ba(bsh, ofs);
408
409 while (cnt--)
410 *d++ = *addr++;
411 powerpc_iomb();
412 }
413
/*
 * Set multiple: write val to the SAME 1-byte location cnt times; the
 * destination pointer is deliberately not advanced (bus_space set-multi
 * semantics).
 */
static void
native_bs_sm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}

/* Set multiple: write val to the same 2-byte location cnt times. */
static void
native_bs_sm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}

/* Set multiple: write val to the same 4-byte location cnt times. */
static void
native_bs_sm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}

/* Set multiple: write val to the same 8-byte location cnt times. */
static void
native_bs_sm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
{
	volatile uint64_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}
453
454 static void
455 native_bs_sr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
456 {
457 volatile uint8_t *d = __ppc_ba(bsh, ofs);
458
459 while (cnt--)
460 *d++ = val;
461 powerpc_iomb();
462 }
463
464 static void
465 native_bs_sr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
466 {
467 volatile uint16_t *d = __ppc_ba(bsh, ofs);
468
469 while (cnt--)
470 *d++ = val;
471 powerpc_iomb();
472 }
473
474 static void
475 native_bs_sr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
476 {
477 volatile uint32_t *d = __ppc_ba(bsh, ofs);
478
479 while (cnt--)
480 *d++ = val;
481 powerpc_iomb();
482 }
483
484 static void
485 native_bs_sr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
486 {
487 volatile uint64_t *d = __ppc_ba(bsh, ofs);
488
489 while (cnt--)
490 *d++ = val;
491 powerpc_iomb();
492 }
493
494 /*
495 * Byteswapped access functions
496 */
/* Single bytes have no byte order; identical to the native 1-byte read. */
static uint8_t
swapped_bs_rs_1(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint8_t *addr;
	uint8_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

/* Read a 2-byte datum with the byte-reversed load instruction (lhbrx). */
static uint16_t
swapped_bs_rs_2(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint16_t *addr;
	uint16_t res;

	addr = __ppc_ba(bsh, ofs);
	__asm __volatile("lhbrx %0, 0, %1" : "=r"(res) : "r"(addr));
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

/* Read a 4-byte datum with the byte-reversed load instruction (lwbrx). */
static uint32_t
swapped_bs_rs_4(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint32_t *addr;
	uint32_t res;

	addr = __ppc_ba(bsh, ofs);
	__asm __volatile("lwbrx %0, 0, %1" : "=r"(res) : "r"(addr));
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

/*
 * Read an 8-byte datum and byte-swap in software via le64toh() (no
 * 64-bit byte-reversed load is used here).
 */
static uint64_t
swapped_bs_rs_8(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint64_t *addr;
	uint64_t res;

	addr = __ppc_ba(bsh, ofs);
	res = le64toh(*addr);
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}
548
/* Read cnt bytes from one location; no swap needed for single bytes. */
static void
swapped_bs_rm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
{
	ins8(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Read cnt 2-byte data from one location, byte-reversed (ins16rb). */
static void
swapped_bs_rm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
{
	ins16rb(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Read cnt 4-byte data from one location, byte-reversed (ins32rb). */
static void
swapped_bs_rm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
{
	ins32rb(__ppc_ba(bsh, ofs), addr, cnt);
}
566
567 static void
568 swapped_bs_rm_8(bus_space_handle_t bshh, bus_size_t ofs, uint64_t *addr, size_t cnt)
569 {
570 TODO;
571 }
572
/* Copy a region of cnt bytes; no swap needed for single bytes. */
static void
swapped_bs_rr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
{
	volatile uint8_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = *s++;
	powerpc_iomb();
}

/* Copy a region of cnt 2-byte data, byte-reversing each via in16rb(). */
static void
swapped_bs_rr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
{
	volatile uint16_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = in16rb(s++);
	powerpc_iomb();
}

/* Copy a region of cnt 4-byte data, byte-reversing each via in32rb(). */
static void
swapped_bs_rr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
{
	volatile uint32_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = in32rb(s++);
	powerpc_iomb();
}

/* Copy a region of byte-swapped 8-byte data; not implemented. */
static void
swapped_bs_rr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
{
	TODO;
}
608
/* Write a single byte; no swap needed for single bytes. */
static void
swapped_bs_ws_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val)
{
	volatile uint8_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

/* Write a 2-byte datum with the byte-reversed store instruction (sthbrx). */
static void
swapped_bs_ws_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val)
{
	volatile uint16_t *addr;

	addr = __ppc_ba(bsh, ofs);
	__asm __volatile("sthbrx %0, 0, %1" :: "r"(val), "r"(addr));
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

/* Write a 4-byte datum with the byte-reversed store instruction (stwbrx). */
static void
swapped_bs_ws_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val)
{
	volatile uint32_t *addr;

	addr = __ppc_ba(bsh, ofs);
	__asm __volatile("stwbrx %0, 0, %1" :: "r"(val), "r"(addr));
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

/* Write an 8-byte datum, byte-swapped in software via htole64(). */
static void
swapped_bs_ws_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val)
{
	volatile uint64_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = htole64(val);
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}
652
/* Write cnt bytes to one location; no swap needed for single bytes. */
static void
swapped_bs_wm_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
    bus_size_t cnt)
{
	outs8(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Write cnt 2-byte data to one location, byte-reversed (outs16rb). */
static void
swapped_bs_wm_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
    bus_size_t cnt)
{
	outs16rb(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Write cnt 4-byte data to one location, byte-reversed (outs32rb). */
static void
swapped_bs_wm_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
    bus_size_t cnt)
{
	outs32rb(__ppc_ba(bsh, ofs), addr, cnt);
}

/* Write cnt byte-swapped 8-byte data to one location; not implemented. */
static void
swapped_bs_wm_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
    bus_size_t cnt)
{
	TODO;
}
680
/* Copy a region of cnt bytes into bus space; no swap for single bytes. */
static void
swapped_bs_wr_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
    size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = *addr++;
	powerpc_iomb();
}

/* Copy a region of cnt 2-byte data, byte-reversing each via out16rb(). */
static void
swapped_bs_wr_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
    size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out16rb(d++, *addr++);
	powerpc_iomb();
}

/* Copy a region of cnt 4-byte data, byte-reversing each via out32rb(). */
static void
swapped_bs_wr_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
    size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out32rb(d++, *addr++);
	powerpc_iomb();
}

/* Copy a region of byte-swapped 8-byte data; not implemented. */
static void
swapped_bs_wr_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
    size_t cnt)
{
	TODO;
}
720
/*
 * Set multiple: write val to the SAME location cnt times (pointer is
 * deliberately not advanced).  Single bytes need no swap.
 */
static void
swapped_bs_sm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}

/* Set multiple: byte-reversed 2-byte writes to the same location. */
static void
swapped_bs_sm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out16rb(d, val);
	powerpc_iomb();
}

/* Set multiple: byte-reversed 4-byte writes to the same location. */
static void
swapped_bs_sm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out32rb(d, val);
	powerpc_iomb();
}

/* Set multiple for byte-swapped 8-byte data; not implemented. */
static void
swapped_bs_sm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
{
	TODO;
}
756
/* Set region: fill cnt consecutive bytes with val; no swap needed. */
static void
swapped_bs_sr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = val;
	powerpc_iomb();
}

/* Set region: byte-reversed 2-byte writes to consecutive locations. */
static void
swapped_bs_sr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out16rb(d++, val);
	powerpc_iomb();
}

/* Set region: byte-reversed 4-byte writes to consecutive locations. */
static void
swapped_bs_sr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out32rb(d++, val);
	powerpc_iomb();
}

/* Set region for byte-swapped 8-byte data; not implemented. */
static void
swapped_bs_sr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
{
	TODO;
}
792
/*
 * Bus space tag for native-byte-order access: bs_le_tag on a
 * little-endian kernel, bs_be_tag on a big-endian one.  All data and
 * stream methods use the native (non-swapping) accessors.
 */
#if BYTE_ORDER == LITTLE_ENDIAN
struct bus_space bs_le_tag = {
#else
struct bus_space bs_be_tag = {
#endif
	/* mapping/unmapping */
	.bs_map =	bs_gen_map,
	.bs_unmap =	bs_gen_unmap,
	.bs_subregion =	bs_gen_subregion,

	/* allocation/deallocation */
	.bs_alloc =	bs_gen_alloc,
	.bs_free =	bs_gen_free,

	/* barrier */
	.bs_barrier =	bs_gen_barrier,

	/* read (single) */
	.bs_r_1 =	native_bs_rs_1,
	.bs_r_2 =	native_bs_rs_2,
	.bs_r_4 =	native_bs_rs_4,
	.bs_r_8 =	native_bs_rs_8,

	/* read (single) stream */
	.bs_r_s_2 =	native_bs_rs_2,
	.bs_r_s_4 =	native_bs_rs_4,
	.bs_r_s_8 =	native_bs_rs_8,

	/* read multiple */
	.bs_rm_1 =	native_bs_rm_1,
	.bs_rm_2 =	native_bs_rm_2,
	.bs_rm_4 =	native_bs_rm_4,
	.bs_rm_8 =	native_bs_rm_8,

	/* read multiple stream */
	.bs_rm_s_2 =	native_bs_rm_2,
	.bs_rm_s_4 =	native_bs_rm_4,
	.bs_rm_s_8 =	native_bs_rm_8,

	/* read region */
	.bs_rr_1 =	native_bs_rr_1,
	.bs_rr_2 =	native_bs_rr_2,
	.bs_rr_4 =	native_bs_rr_4,
	.bs_rr_8 =	native_bs_rr_8,

	/* read region stream */
	.bs_rr_s_2 =	native_bs_rr_2,
	.bs_rr_s_4 =	native_bs_rr_4,
	.bs_rr_s_8 =	native_bs_rr_8,

	/* write (single) */
	.bs_w_1 =	native_bs_ws_1,
	.bs_w_2 =	native_bs_ws_2,
	.bs_w_4 =	native_bs_ws_4,
	.bs_w_8 =	native_bs_ws_8,

	/* write (single) stream */
	.bs_w_s_2 =	native_bs_ws_2,
	.bs_w_s_4 =	native_bs_ws_4,
	.bs_w_s_8 =	native_bs_ws_8,

	/* write multiple */
	.bs_wm_1 =	native_bs_wm_1,
	.bs_wm_2 =	native_bs_wm_2,
	.bs_wm_4 =	native_bs_wm_4,
	.bs_wm_8 =	native_bs_wm_8,

	/* write multiple stream */
	.bs_wm_s_2 =	native_bs_wm_2,
	.bs_wm_s_4 =	native_bs_wm_4,
	.bs_wm_s_8 =	native_bs_wm_8,

	/* write region */
	.bs_wr_1 =	native_bs_wr_1,
	.bs_wr_2 =	native_bs_wr_2,
	.bs_wr_4 =	native_bs_wr_4,
	.bs_wr_8 =	native_bs_wr_8,

	/* write region stream */
	.bs_wr_s_2 =	native_bs_wr_2,
	.bs_wr_s_4 =	native_bs_wr_4,
	.bs_wr_s_8 =	native_bs_wr_8,

	/* set multiple */
	.bs_sm_1 =	native_bs_sm_1,
	.bs_sm_2 =	native_bs_sm_2,
	.bs_sm_4 =	native_bs_sm_4,
	.bs_sm_8 =	native_bs_sm_8,

	/* set multiple stream */
	.bs_sm_s_2 =	native_bs_sm_2,
	.bs_sm_s_4 =	native_bs_sm_4,
	.bs_sm_s_8 =	native_bs_sm_8,

	/* set region */
	.bs_sr_1 =	native_bs_sr_1,
	.bs_sr_2 =	native_bs_sr_2,
	.bs_sr_4 =	native_bs_sr_4,
	.bs_sr_8 =	native_bs_sr_8,

	/* set region stream */
	.bs_sr_s_2 =	native_bs_sr_2,
	.bs_sr_s_4 =	native_bs_sr_4,
	.bs_sr_s_8 =	native_bs_sr_8,

	/* copy region */
	.bs_cr_1 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_2 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_4 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_8 =	NULL,	/* UNIMPLEMENTED */

	/* copy region stream */
	.bs_cr_s_2 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_s_4 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_s_8 =	NULL,	/* UNIMPLEMENTED */
};
909
/*
 * Bus space tag for the opposite byte order from the CPU: bs_be_tag on
 * a little-endian kernel, bs_le_tag on a big-endian one.  Data methods
 * use the byte-swapping accessors; stream ("_s_") methods bypass
 * swapping and use the native accessors, per bus_space stream
 * semantics.
 */
#if BYTE_ORDER == LITTLE_ENDIAN
struct bus_space bs_be_tag = {
#else
struct bus_space bs_le_tag = {
#endif
	/* mapping/unmapping */
	.bs_map =	bs_gen_map,
	.bs_unmap =	bs_gen_unmap,
	.bs_subregion =	bs_gen_subregion,

	/* allocation/deallocation */
	.bs_alloc =	bs_gen_alloc,
	.bs_free =	bs_gen_free,

	/* barrier */
	.bs_barrier =	bs_gen_barrier,

	/* read (single) */
	.bs_r_1 =	swapped_bs_rs_1,
	.bs_r_2 =	swapped_bs_rs_2,
	.bs_r_4 =	swapped_bs_rs_4,
	.bs_r_8 =	swapped_bs_rs_8,

	/* read (single) stream */
	.bs_r_s_2 =	native_bs_rs_2,
	.bs_r_s_4 =	native_bs_rs_4,
	.bs_r_s_8 =	native_bs_rs_8,

	/* read multiple */
	.bs_rm_1 =	swapped_bs_rm_1,
	.bs_rm_2 =	swapped_bs_rm_2,
	.bs_rm_4 =	swapped_bs_rm_4,
	.bs_rm_8 =	swapped_bs_rm_8,

	/* read multiple stream */
	.bs_rm_s_2 =	native_bs_rm_2,
	.bs_rm_s_4 =	native_bs_rm_4,
	.bs_rm_s_8 =	native_bs_rm_8,

	/* read region */
	.bs_rr_1 =	swapped_bs_rr_1,
	.bs_rr_2 =	swapped_bs_rr_2,
	.bs_rr_4 =	swapped_bs_rr_4,
	.bs_rr_8 =	swapped_bs_rr_8,

	/* read region stream */
	.bs_rr_s_2 =	native_bs_rr_2,
	.bs_rr_s_4 =	native_bs_rr_4,
	.bs_rr_s_8 =	native_bs_rr_8,

	/* write (single) */
	.bs_w_1 =	swapped_bs_ws_1,
	.bs_w_2 =	swapped_bs_ws_2,
	.bs_w_4 =	swapped_bs_ws_4,
	.bs_w_8 =	swapped_bs_ws_8,

	/* write (single) stream */
	.bs_w_s_2 =	native_bs_ws_2,
	.bs_w_s_4 =	native_bs_ws_4,
	.bs_w_s_8 =	native_bs_ws_8,

	/* write multiple */
	.bs_wm_1 =	swapped_bs_wm_1,
	.bs_wm_2 =	swapped_bs_wm_2,
	.bs_wm_4 =	swapped_bs_wm_4,
	.bs_wm_8 =	swapped_bs_wm_8,

	/* write multiple stream */
	.bs_wm_s_2 =	native_bs_wm_2,
	.bs_wm_s_4 =	native_bs_wm_4,
	.bs_wm_s_8 =	native_bs_wm_8,

	/* write region */
	.bs_wr_1 =	swapped_bs_wr_1,
	.bs_wr_2 =	swapped_bs_wr_2,
	.bs_wr_4 =	swapped_bs_wr_4,
	.bs_wr_8 =	swapped_bs_wr_8,

	/* write region stream */
	.bs_wr_s_2 =	native_bs_wr_2,
	.bs_wr_s_4 =	native_bs_wr_4,
	.bs_wr_s_8 =	native_bs_wr_8,

	/* set multiple */
	.bs_sm_1 =	swapped_bs_sm_1,
	.bs_sm_2 =	swapped_bs_sm_2,
	.bs_sm_4 =	swapped_bs_sm_4,
	.bs_sm_8 =	swapped_bs_sm_8,

	/* set multiple stream */
	.bs_sm_s_2 =	native_bs_sm_2,
	.bs_sm_s_4 =	native_bs_sm_4,
	.bs_sm_s_8 =	native_bs_sm_8,

	/* set region */
	.bs_sr_1 =	swapped_bs_sr_1,
	.bs_sr_2 =	swapped_bs_sr_2,
	.bs_sr_4 =	swapped_bs_sr_4,
	.bs_sr_8 =	swapped_bs_sr_8,

	/* set region stream */
	.bs_sr_s_2 =	native_bs_sr_2,
	.bs_sr_s_4 =	native_bs_sr_4,
	.bs_sr_s_8 =	native_bs_sr_8,

	/* copy region */
	.bs_cr_1 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_2 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_4 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_8 =	NULL,	/* UNIMPLEMENTED */

	/* copy region stream */
	.bs_cr_s_2 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_s_4 =	NULL,	/* UNIMPLEMENTED */
	.bs_cr_s_8 =	NULL,	/* UNIMPLEMENTED */
};
Cache object: a2074337a9acd61d365bcc1b603a48f3
|