1 #ifndef __ALPHA_COMPILER_H
2 #define __ALPHA_COMPILER_H
3
/*
 * Herein are macros we use when describing various patterns we want
 * to get out of GCC.  In all cases we can get better schedules out of
 * the compiler if we hide as little as possible inside inline assembly.
 * However, we want to be able to know what we'll get out before giving
 * up on inline assembly.  Thus these tests and macros.
 */
11
/*
 * Byte-insert primitives: position (val) at byte offset (shift) within a
 * 64-bit word, masked to the operand width (8/16/64 bits for b/w/q).
 * The pure-C forms (disabled below) show the intended semantics; the
 * live versions emit the Alpha insbl/inswl/insql instructions directly
 * via extended asm.  "rI" lets the shift be a register or a literal.
 */
#if 0
#define __kernel_insbl(val, shift) \
	(((unsigned long)(val) & 0xfful) << ((shift) * 8))
#define __kernel_inswl(val, shift) \
	(((unsigned long)(val) & 0xfffful) << ((shift) * 8))
#define __kernel_insql(val, shift) \
	((unsigned long)(val) << ((shift) * 8))
#else
#define __kernel_insbl(val, shift) \
	({ unsigned long __kir; \
	   __asm__("insbl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
	   __kir; })
#define __kernel_inswl(val, shift) \
	({ unsigned long __kir; \
	   __asm__("inswl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
	   __kir; })
#define __kernel_insql(val, shift) \
	({ unsigned long __kir; \
	   __asm__("insql %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
	   __kir; })
#endif
33
/*
 * Byte-extract primitives: pull the 8-bit (extbl) or 16-bit (extwl)
 * field at byte offset (shift & 7) out of (val).  The C forms are kept
 * for documentation but disabled ("0 &&" forces the asm branch even on
 * compilers where the GCC-version test would pass); the live versions
 * emit the Alpha extbl/extwl instructions via extended asm.
 */
#if 0 && (__GNUC__ > 2 || __GNUC_MINOR__ >= 92)
#define __kernel_extbl(val, shift)  (((val) >> (((shift) & 7) * 8)) & 0xfful)
#define __kernel_extwl(val, shift)  (((val) >> (((shift) & 7) * 8)) & 0xfffful)
#else
#define __kernel_extbl(val, shift) \
	({ unsigned long __kir; \
	   __asm__("extbl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
	   __kir; })
#define __kernel_extwl(val, shift) \
	({ unsigned long __kir; \
	   __asm__("extwl %2,%1,%0" : "=r"(__kir) : "rI"(shift), "r"(val)); \
	   __kir; })
#endif
47
48
49 /*
50 * Beginning with EGCS 1.1, GCC defines __alpha_bwx__ when the BWX
51 * extension is enabled. Previous versions did not define anything
52 * we could test during compilation -- too bad, so sad.
53 */
54
/*
 * Sub-longword loads and stores.  When the compiler targets the BWX
 * (byte/word extension) instruction set it generates ldbu/ldwu/stb/stw
 * itself, so plain C accesses suffice.  Otherwise we emit the BWX
 * instructions explicitly via extended asm; the "m" constraint ties the
 * access to the actual memory operand so no separate barrier is needed.
 */
#if defined(__alpha_bwx__)
#define __kernel_ldbu(mem)	(mem)
#define __kernel_ldwu(mem)	(mem)
#define __kernel_stb(val,mem)	((mem) = (val))
#define __kernel_stw(val,mem)	((mem) = (val))
#else
#define __kernel_ldbu(mem) \
	({ unsigned char __kir; \
	   __asm__("ldbu %0,%1" : "=r"(__kir) : "m"(mem)); \
	   __kir; })
#define __kernel_ldwu(mem) \
	({ unsigned short __kir; \
	   __asm__("ldwu %0,%1" : "=r"(__kir) : "m"(mem)); \
	   __kir; })
#define __kernel_stb(val,mem) \
	__asm__("stb %1,%0" : "=m"(mem) : "r"(val))
#define __kernel_stw(val,mem) \
	__asm__("stw %1,%0" : "=m"(mem) : "r"(val))
#endif
74
75 #endif /* __ALPHA_COMPILER_H */
/* Cache object: fe166459bda434ee53b47f470773c41f */