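/*
 * ARM memory micro-operations in the dyngen style used by older QEMU:
 * T0 and T1 are the integer temporaries (T1 holds the address), M0 is
 * the 64-bit iwMMXt temporary, and FT0s/FT0d are the single- and
 * double-precision VFP temporaries.  The file is meant to be included
 * once per MEMSUFFIX so that each access variant gets its own set of
 * ops (see the usage sketch at the end of the file).
 */
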
/* ARM memory operations. */

/* Load from address T1 into T0. */
#define MEM_LD_OP(name) \
void OPPROTO glue(op_ld##name,MEMSUFFIX)(void) \
{ \
    T0 = glue(ld##name,MEMSUFFIX)(T1); \
    FORCE_RET(); \
}

MEM_LD_OP(ub)
MEM_LD_OP(sb)
MEM_LD_OP(uw)
MEM_LD_OP(sw)
MEM_LD_OP(l)

#undef MEM_LD_OP
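/*
 * For illustration: assuming this file is included with MEMSUFFIX
 * defined to _raw (one of the access-variant suffixes the old accessor
 * headers provide), MEM_LD_OP(ub) above expands to roughly:
 *
 *     void OPPROTO op_ldub_raw(void)
 *     {
 *         T0 = ldub_raw(T1);
 *         FORCE_RET();
 *     }
 *
 * i.e. one micro-op per width/signedness, loading into T0 with the
 * extension implied by the ub/sb/uw/sw/l variant.
 */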
/* Store T0 to address T1. */
#define MEM_ST_OP(name) \
void OPPROTO glue(op_st##name,MEMSUFFIX)(void) \
{ \
    glue(st##name,MEMSUFFIX)(T1, T0); \
    FORCE_RET(); \
}

MEM_ST_OP(b)
MEM_ST_OP(w)
MEM_ST_OP(l)

#undef MEM_ST_OP
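/*
 * The store ops mirror the loads; with the same assumed _raw suffix,
 * MEM_ST_OP(b) expands to op_stb_raw(), which calls stb_raw(T1, T0).
 * Note the accessor argument order: address first, then value.
 */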
/* Swap T0 with memory at address T1. */
/* ??? Is this exception safe? */
#define MEM_SWP_OP(name, lname) \
void OPPROTO glue(op_swp##name,MEMSUFFIX)(void) \
{ \
    uint32_t tmp; \
    cpu_lock(); \
    tmp = glue(ld##lname,MEMSUFFIX)(T1); \
    glue(st##name,MEMSUFFIX)(T1, T0); \
    T0 = tmp; \
    cpu_unlock(); \
    FORCE_RET(); \
}

MEM_SWP_OP(b, ub)
MEM_SWP_OP(l, l)

#undef MEM_SWP_OP
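/*
 * These two instantiations back the ARM SWPB and SWP instructions: the
 * old value at [T1] is read, T0 is written out, and the old value is
 * returned in T0, with cpu_lock()/cpu_unlock() keeping the read/write
 * pair from interleaving with other emulated CPUs.
 */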
/* Floating point load/store. Address is in T1. */
#define VFP_MEM_OP(p, w) \
void OPPROTO glue(op_vfp_ld##p,MEMSUFFIX)(void) \
{ \
    FT0##p = glue(ldf##w,MEMSUFFIX)(T1); \
    FORCE_RET(); \
} \
void OPPROTO glue(op_vfp_st##p,MEMSUFFIX)(void) \
{ \
    glue(stf##w,MEMSUFFIX)(T1, FT0##p); \
    FORCE_RET(); \
}

VFP_MEM_OP(s,l)
VFP_MEM_OP(d,q)

#undef VFP_MEM_OP
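/*
 * VFP_MEM_OP(s,l) generates the single-precision ops via the 32-bit
 * float accessors (ldfl/stfl), and VFP_MEM_OP(d,q) the double-precision
 * ops via the 64-bit ones (ldfq/stfq); FT0s and FT0d are the matching
 * VFP temporaries.
 */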
/* iwMMXt load/store. Address is in T1. */
#define MMX_MEM_OP(name, ldname) \
void OPPROTO glue(op_iwmmxt_ld##name,MEMSUFFIX)(void) \
{ \
    M0 = glue(ld##ldname,MEMSUFFIX)(T1); \
    FORCE_RET(); \
} \
void OPPROTO glue(op_iwmmxt_st##name,MEMSUFFIX)(void) \
{ \
    glue(st##name,MEMSUFFIX)(T1, M0); \
    FORCE_RET(); \
}

MMX_MEM_OP(b, ub)
MMX_MEM_OP(w, uw)
MMX_MEM_OP(l, l)
MMX_MEM_OP(q, q)

#undef MMX_MEM_OP
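/*
 * With the assumed _raw suffix, MMX_MEM_OP(q, q) defines
 * op_iwmmxt_ldq_raw() and op_iwmmxt_stq_raw(), moving a full 64-bit
 * quantity between memory and the iwMMXt working register M0; the
 * narrower variants use the b/w/l accessors instead.
 */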
#undef MEMSUFFIX
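/*
 * Usage sketch (not part of this header, and the file name and exact
 * suffixes are assumptions): the including .c file defines MEMSUFFIX
 * before each inclusion, typically once per address-space variant:
 *
 *     #define MEMSUFFIX _raw
 *     #include "op_mem.h"
 *
 *     #if !defined(CONFIG_USER_ONLY)
 *     #define MEMSUFFIX _user
 *     #include "op_mem.h"
 *     #define MEMSUFFIX _kernel
 *     #include "op_mem.h"
 *     #endif
 *
 * Because this header #undefs MEMSUFFIX at the end, each inclusion
 * starts from a clean slate.
 */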