summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--  target/ppc/cpu.h                     20
-rw-r--r--  target/ppc/translate/vsx-impl.inc.c  34
2 files changed, 14 insertions(+), 40 deletions(-)
diff --git a/target/ppc/cpu.h b/target/ppc/cpu.h
index 612dd05e94..fc12b4688e 100644
--- a/target/ppc/cpu.h
+++ b/target/ppc/cpu.h
@@ -2583,34 +2583,34 @@ static inline bool lsw_reg_in_range(int start, int nregs, int rx)
 #define VsrSD(i) s64[1 - (i)]
 #endif
 
-static inline int fpr_offset(int i)
+static inline int vsr64_offset(int i, bool high)
 {
-    return offsetof(CPUPPCState, vsr[i].VsrD(0));
+    return offsetof(CPUPPCState, vsr[i].VsrD(high ? 0 : 1));
 }
 
-static inline uint64_t *cpu_fpr_ptr(CPUPPCState *env, int i)
+static inline int vsr_full_offset(int i)
 {
-    return (uint64_t *)((uintptr_t)env + fpr_offset(i));
+    return offsetof(CPUPPCState, vsr[i].u64[0]);
 }
 
-static inline int vsrl_offset(int i)
+static inline int fpr_offset(int i)
 {
-    return offsetof(CPUPPCState, vsr[i].VsrD(1));
+    return vsr64_offset(i, true);
 }
 
-static inline int vsr_full_offset(int i)
+static inline uint64_t *cpu_fpr_ptr(CPUPPCState *env, int i)
 {
-    return offsetof(CPUPPCState, vsr[i].u64[0]);
+    return (uint64_t *)((uintptr_t)env + fpr_offset(i));
 }
 
 static inline uint64_t *cpu_vsrl_ptr(CPUPPCState *env, int i)
 {
-    return (uint64_t *)((uintptr_t)env + vsrl_offset(i));
+    return (uint64_t *)((uintptr_t)env + vsr64_offset(i, false));
 }
 
 static inline long avr64_offset(int i, bool high)
 {
-    return offsetof(CPUPPCState, vsr[32 + i].VsrD(high ? 0 : 1));
+    return vsr64_offset(i + 32, high);
 }
 
 static inline int avr_full_offset(int i)
diff --git a/target/ppc/translate/vsx-impl.inc.c b/target/ppc/translate/vsx-impl.inc.c
index 7d02a235e7..95a269fff0 100644
--- a/target/ppc/translate/vsx-impl.inc.c
+++ b/target/ppc/translate/vsx-impl.inc.c
@@ -1,49 +1,23 @@
 /*** VSX extension ***/
 
-static inline void get_vsrl(TCGv_i64 dst, int n)
-{
-    tcg_gen_ld_i64(dst, cpu_env, vsrl_offset(n));
-}
-
-static inline void set_vsrl(int n, TCGv_i64 src)
-{
-    tcg_gen_st_i64(src, cpu_env, vsrl_offset(n));
-}
-
 static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
 {
-    if (n < 32) {
-        get_fpr(dst, n);
-    } else {
-        get_avr64(dst, n - 32, true);
-    }
+    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
 }
 
 static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
 {
-    if (n < 32) {
-        get_vsrl(dst, n);
-    } else {
-        get_avr64(dst, n - 32, false);
-    }
+    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
 }
 
 static inline void set_cpu_vsrh(int n, TCGv_i64 src)
 {
-    if (n < 32) {
-        set_fpr(n, src);
-    } else {
-        set_avr64(n - 32, src, true);
-    }
+    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
 }
 
 static inline void set_cpu_vsrl(int n, TCGv_i64 src)
 {
-    if (n < 32) {
-        set_vsrl(n, src);
-    } else {
-        set_avr64(n - 32, src, false);
-    }
+    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
 }
 
 #define VSX_LOAD_SCALAR(name, operation)                      \