Diffstat (limited to 'include/asm-powerpc/ppc_asm.h')
-rw-r--r--  include/asm-powerpc/ppc_asm.h  114
1 file changed, 112 insertions(+), 2 deletions(-)
diff --git a/include/asm-powerpc/ppc_asm.h b/include/asm-powerpc/ppc_asm.h
index 2dbd4e7884f..0966899d974 100644
--- a/include/asm-powerpc/ppc_asm.h
+++ b/include/asm-powerpc/ppc_asm.h
@@ -6,6 +6,7 @@
#include <linux/stringify.h>
#include <asm/asm-compat.h>
+#include <asm/processor.h>
#ifndef __ASSEMBLY__
#error __FILE__ should only be used in assembler files
@@ -73,6 +74,15 @@ END_FTR_SECTION_IFCLR(CPU_FTR_PURR); \
REST_10GPRS(22, base)
#endif
+/*
+ * Define what the VSX XX1 form instructions will look like, then add
+ * the 128 bit load store instructions based on that.
+ */
+#define VSX_XX1(xs, ra, rb) (((xs) & 0x1f) << 21 | ((ra) << 16) | \
+ ((rb) << 11) | (((xs) >> 5)))
+
+#define STXVD2X(xs, ra, rb) .long (0x7c000798 | VSX_XX1((xs), (ra), (rb)))
+#define LXVD2X(xs, ra, rb) .long (0x7c000698 | VSX_XX1((xs), (ra), (rb)))
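
For illustration only, and not part of the patch: the standalone C sketch below mirrors the VSX_XX1/STXVD2X arithmetic above, showing how a 6-bit VSR number is split, with its low five bits going into the instruction's T/S field and its top bit becoming the trailing SX/TX bit. The vsx_xx1 helper and the chosen register numbers are invented for the sketch.

#include <stdio.h>
#include <stdint.h>

/* Standalone model of VSX_XX1 and STXVD2X from the hunk above. */
static uint32_t vsx_xx1(unsigned xs, unsigned ra, unsigned rb)
{
	return ((xs & 0x1f) << 21) | (ra << 16) | (rb << 11) | (xs >> 5);
}

int main(void)
{
	/* Encode "stxvd2x vs34,r5,r6": 34 = 0b100010, so the T/S field
	 * gets 2 and the SX bit at the bottom of the word gets 1. */
	uint32_t insn = 0x7c000798 | vsx_xx1(34, 5, 6);
	printf("stxvd2x vs34,r5,r6 -> .long 0x%08x\n", insn);
	return 0;
}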
#define SAVE_2GPRS(n, base) SAVE_GPR(n, base); SAVE_GPR(n+1, base)
#define SAVE_4GPRS(n, base) SAVE_2GPRS(n, base); SAVE_2GPRS(n+2, base)
@@ -83,13 +93,13 @@ END_FTR_SECTION_IFCLR(CPU_FTR_PURR); \
#define REST_8GPRS(n, base) REST_4GPRS(n, base); REST_4GPRS(n+4, base)
#define REST_10GPRS(n, base) REST_8GPRS(n, base); REST_2GPRS(n+8, base)
-#define SAVE_FPR(n, base) stfd n,THREAD_FPR0+8*(n)(base)
+#define SAVE_FPR(n, base) stfd n,THREAD_FPR0+8*TS_FPRWIDTH*(n)(base)
#define SAVE_2FPRS(n, base) SAVE_FPR(n, base); SAVE_FPR(n+1, base)
#define SAVE_4FPRS(n, base) SAVE_2FPRS(n, base); SAVE_2FPRS(n+2, base)
#define SAVE_8FPRS(n, base) SAVE_4FPRS(n, base); SAVE_4FPRS(n+4, base)
#define SAVE_16FPRS(n, base) SAVE_8FPRS(n, base); SAVE_8FPRS(n+8, base)
#define SAVE_32FPRS(n, base) SAVE_16FPRS(n, base); SAVE_16FPRS(n+16, base)
-#define REST_FPR(n, base) lfd n,THREAD_FPR0+8*(n)(base)
+#define REST_FPR(n, base) lfd n,THREAD_FPR0+8*TS_FPRWIDTH*(n)(base)
#define REST_2FPRS(n, base) REST_FPR(n, base); REST_FPR(n+1, base)
#define REST_4FPRS(n, base) REST_2FPRS(n, base); REST_2FPRS(n+2, base)
#define REST_8FPRS(n, base) REST_4FPRS(n, base); REST_4FPRS(n+4, base)
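
As background for the SAVE_FPR/REST_FPR change above, a sketch under the assumption that TS_FPRWIDTH (pulled in via the new asm/processor.h include) is 1 without VSX and 2 with VSX: the per-register stride in the thread save area grows from 8 to 16 bytes, because each FPR now occupies the first doubleword of a 16-byte VSR-sized slot. The fpr_offset helper below is hypothetical, not kernel code.

#include <stdio.h>

/* Hypothetical model of the FPR save-area stride: offset of FPR n
 * relative to THREAD_FPR0, matching 8*TS_FPRWIDTH*(n) above. */
static unsigned fpr_offset(unsigned n, unsigned ts_fprwidth)
{
	return 8 * ts_fprwidth * n;
}

int main(void)
{
	printf("FPR5 offset without VSX: %u\n", fpr_offset(5, 1));	/* 40 */
	printf("FPR5 offset with VSX:    %u\n", fpr_offset(5, 2));	/* 80 */
	return 0;
}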
@@ -109,6 +119,33 @@ END_FTR_SECTION_IFCLR(CPU_FTR_PURR); \
#define REST_16VRS(n,b,base) REST_8VRS(n,b,base); REST_8VRS(n+8,b,base)
#define REST_32VRS(n,b,base) REST_16VRS(n,b,base); REST_16VRS(n+16,b,base)
+/* Save the lower 32 VSRs in the thread VSR region */
+#define SAVE_VSR(n,b,base) li b,THREAD_VSR0+(16*(n)); STXVD2X(n,b,base)
+#define SAVE_2VSRS(n,b,base) SAVE_VSR(n,b,base); SAVE_VSR(n+1,b,base)
+#define SAVE_4VSRS(n,b,base) SAVE_2VSRS(n,b,base); SAVE_2VSRS(n+2,b,base)
+#define SAVE_8VSRS(n,b,base) SAVE_4VSRS(n,b,base); SAVE_4VSRS(n+4,b,base)
+#define SAVE_16VSRS(n,b,base) SAVE_8VSRS(n,b,base); SAVE_8VSRS(n+8,b,base)
+#define SAVE_32VSRS(n,b,base) SAVE_16VSRS(n,b,base); SAVE_16VSRS(n+16,b,base)
+#define REST_VSR(n,b,base) li b,THREAD_VSR0+(16*(n)); LXVD2X(n,b,base)
+#define REST_2VSRS(n,b,base) REST_VSR(n,b,base); REST_VSR(n+1,b,base)
+#define REST_4VSRS(n,b,base) REST_2VSRS(n,b,base); REST_2VSRS(n+2,b,base)
+#define REST_8VSRS(n,b,base) REST_4VSRS(n,b,base); REST_4VSRS(n+4,b,base)
+#define REST_16VSRS(n,b,base) REST_8VSRS(n,b,base); REST_8VSRS(n+8,b,base)
+#define REST_32VSRS(n,b,base) REST_16VSRS(n,b,base); REST_16VSRS(n+16,b,base)
+/* Save the upper 32 VSRs (32-63), which overlay the VMX VRs, in the thread VMX region (THREAD_VR0) */
+#define SAVE_VSRU(n,b,base) li b,THREAD_VR0+(16*(n)); STXVD2X(n+32,b,base)
+#define SAVE_2VSRSU(n,b,base) SAVE_VSRU(n,b,base); SAVE_VSRU(n+1,b,base)
+#define SAVE_4VSRSU(n,b,base) SAVE_2VSRSU(n,b,base); SAVE_2VSRSU(n+2,b,base)
+#define SAVE_8VSRSU(n,b,base) SAVE_4VSRSU(n,b,base); SAVE_4VSRSU(n+4,b,base)
+#define SAVE_16VSRSU(n,b,base) SAVE_8VSRSU(n,b,base); SAVE_8VSRSU(n+8,b,base)
+#define SAVE_32VSRSU(n,b,base) SAVE_16VSRSU(n,b,base); SAVE_16VSRSU(n+16,b,base)
+#define REST_VSRU(n,b,base) li b,THREAD_VR0+(16*(n)); LXVD2X(n+32,b,base)
+#define REST_2VSRSU(n,b,base) REST_VSRU(n,b,base); REST_VSRU(n+1,b,base)
+#define REST_4VSRSU(n,b,base) REST_2VSRSU(n,b,base); REST_2VSRSU(n+2,b,base)
+#define REST_8VSRSU(n,b,base) REST_4VSRSU(n,b,base); REST_4VSRSU(n+4,b,base)
+#define REST_16VSRSU(n,b,base) REST_8VSRSU(n,b,base); REST_8VSRSU(n+8,b,base)
+#define REST_32VSRSU(n,b,base) REST_16VSRSU(n,b,base); REST_16VSRSU(n+16,b,base)
+
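
Also for illustration only: the SAVE_VSR/SAVE_VSRU families above first load a byte offset into a scratch GPR with li and then emit the hand-encoded store, splitting the 64 VSRs across two save areas, with the lower 32 at THREAD_VSR0 and the upper 32 (which overlay the VMX registers) reusing the THREAD_VR0 area. The sketch below models only that offset calculation; FAKE_THREAD_VSR0 and FAKE_THREAD_VR0 are made-up placeholders for the real asm-offsets values.

#include <stdio.h>

/* Placeholder offsets; the real THREAD_VSR0/THREAD_VR0 come from
 * asm-offsets and are not reproduced here. */
#define FAKE_THREAD_VSR0	0x100
#define FAKE_THREAD_VR0		0x300

/* Where the macros above would place VSR 'n' in the thread struct:
 * 16-byte slots, lower half in the VSR area, upper half in the VR area. */
static unsigned vsr_save_offset(unsigned n)
{
	if (n < 32)
		return FAKE_THREAD_VSR0 + 16 * n;	/* SAVE_VSR  */
	return FAKE_THREAD_VR0 + 16 * (n - 32);		/* SAVE_VSRU */
}

int main(void)
{
	printf("vsr3  -> 0x%x\n", vsr_save_offset(3));
	printf("vsr40 -> 0x%x\n", vsr_save_offset(40));
	return 0;
}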
#define SAVE_EVR(n,s,base) evmergehi s,s,n; stw s,THREAD_EVR0+4*(n)(base)
#define SAVE_2EVRS(n,s,base) SAVE_EVR(n,s,base); SAVE_EVR(n+1,s,base)
#define SAVE_4EVRS(n,s,base) SAVE_2EVRS(n,s,base); SAVE_2EVRS(n+2,s,base)
@@ -356,6 +393,12 @@ END_FTR_SECTION_IFCLR(CPU_FTR_601)
#define toreal(rd)
#define fromreal(rd)
+/*
+ * We use addis to ensure compatibility with the "classic" ppc versions of
+ * these macros, which use rs = 0 to get the tophys offset in rd, rather than
+ * converting the address in r0, and so this version has to do that too
+ * (i.e. set register rd to 0 when rs == 0).
+ */
#define tophys(rd,rs) \
addis rd,rs,0
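
A brief aside on the comment above, as an illustrative sketch rather than kernel code: addis treats an RA field of 0 as the literal value 0, not as GPR0, which is what lets tophys(rd, 0) deposit 0 (the physical offset on these platforms) in rd, while tophys(rd, rs) otherwise just copies rs. The addis model below is hypothetical.

#include <stdio.h>

/* Toy model of "addis rd,ra,si": RA == 0 means the literal 0, not GPR0. */
static long addis(const long *gpr, unsigned ra, int si)
{
	long base = (ra == 0) ? 0 : gpr[ra];
	return base + ((long)si << 16);
}

int main(void)
{
	long gpr[32] = { 0 };
	gpr[0] = 0x1234;		/* ignored when the RA field is 0 */
	gpr[3] = 0xc0000000L;
	printf("tophys(rd, r3) -> 0x%lx\n", addis(gpr, 3, 0));	/* copies r3 */
	printf("tophys(rd, 0)  -> 0x%lx\n", addis(gpr, 0, 0));	/* literal 0 */
	return 0;
}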
@@ -533,6 +576,73 @@ END_FTR_SECTION_IFCLR(CPU_FTR_601)
#define vr30 30
#define vr31 31
+/* VSX Registers (VSRs) */
+
+#define vsr0 0
+#define vsr1 1
+#define vsr2 2
+#define vsr3 3
+#define vsr4 4
+#define vsr5 5
+#define vsr6 6
+#define vsr7 7
+#define vsr8 8
+#define vsr9 9
+#define vsr10 10
+#define vsr11 11
+#define vsr12 12
+#define vsr13 13
+#define vsr14 14
+#define vsr15 15
+#define vsr16 16
+#define vsr17 17
+#define vsr18 18
+#define vsr19 19
+#define vsr20 20
+#define vsr21 21
+#define vsr22 22
+#define vsr23 23
+#define vsr24 24
+#define vsr25 25
+#define vsr26 26
+#define vsr27 27
+#define vsr28 28
+#define vsr29 29
+#define vsr30 30
+#define vsr31 31
+#define vsr32 32
+#define vsr33 33
+#define vsr34 34
+#define vsr35 35
+#define vsr36 36
+#define vsr37 37
+#define vsr38 38
+#define vsr39 39
+#define vsr40 40
+#define vsr41 41
+#define vsr42 42
+#define vsr43 43
+#define vsr44 44
+#define vsr45 45
+#define vsr46 46
+#define vsr47 47
+#define vsr48 48
+#define vsr49 49
+#define vsr50 50
+#define vsr51 51
+#define vsr52 52
+#define vsr53 53
+#define vsr54 54
+#define vsr55 55
+#define vsr56 56
+#define vsr57 57
+#define vsr58 58
+#define vsr59 59
+#define vsr60 60
+#define vsr61 61
+#define vsr62 62
+#define vsr63 63
+
/* SPE Registers (EVPRs) */
#define evr0 0