author    Tony Breeds <tony@bakeyournoodle.com>  2011-11-30 16:39:21 -0500
committer Josh Boyer <jwboyer@gmail.com>         2011-12-09 07:49:27 -0500
commit    e32a03290c72773c304ffeebffed0a63c0a672ae (patch)
tree      526f104f9451e97b3b9f111bde7568b9db2e6c5b /arch/powerpc
parent    ca899859f14e510854c5c8908d3ffdfcd34b16de (diff)
powerpc/boot: Add extended precision shifts to the boot wrapper.
The upcoming Currituck patches will need to do 64-bit shifts, which will fail with an undefined symbol without this patch.

I looked at linking against libgcc, but we can't guarantee that libgcc was compiled with soft-float. Using ../lib/div64.S or ../kernel/misc_32.S would also break the build, since those .o files need to be built with different flags for the boot wrapper than for the kernel.

So for now the easiest option is to just copy the code from arch/powerpc/kernel/misc_32.S. I don't think this code changes too often ;P

Signed-off-by: Tony Breeds <tony@bakeyournoodle.com>
Signed-off-by: Josh Boyer <jwboyer@gmail.com>
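For background on where the undefined symbols come from: on a 32-bit powerpc target, GCC typically lowers a 64-bit shift by a variable count to a libgcc helper call, and the boot wrapper has no libgcc to resolve it. A minimal sketch (the function name is hypothetical, for illustration only):

	/* On 32-bit powerpc, GCC emits a call to __lshrdi3 for this
	 * variable 64-bit shift; without the routines added below, the
	 * boot wrapper link fails with an undefined symbol. */
	unsigned long long shift_down(unsigned long long addr, int count)
	{
		return addr >> count;	/* -> call to __lshrdi3 */
	}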
Diffstat (limited to 'arch/powerpc')
 arch/powerpc/boot/div64.S | 52 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+), 0 deletions(-)
diff --git a/arch/powerpc/boot/div64.S b/arch/powerpc/boot/div64.S
index d271ab542673..bbcb8a4cc121 100644
--- a/arch/powerpc/boot/div64.S
+++ b/arch/powerpc/boot/div64.S
@@ -57,3 +57,55 @@ __div64_32:
 	stw	r8,4(r3)
 	mr	r3,r6		# return the remainder in r3
 	blr
+
+/*
+ * Extended precision shifts.
+ *
+ * Updated to be valid for shift counts from 0 to 63 inclusive.
+ * -- Gabriel
+ *
+ * R3/R4 has 64 bit value
+ * R5 has shift count
+ * result in R3/R4
+ *
+ * ashrdi3: arithmetic right shift (sign propagation)
+ * lshrdi3: logical right shift
+ * ashldi3: left shift
+ */
+	.globl __ashrdi3
+__ashrdi3:
+	subfic	r6,r5,32
+	srw	r4,r4,r5	# LSW = count > 31 ? 0 : LSW >> count
+	addi	r7,r5,32	# could be xori, or addi with -32
+	slw	r6,r3,r6	# t1 = count > 31 ? 0 : MSW << (32-count)
+	rlwinm	r8,r7,0,32	# t3 = (count < 32) ? 32 : 0
+	sraw	r7,r3,r7	# t2 = MSW >> (count-32)
+	or	r4,r4,r6	# LSW |= t1
+	slw	r7,r7,r8	# t2 = (count < 32) ? 0 : t2
+	sraw	r3,r3,r5	# MSW = MSW >> count
+	or	r4,r4,r7	# LSW |= t2
+	blr
+
+	.globl __ashldi3
+__ashldi3:
+	subfic	r6,r5,32
+	slw	r3,r3,r5	# MSW = count > 31 ? 0 : MSW << count
+	addi	r7,r5,32	# could be xori, or addi with -32
+	srw	r6,r4,r6	# t1 = count > 31 ? 0 : LSW >> (32-count)
+	slw	r7,r4,r7	# t2 = count < 32 ? 0 : LSW << (count-32)
+	or	r3,r3,r6	# MSW |= t1
+	slw	r4,r4,r5	# LSW = LSW << count
+	or	r3,r3,r7	# MSW |= t2
+	blr
+
+	.globl __lshrdi3
+__lshrdi3:
+	subfic	r6,r5,32
+	srw	r4,r4,r5	# LSW = count > 31 ? 0 : LSW >> count
+	addi	r7,r5,32	# could be xori, or addi with -32
+	slw	r6,r3,r6	# t1 = count > 31 ? 0 : MSW << (32-count)
+	srw	r7,r3,r7	# t2 = count < 32 ? 0 : MSW >> (count-32)
+	or	r4,r4,r6	# LSW |= t1
+	srw	r3,r3,r5	# MSW = MSW >> count
+	or	r4,r4,r7	# LSW |= t2
+	blr
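To sanity-check the branch-free sequence, here is a hedged C model of the logical right shift for counts 0 to 63 (the names lshrdi3_model, srw32 and slw32 are illustrative, not part of the patch). It mirrors the assembly's three OR terms and the srw/slw convention that shift amounts of 32 to 63 produce zero:

	#include <stdint.h>

	/* PowerPC srw/slw yield 0 for shift amounts 32..63; plain C
	 * shifts by >= 32 are undefined, so guard them explicitly. */
	static uint32_t srw32(uint32_t x, uint32_t n) { return n > 31 ? 0 : x >> n; }
	static uint32_t slw32(uint32_t x, uint32_t n) { return n > 31 ? 0 : x << n; }

	/* Model of __lshrdi3 for counts 0..63: the three OR terms are
	 * LSW >> count, MSW << (32-count) and MSW >> (count-32); the
	 * terms that do not apply for a given count evaluate to 0. */
	uint64_t lshrdi3_model(uint64_t v, uint32_t count)
	{
		uint32_t msw = (uint32_t)(v >> 32), lsw = (uint32_t)v;
		uint32_t lo = srw32(lsw, count)
			    | slw32(msw, 32 - count)	/* wraps past 31 when count > 32 */
			    | srw32(msw, count - 32);	/* wraps past 31 when count < 32 */
		uint32_t hi = srw32(msw, count);
		return ((uint64_t)hi << 32) | lo;
	}

The assembly gets the same effect without branches because srw and slw look only at the low six bits of the count and return zero for amounts 32 to 63. The arithmetic variant __ashrdi3 needs the extra rlwinm/slw pair because sraw sign-fills rather than zeroing for large counts, so its t2 term must be forced to zero when count < 32.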