Diffstat (limited to 'arch/powerpc/kvm')
 arch/powerpc/kvm/book3s_hv_rmhandlers.S | 86 ++++++------------------
 1 file changed, 22 insertions(+), 64 deletions(-)
diff --git a/arch/powerpc/kvm/book3s_hv_rmhandlers.S b/arch/powerpc/kvm/book3s_hv_rmhandlers.S
index 47fd536fb13b..acbd1d6afba0 100644
--- a/arch/powerpc/kvm/book3s_hv_rmhandlers.S
+++ b/arch/powerpc/kvm/book3s_hv_rmhandlers.S
@@ -1261,7 +1261,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_ARCH_206)
 
 	/* save FP state */
 	mr	r3, r9
-	bl	.kvmppc_save_fp
+	bl	kvmppc_save_fp
 
 	/* Increment yield count if they have a VPA */
 	ld	r8, VCPU_VPA(r9)	/* do they have a VPA? */
@@ -1691,7 +1691,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_ARCH_206)
 	std	r31, VCPU_GPR(R31)(r3)
 
 	/* save FP state */
-	bl	.kvmppc_save_fp
+	bl	kvmppc_save_fp
 
 	/*
 	 * Take a nap until a decrementer or external interrupt occurs,
@@ -1869,8 +1869,12 @@ kvmppc_read_intr:
 /*
  * Save away FP, VMX and VSX registers.
  * r3 = vcpu pointer
+ * N.B. r30 and r31 are volatile across this function,
+ * thus it is not callable from C.
  */
-_GLOBAL(kvmppc_save_fp)
+kvmppc_save_fp:
+	mflr	r30
+	mr	r31,r3
 	mfmsr	r5
 	ori	r8,r5,MSR_FP
 #ifdef CONFIG_ALTIVEC
@@ -1885,42 +1889,17 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX)
 #endif
 	mtmsrd	r8
 	isync
-#ifdef CONFIG_VSX
-BEGIN_FTR_SECTION
-	reg = 0
-	.rept	32
-	li	r6,reg*16+VCPU_FPRS
-	STXVD2X(reg,R6,R3)
-	reg = reg + 1
-	.endr
-FTR_SECTION_ELSE
-#endif
-	reg = 0
-	.rept	32
-	stfd	reg,reg*8+VCPU_FPRS(r3)
-	reg = reg + 1
-	.endr
-#ifdef CONFIG_VSX
-ALT_FTR_SECTION_END_IFSET(CPU_FTR_VSX)
-#endif
-	mffs	fr0
-	stfd	fr0,VCPU_FPSCR(r3)
-
+	addi	r3,r3,VCPU_FPRS
+	bl	.store_fp_state
 #ifdef CONFIG_ALTIVEC
 BEGIN_FTR_SECTION
-	reg = 0
-	.rept	32
-	li	r6,reg*16+VCPU_VRS
-	stvx	reg,r6,r3
-	reg = reg + 1
-	.endr
-	mfvscr	vr0
-	li	r6,VCPU_VSCR
-	stvx	vr0,r6,r3
+	addi	r3,r31,VCPU_VRS
+	bl	.store_vr_state
 END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 #endif
 	mfspr	r6,SPRN_VRSAVE
 	stw	r6,VCPU_VRSAVE(r3)
+	mtlr	r30
 	mtmsrd	r5
 	isync
 	blr
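The N.B. comment added above is the cost of this refactor: store_fp_state and store_vr_state are ordinary C-ABI functions, so kvmppc_save_fp, which is itself entered with bl and runs without a stack frame, has to park its own return address and base pointer in non-volatile registers before making the nested calls. A minimal sketch of that pattern, with a hypothetical label and offset (only store_fp_state comes from the patch):

	/* Sketch: nested C-ABI call from a frameless asm routine.
	 * bl overwrites LR, so the return address is parked in r30
	 * and the base pointer in r31; both are non-volatile under
	 * the C ABI, so the callee preserves them, but this routine
	 * clobbers them for *its* caller -- hence "not callable from C".
	 */
my_state_save:				/* hypothetical routine */
	mflr	r30			/* save our return address */
	mr	r31,r3			/* keep base pointer across the call */
	addi	r3,r31,SOME_OFFSET	/* hypothetical offset; arg goes in r3 */
	bl	.store_fp_state		/* clobbers LR and the volatile GPRs */
	mtlr	r30			/* restore return address */
	blr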
@@ -1928,9 +1907,12 @@ END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 /*
  * Load up FP, VMX and VSX registers
  * r4 = vcpu pointer
+ * N.B. r30 and r31 are volatile across this function,
+ * thus it is not callable from C.
  */
-	.globl	kvmppc_load_fp
 kvmppc_load_fp:
+	mflr	r30
+	mr	r31,r4
 	mfmsr	r9
 	ori	r8,r9,MSR_FP
 #ifdef CONFIG_ALTIVEC
@@ -1945,42 +1927,18 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX)
 #endif
 	mtmsrd	r8
 	isync
-	lfd	fr0,VCPU_FPSCR(r4)
-	MTFSF_L(fr0)
-#ifdef CONFIG_VSX
-BEGIN_FTR_SECTION
-	reg = 0
-	.rept	32
-	li	r7,reg*16+VCPU_FPRS
-	LXVD2X(reg,R7,R4)
-	reg = reg + 1
-	.endr
-FTR_SECTION_ELSE
-#endif
-	reg = 0
-	.rept	32
-	lfd	reg,reg*8+VCPU_FPRS(r4)
-	reg = reg + 1
-	.endr
-#ifdef CONFIG_VSX
-ALT_FTR_SECTION_END_IFSET(CPU_FTR_VSX)
-#endif
-
+	addi	r3,r4,VCPU_FPRS
+	bl	.load_fp_state
 #ifdef CONFIG_ALTIVEC
 BEGIN_FTR_SECTION
-	li	r7,VCPU_VSCR
-	lvx	vr0,r7,r4
-	mtvscr	vr0
-	reg = 0
-	.rept	32
-	li	r7,reg*16+VCPU_VRS
-	lvx	reg,r7,r4
-	reg = reg + 1
-	.endr
+	addi	r3,r31,VCPU_VRS
+	bl	.load_vr_state
 END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 #endif
 	lwz	r7,VCPU_VRSAVE(r4)
 	mtspr	SPRN_VRSAVE,r7
+	mtlr	r30
+	mr	r4,r31
 	blr
 
 /*
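Each helper takes a single pointer in r3 (the FP or VMX state block embedded in the vcpu), which is why one addi past the vcpu pointer is all the argument setup the new code needs; kvmppc_load_fp also restores r4 from r31 before returning, since its callers still expect the vcpu pointer there. As a sketch of what the save-side helper does, here is roughly the shape of store_fp_state in arch/powerpc/kernel/fpu.S of the same era (comments are mine; worth re-checking against the tree):

	/* Sketch of the shared helper this patch calls into.
	 * SAVE_32FPVSRS expands to stfd or stxvd2x via CPU feature
	 * sections, which is what lets the patch delete the
	 * open-coded CONFIG_VSX alternatives above.
	 */
_GLOBAL(store_fp_state)
	SAVE_32FPVSRS(0, R4, R3)	/* store fp/vsx regs 0-31 at r3 */
	mffs	fr0
	stfd	fr0,FPSTATE_FPSCR(r3)	/* then the fpscr */
	blr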