@@ -1261,7 +1261,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_ARCH_206)
 
 	/* save FP state */
 	mr	r3, r9
-	bl	.kvmppc_save_fp
+	bl	kvmppc_save_fp
 
 	/* Increment yield count if they have a VPA */
 	ld	r8, VCPU_VPA(r9)	/* do they have a VPA? */
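The dot dropped from the call target here matches the label change in the kvmppc_save_fp hunk below: under the 64-bit ELF ABIv1 this code targets, _GLOBAL() emits a function descriptor named kvmppc_save_fp plus a dot-prefixed code entry .kvmppc_save_fp, and bl must branch to the code entry. Once kvmppc_save_fp becomes a plain text label, callers branch to the undotted name directly. As a rough sketch, assuming the ppc_asm.h of this era (details approximate), _GLOBAL(name) expands to:

	.section ".text"
	.align	2
	.globl	name
	.globl	.name
	.section ".opd","aw"	# function descriptor: code addr, TOC, env
name:
	.quad	.name
	.quad	.TOC.@tocbase
	.quad	0
	.previous
	.type	.name,@function
.name:				# the actual code entry that bl targets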
@@ -1691,7 +1691,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_ARCH_206)
 	std	r31, VCPU_GPR(R31)(r3)
 
 	/* save FP state */
-	bl	.kvmppc_save_fp
+	bl	kvmppc_save_fp
 
 	/*
 	 * Take a nap until a decrementer or external interrupt occurs,
@@ -1869,8 +1869,12 @@ kvmppc_read_intr:
 /*
  * Save away FP, VMX and VSX registers.
  * r3 = vcpu pointer
+ * N.B. r30 and r31 are volatile across this function,
+ *      thus it is not callable from C.
  */
-_GLOBAL(kvmppc_save_fp)
+kvmppc_save_fp:
+	mflr	r30
+	mr	r31,r3
 	mfmsr	r5
 	ori	r8,r5,MSR_FP
 #ifdef CONFIG_ALTIVEC
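The new N.B. comment is load-bearing: the 64-bit PowerPC ELF ABI makes r30, r31 and LR non-volatile, so a C caller would expect them back intact. kvmppc_save_fp now deliberately parks LR in r30 and the vcpu pointer in r31 so both survive its own bl calls without a stack frame, which is exactly why only assembly callers that know about the clobbers may use it. A minimal sketch of the pattern, with .some_helper and SOME_OFFSET as placeholders:

	mflr	r30			# LR is clobbered by the bl below
	mr	r31,r3			# so is r3 (it becomes an argument)
	addi	r3,r31,SOME_OFFSET	# argument for the helper
	bl	.some_helper		# clobbers LR and volatile GPRs
	mtlr	r30			# restore the return address
	blr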
@@ -1885,52 +1889,30 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX)
 #endif
 	mtmsrd	r8
 	isync
-#ifdef CONFIG_VSX
-BEGIN_FTR_SECTION
-	reg = 0
-	.rept	32
-	li	r6,reg*16+VCPU_FPRS
-	STXVD2X(reg,R6,R3)
-	reg = reg + 1
-	.endr
-FTR_SECTION_ELSE
-#endif
-	reg = 0
-	.rept	32
-	stfd	reg,reg*8+VCPU_FPRS(r3)
-	reg = reg + 1
-	.endr
-#ifdef CONFIG_VSX
-ALT_FTR_SECTION_END_IFSET(CPU_FTR_VSX)
-#endif
-	mffs	fr0
-	stfd	fr0,VCPU_FPSCR(r3)
-
+	addi	r3,r3,VCPU_FPRS
+	bl	.store_fp_state
 #ifdef CONFIG_ALTIVEC
 BEGIN_FTR_SECTION
-	reg = 0
-	.rept	32
-	li	r6,reg*16+VCPU_VRS
-	stvx	reg,r6,r3
-	reg = reg + 1
-	.endr
-	mfvscr	vr0
-	li	r6,VCPU_VSCR
-	stvx	vr0,r6,r3
+	addi	r3,r31,VCPU_VRS
+	bl	.store_vr_state
 END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 #endif
 	mfspr	r6,SPRN_VRSAVE
 	stw	r6,VCPU_VRSAVE(r3)
+	mtlr	r30
 	mtmsrd	r5
 	isync
 	blr
 
 /*
  * Load up FP, VMX and VSX registers
  * r4 = vcpu pointer
+ * N.B. r30 and r31 are volatile across this function,
+ *      thus it is not callable from C.
  */
-	.globl	kvmppc_load_fp
 kvmppc_load_fp:
+	mflr	r30
+	mr	r31,r4
 	mfmsr	r9
 	ori	r8,r9,MSR_FP
 #ifdef CONFIG_ALTIVEC
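For reference, each deleted .rept block was a compile-time unrolled loop: reg is an assembler symbol, so the 32 iterations each emitted one store with constant operands. The VSX branch of the old save path, for instance, expanded to roughly:

	li	r6,0*16+VCPU_FPRS	# reg = 0
	STXVD2X(0,R6,R3)
	li	r6,1*16+VCPU_FPRS	# reg = 1
	STXVD2X(1,R6,R3)
	# ... and so on through reg = 31, then FPSCR via mffs/stfd

Note also that the retained stw r6,VCPU_VRSAVE(r3) now executes after r3 has been repointed by the addi above it; a follow-up fix switched the VRSAVE accesses in these two functions to the saved vcpu pointer.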
@@ -1945,42 +1927,18 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX)
 #endif
 	mtmsrd	r8
 	isync
-	lfd	fr0,VCPU_FPSCR(r4)
-	MTFSF_L(fr0)
-#ifdef CONFIG_VSX
-BEGIN_FTR_SECTION
-	reg = 0
-	.rept	32
-	li	r7,reg*16+VCPU_FPRS
-	LXVD2X(reg,R7,R4)
-	reg = reg + 1
-	.endr
-FTR_SECTION_ELSE
-#endif
-	reg = 0
-	.rept	32
-	lfd	reg,reg*8+VCPU_FPRS(r4)
-	reg = reg + 1
-	.endr
-#ifdef CONFIG_VSX
-ALT_FTR_SECTION_END_IFSET(CPU_FTR_VSX)
-#endif
-
+	addi	r3,r4,VCPU_FPRS
+	bl	.load_fp_state
 #ifdef CONFIG_ALTIVEC
 BEGIN_FTR_SECTION
-	li	r7,VCPU_VSCR
-	lvx	vr0,r7,r4
-	mtvscr	vr0
-	reg = 0
-	.rept	32
-	li	r7,reg*16+VCPU_VRS
-	lvx	reg,r7,r4
-	reg = reg + 1
-	.endr
+	addi	r3,r31,VCPU_VRS
+	bl	.load_vr_state
 END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 #endif
 	lwz	r7,VCPU_VRSAVE(r4)
 	mtspr	SPRN_VRSAVE,r7
+	mtlr	r30
+	mr	r4,r31
 	blr
 
 /*
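The helpers this patch calls are the generic state movers in arch/powerpc/kernel/fpu.S and vector.S: each takes in r3 a pointer to the register block rather than the vcpu (hence the addi to form the address of the FP or VR area), and each also transfers FPSCR or VSCR, which is why the open-coded mffs/mtvscr sequences could go. Roughly, assuming the fpu.S of this series, the FP pair looks like:

	_GLOBAL(store_fp_state)
	SAVE_32FPVSRS(0, R4, R3)	# store f0-f31 (or vsr0-31) at 0(r3)
	mffs	fr0
	stfd	fr0,FPSTATE_FPSCR(r3)	# FPSCR follows the register array
	blr

	_GLOBAL(load_fp_state)
	lfd	fr0,FPSTATE_FPSCR(r3)
	MTFSF_L(fr0)
	REST_32FPVSRS(0, R4, R3)
	blr

The SAVE/REST macros scratch r4, which is why kvmppc_load_fp ends with mr r4,r31: its callers still expect the vcpu pointer in r4.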