Skip to content

Commit 595e4f7

Browse files
paulusmack authored and agraf committed
KVM: PPC: Book3S HV: Use load/store_fp_state functions in HV guest entry/exit
This modifies kvmppc_load_fp and kvmppc_save_fp to use the generic FP/VSX and VMX load/store functions instead of open-coding the FP/VSX/VMX load/store instructions. Since kvmppc_load/save_fp don't follow C calling conventions, we make them private symbols within book3s_hv_rmhandlers.S. Signed-off-by: Paul Mackerras <[email protected]> Signed-off-by: Alexander Graf <[email protected]>
1 parent 99dae3b commit 595e4f7

File tree

2 files changed

+22
-66
lines changed

2 files changed

+22
-66
lines changed

arch/powerpc/kernel/asm-offsets.c

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -426,10 +426,8 @@ int main(void)
426426
DEFINE(VCPU_GPRS, offsetof(struct kvm_vcpu, arch.gpr));
427427
DEFINE(VCPU_VRSAVE, offsetof(struct kvm_vcpu, arch.vrsave));
428428
DEFINE(VCPU_FPRS, offsetof(struct kvm_vcpu, arch.fp.fpr));
429-
DEFINE(VCPU_FPSCR, offsetof(struct kvm_vcpu, arch.fp.fpscr));
430429
#ifdef CONFIG_ALTIVEC
431430
DEFINE(VCPU_VRS, offsetof(struct kvm_vcpu, arch.vr.vr));
432-
DEFINE(VCPU_VSCR, offsetof(struct kvm_vcpu, arch.vr.vscr));
433431
#endif
434432
DEFINE(VCPU_XER, offsetof(struct kvm_vcpu, arch.xer));
435433
DEFINE(VCPU_CTR, offsetof(struct kvm_vcpu, arch.ctr));

arch/powerpc/kvm/book3s_hv_rmhandlers.S

Lines changed: 22 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -1261,7 +1261,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_ARCH_206)
12611261

12621262
/* save FP state */
12631263
mr r3, r9
1264-
bl .kvmppc_save_fp
1264+
bl kvmppc_save_fp
12651265

12661266
/* Increment yield count if they have a VPA */
12671267
ld r8, VCPU_VPA(r9) /* do they have a VPA? */
@@ -1691,7 +1691,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_ARCH_206)
16911691
std r31, VCPU_GPR(R31)(r3)
16921692

16931693
/* save FP state */
1694-
bl .kvmppc_save_fp
1694+
bl kvmppc_save_fp
16951695

16961696
/*
16971697
* Take a nap until a decrementer or external interrupt occurs,
@@ -1869,8 +1869,12 @@ kvmppc_read_intr:
18691869
/*
18701870
* Save away FP, VMX and VSX registers.
18711871
* r3 = vcpu pointer
1872+
* N.B. r30 and r31 are volatile across this function,
1873+
* thus it is not callable from C.
18721874
*/
1873-
_GLOBAL(kvmppc_save_fp)
1875+
kvmppc_save_fp:
1876+
mflr r30
1877+
mr r31,r3
18741878
mfmsr r5
18751879
ori r8,r5,MSR_FP
18761880
#ifdef CONFIG_ALTIVEC
@@ -1885,52 +1889,30 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX)
18851889
#endif
18861890
mtmsrd r8
18871891
isync
1888-
#ifdef CONFIG_VSX
1889-
BEGIN_FTR_SECTION
1890-
reg = 0
1891-
.rept 32
1892-
li r6,reg*16+VCPU_FPRS
1893-
STXVD2X(reg,R6,R3)
1894-
reg = reg + 1
1895-
.endr
1896-
FTR_SECTION_ELSE
1897-
#endif
1898-
reg = 0
1899-
.rept 32
1900-
stfd reg,reg*8+VCPU_FPRS(r3)
1901-
reg = reg + 1
1902-
.endr
1903-
#ifdef CONFIG_VSX
1904-
ALT_FTR_SECTION_END_IFSET(CPU_FTR_VSX)
1905-
#endif
1906-
mffs fr0
1907-
stfd fr0,VCPU_FPSCR(r3)
1908-
1892+
addi r3,r3,VCPU_FPRS
1893+
bl .store_fp_state
19091894
#ifdef CONFIG_ALTIVEC
19101895
BEGIN_FTR_SECTION
1911-
reg = 0
1912-
.rept 32
1913-
li r6,reg*16+VCPU_VRS
1914-
stvx reg,r6,r3
1915-
reg = reg + 1
1916-
.endr
1917-
mfvscr vr0
1918-
li r6,VCPU_VSCR
1919-
stvx vr0,r6,r3
1896+
addi r3,r31,VCPU_VRS
1897+
bl .store_vr_state
19201898
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
19211899
#endif
19221900
mfspr r6,SPRN_VRSAVE
19231901
stw r6,VCPU_VRSAVE(r3)
1902+
mtlr r30
19241903
mtmsrd r5
19251904
isync
19261905
blr
19271906

19281907
/*
19291908
* Load up FP, VMX and VSX registers
19301909
* r4 = vcpu pointer
1910+
* N.B. r30 and r31 are volatile across this function,
1911+
* thus it is not callable from C.
19311912
*/
1932-
.globl kvmppc_load_fp
19331913
kvmppc_load_fp:
1914+
mflr r30
1915+
mr r31,r4
19341916
mfmsr r9
19351917
ori r8,r9,MSR_FP
19361918
#ifdef CONFIG_ALTIVEC
@@ -1945,42 +1927,18 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX)
19451927
#endif
19461928
mtmsrd r8
19471929
isync
1948-
lfd fr0,VCPU_FPSCR(r4)
1949-
MTFSF_L(fr0)
1950-
#ifdef CONFIG_VSX
1951-
BEGIN_FTR_SECTION
1952-
reg = 0
1953-
.rept 32
1954-
li r7,reg*16+VCPU_FPRS
1955-
LXVD2X(reg,R7,R4)
1956-
reg = reg + 1
1957-
.endr
1958-
FTR_SECTION_ELSE
1959-
#endif
1960-
reg = 0
1961-
.rept 32
1962-
lfd reg,reg*8+VCPU_FPRS(r4)
1963-
reg = reg + 1
1964-
.endr
1965-
#ifdef CONFIG_VSX
1966-
ALT_FTR_SECTION_END_IFSET(CPU_FTR_VSX)
1967-
#endif
1968-
1930+
addi r3,r4,VCPU_FPRS
1931+
bl .load_fp_state
19691932
#ifdef CONFIG_ALTIVEC
19701933
BEGIN_FTR_SECTION
1971-
li r7,VCPU_VSCR
1972-
lvx vr0,r7,r4
1973-
mtvscr vr0
1974-
reg = 0
1975-
.rept 32
1976-
li r7,reg*16+VCPU_VRS
1977-
lvx reg,r7,r4
1978-
reg = reg + 1
1979-
.endr
1934+
addi r3,r31,VCPU_VRS
1935+
bl .load_vr_state
19801936
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
19811937
#endif
19821938
lwz r7,VCPU_VRSAVE(r4)
19831939
mtspr SPRN_VRSAVE,r7
1940+
mtlr r30
1941+
mr r4,r31
19841942
blr
19851943

19861944
/*

0 commit comments

Comments (0)