2 * This program is free software; you can redistribute it and/or modify
3 * it under the terms of the GNU General Public License, version 2, as
4 * published by the Free Software Foundation.
6 * This program is distributed in the hope that it will be useful,
7 * but WITHOUT ANY WARRANTY; without even the implied warranty of
8 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 * GNU General Public License for more details.
11 * You should have received a copy of the GNU General Public License
12 * along with this program; if not, write to the Free Software
13 * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
15 * Copyright SUSE Linux Products GmbH 2009
17 * Authors: Alexander Graf <agraf@suse.de>
20 #include <asm/asm-compat.h>
/*
 * Layout of one shadow SLB entry: 16 bytes total, with the ESID doubleword
 * at offset 0 and the VSID doubleword at offset 8.
 * NOTE(review): presumably this mirrors the slb_shadow save-area layout
 * shared with the hypervisor — confirm against struct slb_shadow.
 *
 * The macro argument is parenthesized so an expression argument such as
 * OFFSET_ESID(i + 1) expands correctly.
 */
#define SHADOW_SLB_ENTRY_LEN	0x10
#define OFFSET_ESID(x)		(SHADOW_SLB_ENTRY_LEN * (x))
#define OFFSET_VSID(x)		((SHADOW_SLB_ENTRY_LEN * (x)) + 8)
26 /******************************************************************************
30 *****************************************************************************/
/*
 * LOAD_GUEST_SEGMENTS: install the guest's shadow SLB entries into the
 * hardware SLB before entering the guest.
 *
 * NOTE(review): this excerpt is fragmentary — many interior lines and the
 * closing .endm are missing, so the comments below describe only what is
 * visible here. r3 appears to point at the shadow vcpu; confirm against
 * the full source.
 */
32 .macro LOAD_GUEST_SEGMENTS
/* Continuation of a register-usage comment whose opening lines are missing
 * from this excerpt: */
41 * all other volatile GPRS = free except R4, R6
42 * SVCPU[CR] = guest CR
43 * SVCPU[XER] = guest XER
44 * SVCPU[CTR] = guest CTR
45 * SVCPU[LR] = guest LR
/* While the hardware SLB is being rewritten, tell the hypervisor the
 * shadow buffer has no entries it needs to keep in sync. */
50 /* Declare SLB shadow as 0 entries big */
/* r11 = this CPU's SLB shadow buffer, read from the PACA (r13). */
52 ld r11, PACA_SLBSHADOWPTR(r13)
/* End of a firmware-feature alternate section (its FW_FTR_SECTION start is
 * not visible in this excerpt); the code above is patched in only when
 * running under an LPAR hypervisor. */
56 END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)
64 /* Fill SLB with our shadow */
/* r12 = shadow vcpu's slb_max byte (number of shadow SLB slots in use). */
66 lbz r12, SVCPU_SLB_MAX(r3)
/* Turn that count into an offset within the shadow vcpu struct; the
 * intervening scaling instruction(s) are missing from this excerpt. */
68 addi r12, r12, SVCPU_SLB
71 /* for (r11 = kvm_slb; r11 < kvm_slb + kvm_slb_size; r11+=slb_entry) */
/* Test the valid bit (high half of the ESID word in r10); skip entries
 * that are not valid. The loop body and slb_loop_enter_skip label are not
 * visible in this excerpt. */
79 andis. r9, r10, SLB_ESID_V@h
80 beq slb_loop_enter_skip
94 /******************************************************************************
98 *****************************************************************************/
/*
 * LOAD_HOST_SEGMENTS: on guest exit, flush the guest's SLB entries and
 * restore the host's bolted entries from the shadow buffer.
 *
 * NOTE(review): this excerpt is fragmentary — many interior lines and the
 * closing .endm are missing; comments below cover only what is visible.
 */
100 .macro LOAD_HOST_SEGMENTS
102 /* Register usage at this point:
106 * R12 = exit handler id
107 * R13 = shadow vcpu - SHADOW_VCPU_OFF [=PACA on PPC64]
109 * SVCPU[CR] = guest CR
110 * SVCPU[XER] = guest XER
111 * SVCPU[CTR] = guest CTR
112 * SVCPU[LR] = guest LR
/* First throw away every SLB entry the guest installed (the flush
 * instructions themselves are missing from this excerpt). */
116 /* Remove all SLB entries that are in use. */
122 /* Restore bolted entries from the shadow */
/* r11 = this CPU's SLB shadow buffer, read from the PACA (r13). */
124 ld r11, PACA_SLBSHADOWPTR(r13)
/* Re-advertise the shadow buffer as holding the host's bolted entries so
 * the hypervisor keeps them synchronized again. */
128 /* Declare SLB shadow as SLB_NUM_BOLTED entries big */
130 li r8, SLB_NUM_BOLTED
/* End of a firmware-feature alternate section (start not visible here);
 * active only when running under an LPAR hypervisor. */
133 END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)
135 /* Manually load all entries from shadow SLB */
/* r8/r7 = offsets of the ESID and VSID words of the first save-area slot
 * (VSID follows ESID by 8 bytes, matching OFFSET_VSID above). */
137 li r8, SLBSHADOW_SAVEAREA
138 li r7, SLBSHADOW_SAVEAREA + 8
/* Loop: advance both offsets by one 16-byte shadow entry per iteration.
 * The loop body between the "1:" label's first use and the branch back is
 * not visible in this excerpt. */
146 1: addi r7, r7, SHADOW_SLB_ENTRY_LEN
147 addi r8, r8, SHADOW_SLB_ENTRY_LEN