/*
 * Split spinlock implementation out into its own file, so it can be
 * compiled in a FTRACE-compatible way.
 */
#include <linux/spinlock.h>
#include <linux/export.h>
#include <linux/jump_label.h>

#include <asm/paravirt.h>
/*
 * Native unlock of a queued spinlock, wrapped in a plain function so
 * PV_CALLEE_SAVE_REGS_THUNK can generate a callee-save entry point
 * (__raw_callee_save___native_queued_spin_unlock) suitable for use as
 * a paravirt op.
 */
__visible void __native_queued_spin_unlock(struct qspinlock *lock)
{
	native_queued_spin_unlock(lock);
}
PV_CALLEE_SAVE_REGS_THUNK(__native_queued_spin_unlock);
16 | ||
17 | bool pv_is_native_spin_unlock(void) | |
18 | { | |
19 | return pv_lock_ops.queued_spin_unlock.func == | |
20 | __raw_callee_save___native_queued_spin_unlock; | |
21 | } | |
f233f7f1 | 22 | |
/*
 * Native query for "is this vCPU preempted?".  On bare metal there is
 * no hypervisor to preempt a CPU, so this always reports false; the
 * callee-save thunk makes it usable as the vcpu_is_preempted pv op.
 */
__visible bool __native_vcpu_is_preempted(long cpu)
{
	return false;
}
PV_CALLEE_SAVE_REGS_THUNK(__native_vcpu_is_preempted);
29 | bool pv_is_native_vcpu_is_preempted(void) | |
446f3dc8 | 30 | { |
3cded417 PZ |
31 | return pv_lock_ops.vcpu_is_preempted.func == |
32 | __raw_callee_save___native_vcpu_is_preempted; | |
446f3dc8 PX |
33 | } |
34 | ||
/*
 * Default (native) paravirt lock ops.  On SMP, the slowpath and
 * unlock come straight from the native qspinlock implementation;
 * .wait and .kick are nops since there is no hypervisor to sleep in
 * or kick a vCPU from.  On UP the structure is left empty.
 */
struct pv_lock_ops pv_lock_ops = {
#ifdef CONFIG_SMP
	.queued_spin_lock_slowpath = native_queued_spin_lock_slowpath,
	.queued_spin_unlock = PV_CALLEE_SAVE(__native_queued_spin_unlock),
	.wait = paravirt_nop,
	.kick = paravirt_nop,
	.vcpu_is_preempted = PV_CALLEE_SAVE(__native_vcpu_is_preempted),
#endif /* SMP */
};
EXPORT_SYMBOL(pv_lock_ops);