/*
* Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
*/
#ifndef _ASM_POWERPC_SWITCH_TO_H
#define _ASM_POWERPC_SWITCH_TO_H

#include <asm/reg.h>

struct thread_struct;
struct task_struct;
struct pt_regs;
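
/*
 * Low-level context switch.  __switch_to() returns the previously running
 * task, which the switch_to() macro hands back to the caller in 'last'.
 */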
extern struct task_struct *__switch_to(struct task_struct *,
	struct task_struct *);
#define switch_to(prev, next, last)	((last) = __switch_to((prev), (next)))

extern struct task_struct *_switch(struct thread_struct *prev,
				   struct thread_struct *next);

extern void enable_kernel_fp(void);
extern void enable_kernel_altivec(void);
extern void enable_kernel_vsx(void);
extern int emulate_altivec(struct pt_regs *);
extern void __giveup_vsx(struct task_struct *);
extern void giveup_vsx(struct task_struct *);
extern void enable_kernel_spe(void);
extern void load_up_spe(struct task_struct *);
extern void giveup_all(struct task_struct *);
extern void switch_booke_debug_regs(struct debug_reg *new_debug);
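
/*
 * Lazy register-state helpers.  flush_*_to_thread() pushes a task's live
 * unit state out to its thread_struct, giveup_*() takes the unit away from
 * the task, and enable_kernel_*()/disable_kernel_*() bracket kernel use of
 * the unit.  When a unit is not configured, its flush/giveup helpers below
 * become empty stubs.
 */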
#ifdef CONFIG_PPC_FPU
extern void flush_fp_to_thread(struct task_struct *);
extern void giveup_fpu(struct task_struct *);
extern void __giveup_fpu(struct task_struct *);
static inline void disable_kernel_fp(void)
{
	msr_check_and_clear(MSR_FP);
}
#else
static inline void flush_fp_to_thread(struct task_struct *t) { }
static inline void giveup_fpu(struct task_struct *t) { }
static inline void __giveup_fpu(struct task_struct *t) { }
#endif
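
/*
 * Typical pattern for kernel-side FP use (illustrative; callers are
 * expected to keep preemption disabled across the region):
 *
 *	enable_kernel_fp();
 *	... code that uses floating-point registers ...
 *	disable_kernel_fp();
 */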
#ifdef CONFIG_ALTIVEC
extern void flush_altivec_to_thread(struct task_struct *);
extern void giveup_altivec(struct task_struct *);
extern void __giveup_altivec(struct task_struct *);
static inline void disable_kernel_altivec(void)
{
	msr_check_and_clear(MSR_VEC);
}
#else
static inline void flush_altivec_to_thread(struct task_struct *t) { }
static inline void giveup_altivec(struct task_struct *t) { }
static inline void __giveup_altivec(struct task_struct *t) { }
#endif
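
/*
 * VSX overlays the FP and VMX register files, so turning kernel VSX off
 * clears MSR_FP, MSR_VEC and MSR_VSX together.
 */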
#ifdef CONFIG_VSX
extern void flush_vsx_to_thread(struct task_struct *);
static inline void disable_kernel_vsx(void)
{
	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
}
#else
static inline void flush_vsx_to_thread(struct task_struct *t)
{
}
#endif

#ifdef CONFIG_SPE
extern void flush_spe_to_thread(struct task_struct *);
extern void giveup_spe(struct task_struct *);
extern void __giveup_spe(struct task_struct *);
static inline void disable_kernel_spe(void)
{
	msr_check_and_clear(MSR_SPE);
}
#else
static inline void flush_spe_to_thread(struct task_struct *t) { }
static inline void giveup_spe(struct task_struct *t) { }
static inline void __giveup_spe(struct task_struct *t) { }
#endif
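
/*
 * Reset Event-Based Branch (EBB) and related PMU state for a new task;
 * EBB perf events are not inherited across fork.
 */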
static inline void clear_task_ebb(struct task_struct *t)
{
#ifdef CONFIG_PPC_BOOK3S_64
	/* EBB perf events are not inherited, so clear all EBB state. */
	t->thread.ebbrr = 0;
	t->thread.ebbhr = 0;
	t->thread.bescr = 0;
	t->thread.mmcr2 = 0;
	t->thread.mmcr0 = 0;
	t->thread.siar = 0;
	t->thread.sdar = 0;
	t->thread.sier = 0;
	t->thread.used_ebb = 0;
#endif
}

#endif /* _ASM_POWERPC_SWITCH_TO_H */