#ifndef _FPU_INTERNAL_H
#define _FPU_INTERNAL_H

#include <linux/slab.h>

#include <asm/cpufeature.h>
#include <asm/processor.h>
#include <asm/sigcontext.h>
#include <asm/uaccess.h>
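/* On 32-bit kernels the ia32 signal/FPU structures are simply the native ones: */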
# define user_i387_ia32_struct	user_i387_struct
# define user32_fxsr_struct	user_fxsr_struct
# define ia32_setup_frame	__setup_frame
# define ia32_setup_rt_frame	__setup_rt_frame
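/*
 * xstateregs_active == fpregs_active; see the comment at the
 * definition of fpregs_active.
 */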
#define xstateregs_active	fpregs_active
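/*
 * HAVE_HWFP: with math emulation configured, hardware FP presence
 * depends on the boot CPU's hard_math flag; otherwise it is assumed.
 */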
#ifdef CONFIG_MATH_EMULATION
# define HAVE_HWFP		(boot_cpu_data.hard_math)
#else
# define HAVE_HWFP		1
#endif
static inline int is_ia32_compat_frame(void)
{
	return config_enabled(CONFIG_IA32_EMULATION) &&
	       test_thread_flag(TIF_IA32);
}

static inline int is_ia32_frame(void)
{
	return config_enabled(CONFIG_X86_32) || is_ia32_compat_frame();
}

static inline int is_x32_frame(void)
{
	return config_enabled(CONFIG_X86_X32_ABI) && test_thread_flag(TIF_X32);
}
#define X87_FSW_ES (1 << 7)	/* Exception Summary */
static inline void sanitize_i387_state(struct task_struct *tsk)
{
	if (!use_xsaveopt())
		return;
	__sanitize_i387_state(tsk);
}
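/*
 * user_insn() wraps a single FPU instruction that touches user memory:
 * it opens a SMAP window with ASM_STAC/ASM_CLAC and returns -1 in 'err'
 * if the access faults (via the 1b -> 3b exception-table fixup).
 * check_insn() is the same fault-checked pattern for kernel buffers,
 * without the SMAP dance.
 */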
#define user_insn(insn, output, input...)			\
({								\
	int err;						\
	asm volatile(ASM_STAC "\n"				\
		     "1:" #insn "\n\t"				\
		     "2: " ASM_CLAC "\n"			\
		     ".section .fixup,\"ax\"\n"			\
		     "3:  movl $-1,%[err]\n"			\
		     "    jmp  2b\n"				\
		     ".previous\n"				\
		     _ASM_EXTABLE(1b, 3b)			\
		     : [err] "=r" (err), output			\
		     : "0"(0), input);				\
	err;							\
})
#define check_insn(insn, output, input...)			\
({								\
	int err;						\
	asm volatile("1:" #insn "\n\t"				\
		     "2:\n"					\
		     ".section .fixup,\"ax\"\n"			\
		     "3:  movl $-1,%[err]\n"			\
		     "    jmp  2b\n"				\
		     ".previous\n"				\
		     _ASM_EXTABLE(1b, 3b)			\
		     : [err] "=r" (err), output			\
		     : "0"(0), input);				\
	err;							\
})
static inline int fsave_user(struct i387_fsave_struct __user *fx)
{
	return user_insn(fnsave %[fx]; fwait,  [fx] "=m" (*fx), "m" (*fx));
}
static inline int fxsave_user(struct i387_fxsave_struct __user *fx)
{
	if (config_enabled(CONFIG_X86_32))
		return user_insn(fxsave %[fx], [fx] "=m" (*fx), "m" (*fx));
	else if (config_enabled(CONFIG_AS_FXSAVEQ))
		return user_insn(fxsaveq %[fx], [fx] "=m" (*fx), "m" (*fx));

	/* See comment in fpu_fxsave() below. */
	return user_insn(rex64/fxsave (%[fx]), "=m" (*fx), [fx] "R" (fx));
}
static inline int fxrstor_checking(struct i387_fxsave_struct *fx)
{
	if (config_enabled(CONFIG_X86_32))
		return check_insn(fxrstor %[fx], "=m" (*fx), [fx] "m" (*fx));
	else if (config_enabled(CONFIG_AS_FXSAVEQ))
		return check_insn(fxrstorq %[fx], "=m" (*fx), [fx] "m" (*fx));

	/* See comment in fpu_fxsave() below. */
	return check_insn(rex64/fxrstor (%[fx]), "=m" (*fx), [fx] "R" (fx),
			  "m" (*fx));
}
static inline int fxrstor_user(struct i387_fxsave_struct __user *fx)
{
	if (config_enabled(CONFIG_X86_32))
		return user_insn(fxrstor %[fx], "=m" (*fx), [fx] "m" (*fx));
	else if (config_enabled(CONFIG_AS_FXSAVEQ))
		return user_insn(fxrstorq %[fx], "=m" (*fx), [fx] "m" (*fx));

	/* See comment in fpu_fxsave() below. */
	return user_insn(rex64/fxrstor (%[fx]), "=m" (*fx), [fx] "R" (fx),
			  "m" (*fx));
}
static inline void fpu_fxsave(struct fpu *fpu)
{
	if (config_enabled(CONFIG_X86_32))
		asm volatile("fxsave %[fx]" : [fx] "=m" (fpu->state->fxsave));
	else if (config_enabled(CONFIG_AS_FXSAVEQ))
		asm volatile("fxsaveq %0" : "=m" (fpu->state->fxsave));
	else {
		/*
		 * "rex64; fxsave %0" loses the REX prefix's 64-bitness
		 * if the memory operand needs an extended register, and
		 * "fxsaveq" needs a recent assembler, so force a plain
		 * register ("R") addressing mode for the fallback.
		 */
		asm volatile("rex64/fxsave (%[fx])"
			     : "=m" (fpu->state->fxsave)
			     : [fx] "R" (&fpu->state->fxsave));
	}
}
static inline int fpu_save_init(struct fpu *fpu)
{
	if (use_xsave()) {
		fpu_xsave(fpu);

		/* The xsave header may mark the FP state as unused. */
		if (!(fpu->state->xsave.xsave_hdr.xstate_bv & XSTATE_FP))
			return 1;
	} else if (use_fxsr()) {
		fpu_fxsave(fpu);
	} else {
		asm volatile("fnsave %[fx]; fwait"
			     : [fx] "=m" (fpu->state->fsave));
		return 0;
	}

	/* Clear pending x87 exceptions so they don't fire at random later. */
	if (unlikely(fpu->state->fxsave.swd & X87_FSW_ES)) {
		asm volatile("fnclex");
		return 0;
	}
	return 1;
}
static inline int __save_init_fpu(struct task_struct *tsk)
{
	return fpu_save_init(&tsk->thread.fpu);
}
static inline int fpu_restore_checking(struct fpu *fpu)
{
	if (use_xsave())
		return fpu_xrstor_checking(&fpu->state->xsave);
	else if (use_fxsr())
		return fxrstor_checking(&fpu->state->fxsave);
	else
		return frstor_checking(&fpu->state->fsave);
}
static inline int restore_fpu_checking(struct task_struct *tsk)
{
	/*
	 * On CPUs with the FXSAVE-leak erratum, an alternative_input()
	 * sequence first puts FDP/FIP/FOP into a defined state here.
	 */
	return fpu_restore_checking(&tsk->thread.fpu);
}
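/*
 * The software flag tracking whether this task's FPU state is live in
 * the registers; reads and updates need preemption disabled.
 */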
static inline int __thread_has_fpu(struct task_struct *tsk)
{
	return tsk->thread.fpu.has_fpu;
}
/* Must be paired with an 'stts' after! */
static inline void __thread_clear_has_fpu(struct task_struct *tsk)
{
	tsk->thread.fpu.has_fpu = 0;
	this_cpu_write(fpu_owner_task, NULL);
}
/* Must be paired with a 'clts' before! */
static inline void __thread_set_has_fpu(struct task_struct *tsk)
{
	tsk->thread.fpu.has_fpu = 1;
	this_cpu_write(fpu_owner_task, tsk);
}
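/*
 * Encapsulate the CR0.TS handling together with the software flag: in
 * lazy mode, __thread_fpu_end()/__thread_fpu_begin() also set/clear TS
 * via stts()/clts(). These need preemption protection, and callers
 * must do exactly the pairing they need to avoid races.
 */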
static inline void __thread_fpu_end(struct task_struct *tsk)
{
	__thread_clear_has_fpu(tsk);
	if (!use_eager_fpu())
		stts();
}
static inline void __thread_fpu_begin(struct task_struct *tsk)
{
	if (!use_eager_fpu())
		clts();
	__thread_set_has_fpu(tsk);
}
static inline void __drop_fpu(struct task_struct *tsk)
{
	if (__thread_has_fpu(tsk)) {
		/* Ignore delayed exceptions from user space */
		asm volatile("1: fwait\n"
			     "2:\n"
			     _ASM_EXTABLE(1b, 2b));
		__thread_fpu_end(tsk);
	}
}
static inline void drop_fpu(struct task_struct *tsk)
{
	/* Forget coprocessor state.. */
	preempt_disable();
	tsk->fpu_counter = 0;
	__drop_fpu(tsk);
	clear_used_math();
	preempt_enable();
}
static inline void drop_init_fpu(struct task_struct *tsk)
{
	if (!use_eager_fpu())
		drop_fpu(tsk);
	else {
		if (use_xsave())
			xrstor_state(init_xstate_buf, -1);
		else
			fxrstor_checking(&init_xstate_buf->i387);
	}
}
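/*
 * Lazy-restore bookkeeping: fpu_owner_task records whose FPU state a
 * CPU's registers hold, and fpu.last_cpu records where a task's state
 * was last live. When both still match at switch-in, the register
 * restore can be skipped entirely.
 */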
static inline void __cpu_disable_lazy_restore(unsigned int cpu)
{
	per_cpu(fpu_owner_task, cpu) = NULL;
}
static inline int fpu_lazy_restore(struct task_struct *new, unsigned int cpu)
{
	return new == this_cpu_read_stable(fpu_owner_task) &&
	       cpu == new->thread.fpu.last_cpu;
}
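/*
 * FPU state switching for scheduling: switch_fpu_prepare() saves the
 * outgoing task's state and decides whether to preload the incoming
 * task's; switch_fpu_finish() then performs the actual restore.
 */
typedef struct { int preload; } fpu_switch_t;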
static inline fpu_switch_t switch_fpu_prepare(struct task_struct *old,
					      struct task_struct *new, int cpu)
{
	fpu_switch_t fpu;

	/*
	 * Preload if the task has used math and we either eager-switch
	 * or it used math in the last 5 consecutive timeslices.
	 */
	fpu.preload = tsk_used_math(new) && (use_eager_fpu() ||
					     new->fpu_counter > 5);
	if (__thread_has_fpu(old)) {
		if (!__save_init_fpu(old))
			cpu = ~0;
		old->thread.fpu.last_cpu = cpu;
		old->thread.fpu.has_fpu = 0;	/* But leave fpu_owner_task! */

		/* Don't change CR0.TS if we just switch! */
		if (fpu.preload) {
			new->fpu_counter++;
			__thread_set_has_fpu(new);
			prefetch(new->thread.fpu.state);
		} else if (!use_eager_fpu())
			stts();
	} else {
		old->fpu_counter = 0;
		old->thread.fpu.last_cpu = ~0;
		if (fpu.preload) {
			new->fpu_counter++;
			if (!use_eager_fpu() && fpu_lazy_restore(new, cpu))
				fpu.preload = 0;
			else
				prefetch(new->thread.fpu.state);
			__thread_fpu_begin(new);
		}
	}
	return fpu;
}
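/*
 * By the time this gets called, CR0.TS is clear and the new task owns
 * the FPU if we are going to preload; all that is left is to
 * conditionally restore the register state itself.
 */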
static inline void switch_fpu_finish(struct task_struct *new, fpu_switch_t fpu)
{
	if (fpu.preload) {
		if (unlikely(restore_fpu_checking(new)))
			drop_init_fpu(new);
	}
}
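/*
 * Signal frame handlers; the workers live in xsave.c.
 */
extern int save_xstate_sig(void __user *buf, void __user *fx, int size);
extern int __restore_xstate_sig(void __user *buf, void __user *fx, int size);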
static inline int xstate_sigframe_size(void)
{
	return use_xsave() ? xstate_size + FP_XSTATE_MAGIC2_SIZE : xstate_size;
}
static inline int restore_xstate_sig(void __user *buf, int ia32_frame)
{
	void __user *buf_fx = buf;
	int size = xstate_sigframe_size();

	if (ia32_frame && use_fxsr()) {
		/* ia32 frames carry a legacy fsave area ahead of the image. */
		buf_fx = buf + sizeof(struct i387_fsave_struct);
		size += sizeof(struct i387_fsave_struct);
	}

	return __restore_xstate_sig(buf, buf_fx, size);
}
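/*
 * user_fpu_begin() grabs the FPU for current without saving or
 * restoring anything itself, so it must be used only immediately
 * before the register state is restored.
 */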
static inline void user_fpu_begin(void)
{
	preempt_disable();
	if (!user_has_fpu())
		__thread_fpu_begin(current);
	preempt_enable();
}
static inline void __save_fpu(struct task_struct *tsk)
{
	if (use_xsave())
		xsave_state(&tsk->thread.fpu.state->xsave, -1);
	else
		fpu_fxsave(&tsk->thread.fpu);
}
static inline void save_init_fpu(struct task_struct *tsk)
{
	WARN_ON_ONCE(!__thread_has_fpu(tsk));

	if (use_eager_fpu()) {
		__save_fpu(tsk);
		return;
	}

	preempt_disable();
	__save_init_fpu(tsk);
	__thread_fpu_end(tsk);
	preempt_enable();
}
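/*
 * i387 state interaction: the control/status/mxcsr words are read from
 * the fxsave image when FXSR is available, else from the legacy fsave
 * image.
 */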
static inline unsigned short get_fpu_cwd(struct task_struct *tsk)
{
	if (cpu_has_fxsr) {
		return tsk->thread.fpu.state->fxsave.cwd;
	} else {
		return (unsigned short)tsk->thread.fpu.state->fsave.cwd;
	}
}
static inline unsigned short get_fpu_swd(struct task_struct *tsk)
{
	if (cpu_has_fxsr) {
		return tsk->thread.fpu.state->fxsave.swd;
	} else {
		return (unsigned short)tsk->thread.fpu.state->fsave.swd;
	}
}
static inline unsigned short get_fpu_mxcsr(struct task_struct *tsk)
{
	if (cpu_has_xmm) {
		return tsk->thread.fpu.state->fxsave.mxcsr;
	} else {
		return MXCSR_DEFAULT;
	}
}
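/*
 * fpu->state is allocated lazily from task_xstate_cachep; fxsave/xsave
 * need a 16-byte-aligned buffer, hence the alignment check in
 * fpu_alloc().
 */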
static bool fpu_allocated(struct fpu *fpu)
{
	return fpu->state != NULL;
}

static inline int fpu_alloc(struct fpu *fpu)
{
	if (fpu_allocated(fpu))
		return 0;
	fpu->state = kmem_cache_alloc(task_xstate_cachep, GFP_KERNEL);
	if (!fpu->state)
		return -ENOMEM;
	WARN_ON((unsigned long)fpu->state & 15);
	return 0;
}
static inline void fpu_free(struct fpu *fpu)
{
	if (fpu->state) {
		kmem_cache_free(task_xstate_cachep, fpu->state);
		fpu->state = NULL;
	}
}
static inline void fpu_copy(struct task_struct *dst, struct task_struct *src)
{
	if (use_eager_fpu()) {
		memset(&dst->thread.fpu.state->xsave, 0, xstate_size);
		__save_fpu(dst);
	} else {
		struct fpu *dfpu = &dst->thread.fpu;
		struct fpu *sfpu = &src->thread.fpu;

		unlazy_fpu(src);
		memcpy(dfpu->state, sfpu->state, xstate_size);
	}
}
static inline unsigned long
alloc_mathframe(unsigned long sp, int ia32_frame, unsigned long *buf_fx,
		unsigned long *size)
{
	unsigned long frame_size = xstate_sigframe_size();

	/* xsave frames must be 64-byte aligned. */
	*buf_fx = sp = round_down(sp - frame_size, 64);
	if (ia32_frame && use_fxsr()) {
		frame_size += sizeof(struct i387_fsave_struct);
		sp -= sizeof(struct i387_fsave_struct);
	}

	*size = frame_size;
	return sp;
}

#endif