sched/core: Move preempt_model_*() helpers from sched.h to preempt.h
Move the declarations and inlined implementations of the preempt_model_*() helpers to preempt.h so that they can be referenced in spinlock.h without creating a potential circular dependency between spinlock.h and sched.h.

No functional change intended.

Signed-off-by: Sean Christopherson <seanjc@google.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Ankur Arora <ankur.a.arora@oracle.com>
Link: https://lkml.kernel.org/r/20240528003521.979836-2-ankur.a.arora@oracle.com
parent f90cc919f9
commit f0dc887f21
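For context, the point of the move is that spinlock.h can consult the preemption model directly once these helpers live in preempt.h. Below is a hedged sketch of that kind of use; the function name example_spin_needbreak is hypothetical, and the real spinlock.h change belongs to a later patch in this series, not to this one:

/*
 * Illustrative sketch only, not part of this patch: with the helpers
 * visible from preempt.h, spinlock.h can make lock-break decisions
 * depend on whether the running kernel is actually preemptible.
 */
static inline int example_spin_needbreak(spinlock_t *lock)
{
        /* Only bother breaking the lock if the kernel can preempt the holder. */
        if (!preempt_model_preemptible())
                return 0;

        return spin_is_contended(lock);
}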
include/linux/preempt.h
@@ -481,4 +481,45 @@ DEFINE_LOCK_GUARD_0(preempt, preempt_disable(), preempt_enable())
 DEFINE_LOCK_GUARD_0(preempt_notrace, preempt_disable_notrace(), preempt_enable_notrace())
 DEFINE_LOCK_GUARD_0(migrate, migrate_disable(), migrate_enable())
 
+#ifdef CONFIG_PREEMPT_DYNAMIC
+
+extern bool preempt_model_none(void);
+extern bool preempt_model_voluntary(void);
+extern bool preempt_model_full(void);
+
+#else
+
+static inline bool preempt_model_none(void)
+{
+        return IS_ENABLED(CONFIG_PREEMPT_NONE);
+}
+static inline bool preempt_model_voluntary(void)
+{
+        return IS_ENABLED(CONFIG_PREEMPT_VOLUNTARY);
+}
+static inline bool preempt_model_full(void)
+{
+        return IS_ENABLED(CONFIG_PREEMPT);
+}
+
+#endif
+
+static inline bool preempt_model_rt(void)
+{
+        return IS_ENABLED(CONFIG_PREEMPT_RT);
+}
+
+/*
+ * Does the preemption model allow non-cooperative preemption?
+ *
+ * For !CONFIG_PREEMPT_DYNAMIC kernels this is an exact match with
+ * CONFIG_PREEMPTION; for CONFIG_PREEMPT_DYNAMIC this doesn't work as the
+ * kernel is *built* with CONFIG_PREEMPTION=y but may run with e.g. the
+ * PREEMPT_NONE model.
+ */
+static inline bool preempt_model_preemptible(void)
+{
+        return preempt_model_full() || preempt_model_rt();
+}
+
 #endif /* __LINUX_PREEMPT_H */
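The hunk above is a verbatim move of the helpers; their semantics are unchanged. As a usage illustration, a hypothetical caller (example_report_preempt_model is not a real kernel function) can distinguish the active model at runtime, which matters under CONFIG_PREEMPT_DYNAMIC where the build-time Kconfig choice alone no longer decides the answer:

/* Hypothetical example, not part of this patch: log the active preemption model. */
#include <linux/kernel.h>
#include <linux/preempt.h>

static void example_report_preempt_model(void)
{
        if (preempt_model_rt())
                pr_info("preemption model: PREEMPT_RT\n");
        else if (preempt_model_full())
                pr_info("preemption model: full (PREEMPT)\n");
        else if (preempt_model_voluntary())
                pr_info("preemption model: voluntary\n");
        else
                pr_info("preemption model: none\n");
}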
include/linux/sched.h
@@ -2064,47 +2064,6 @@ extern int __cond_resched_rwlock_write(rwlock_t *lock);
 	__cond_resched_rwlock_write(lock);	\
 })
 
-#ifdef CONFIG_PREEMPT_DYNAMIC
-
-extern bool preempt_model_none(void);
-extern bool preempt_model_voluntary(void);
-extern bool preempt_model_full(void);
-
-#else
-
-static inline bool preempt_model_none(void)
-{
-        return IS_ENABLED(CONFIG_PREEMPT_NONE);
-}
-static inline bool preempt_model_voluntary(void)
-{
-        return IS_ENABLED(CONFIG_PREEMPT_VOLUNTARY);
-}
-static inline bool preempt_model_full(void)
-{
-        return IS_ENABLED(CONFIG_PREEMPT);
-}
-
-#endif
-
-static inline bool preempt_model_rt(void)
-{
-        return IS_ENABLED(CONFIG_PREEMPT_RT);
-}
-
-/*
- * Does the preemption model allow non-cooperative preemption?
- *
- * For !CONFIG_PREEMPT_DYNAMIC kernels this is an exact match with
- * CONFIG_PREEMPTION; for CONFIG_PREEMPT_DYNAMIC this doesn't work as the
- * kernel is *built* with CONFIG_PREEMPTION=y but may run with e.g. the
- * PREEMPT_NONE model.
- */
-static inline bool preempt_model_preemptible(void)
-{
-        return preempt_model_full() || preempt_model_rt();
-}
-
 static __always_inline bool need_resched(void)
 {
 	return unlikely(tif_need_resched());