/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <mesosphere/kern_select_cpu.hpp>
#include <mesosphere/kern_k_thread.hpp>
#include <mesosphere/kern_k_priority_queue.hpp>
#include <mesosphere/kern_k_scheduler_lock.hpp>

namespace ams::kern {

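    /* Priority queue type used by the scheduler: it holds KThreads for every core, across the full svc thread-priority range. */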
    using KSchedulerPriorityQueue = KPriorityQueue<KThread, cpu::NumCores, ams::svc::LowestThreadPriority, ams::svc::HighestThreadPriority>;
    static_assert(std::is_same<KSchedulerPriorityQueue::AffinityMaskType, KAffinityMask>::value);
    static_assert(KSchedulerPriorityQueue::NumCores == cpu::NumCores);
    static_assert(KSchedulerPriorityQueue::NumPriority == BITSIZEOF(u64));

    class KScopedSchedulerLock;

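    /* Per-core scheduler. NOTE: the class is still a stub; see the TODO below. */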
    class KScheduler {
        NON_COPYABLE(KScheduler);
        NON_MOVEABLE(KScheduler);
        public:
            using LockType = KAbstractSchedulerLock<KScheduler>;
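
            /* Scheduling state tracked for one core: whether a reschedule is pending, idle-time accounting, and the thread that should run next. */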
            struct SchedulingState {
                std::atomic<bool> needs_scheduling;
                bool interrupt_task_thread_runnable;
                bool should_count_idle;
                u64 idle_count;
                KThread *highest_priority_thread;
                void *idle_thread_stack;
            };
        private:
            friend class KScopedSchedulerLock;
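            /* The single scheduler lock shared by all cores; KScopedSchedulerLock below acquires and releases it. */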
            static inline LockType s_scheduler_lock;
        private:
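            /* Per-instance members: each KScheduler is tied to one core (core_id) and tracks the previously running thread, the last context switch time, and its idle thread. */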
            SchedulingState state;
            bool is_active;
            s32 core_id;
            KThread *prev_thread;
            u64 last_context_switch_time;
            KThread *idle_thread;
        public:
            KScheduler();
            /* TODO: Actually implement KScheduler. This is a placeholder. */
        public:
            /* API used by KSchedulerLock */
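            /* Presumably driven by LockType: DisableScheduling() when the lock is first acquired, then UpdateHighestPriorityThreads() followed by EnableScheduling()/EnableSchedulingAndSchedule() when the last holder releases it. */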
            static void DisableScheduling();
            static void EnableScheduling();
            static u64 UpdateHighestPriorityThreads();
            static void EnableSchedulingAndSchedule(u64 cores_needing_scheduling);
    };

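    /* RAII guard: constructing a KScopedSchedulerLock acquires the global scheduler lock, and the lock is released again when the guard goes out of scope. */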
    class KScopedSchedulerLock {
        public:
            ALWAYS_INLINE KScopedSchedulerLock() { KScheduler::s_scheduler_lock.Lock(); }
            ALWAYS_INLINE ~KScopedSchedulerLock() { KScheduler::s_scheduler_lock.Unlock(); }
    };
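    /* Illustrative usage sketch (not part of this header; the function name is hypothetical):
     * code that mutates scheduling state is expected to hold the scheduler lock for the
     * duration of the operation, e.g.
     *
     *     void ExampleAdjustThread(KThread *thread) {
     *         KScopedSchedulerLock sl;
     *         // ... modify thread/scheduler state while the scheduler lock is held ...
     *     }   // lock released when sl is destroyed.
     */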

}