Use `MutexLock` in more places
This commit is contained in:
parent fd7239cfab
commit e33fdb4296
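For readers unfamiliar with the pattern this commit applies throughout: `MutexLock` is a scope-bound (RAII) guard that acquires a mutex in its constructor and releases it in its destructor, so every exit path out of the enclosing scope unlocks automatically. A minimal sketch of the idea, using the standard library rather than Godot's actual implementation (a template in core/os/mutex.h):

#include <mutex>

// Simplified stand-in for Godot's MutexLock, for illustration only.
class ScopedLock {
public:
	explicit ScopedLock(std::mutex &p_mutex) :
			mutex(p_mutex) {
		mutex.lock(); // Acquired on construction.
	}
	~ScopedLock() {
		mutex.unlock(); // Released on any scope exit: return, continue, error macro, exception.
	}
	ScopedLock(const ScopedLock &) = delete;
	ScopedLock &operator=(const ScopedLock &) = delete;

private:
	std::mutex &mutex;
};

This is the same idea as `std::lock_guard`; the point of the commit is that manual `lock()`/`unlock()` pairs are easy to break when an early `return` or an `ERR_*` macro is added between them.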
@@ -144,9 +144,8 @@ void RemoteDebuggerPeerTCP::_read_in() {
 			Error err = decode_variant(var, buf, in_pos, &read);
 			ERR_CONTINUE(read != in_pos || err != OK);
 			ERR_CONTINUE_MSG(var.get_type() != Variant::ARRAY, "Malformed packet received, not an Array.");
-			mutex.lock();
+			MutexLock lock(mutex);
 			in_queue.push_back(var);
-			mutex.unlock();
 		}
 	}
 }
@ -60,33 +60,33 @@ void Resource::set_path(const String &p_path, bool p_take_over) {
|
||||||
p_take_over = false; // Can't take over an empty path
|
p_take_over = false; // Can't take over an empty path
|
||||||
}
|
}
|
||||||
|
|
||||||
ResourceCache::lock.lock();
|
{
|
||||||
|
MutexLock lock(ResourceCache::lock);
|
||||||
|
|
||||||
if (!path_cache.is_empty()) {
|
if (!path_cache.is_empty()) {
|
||||||
ResourceCache::resources.erase(path_cache);
|
ResourceCache::resources.erase(path_cache);
|
||||||
}
|
}
|
||||||
|
|
||||||
path_cache = "";
|
path_cache = "";
|
||||||
|
|
||||||
Ref<Resource> existing = ResourceCache::get_ref(p_path);
|
Ref<Resource> existing = ResourceCache::get_ref(p_path);
|
||||||
|
|
||||||
if (existing.is_valid()) {
|
if (existing.is_valid()) {
|
||||||
if (p_take_over) {
|
if (p_take_over) {
|
||||||
existing->path_cache = String();
|
existing->path_cache = String();
|
||||||
ResourceCache::resources.erase(p_path);
|
ResourceCache::resources.erase(p_path);
|
||||||
} else {
|
} else {
|
||||||
ResourceCache::lock.unlock();
|
ERR_FAIL_MSG("Another resource is loaded from path '" + p_path + "' (possible cyclic resource inclusion).");
|
||||||
ERR_FAIL_MSG("Another resource is loaded from path '" + p_path + "' (possible cyclic resource inclusion).");
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
path_cache = p_path;
|
||||||
|
|
||||||
|
if (!path_cache.is_empty()) {
|
||||||
|
ResourceCache::resources[path_cache] = this;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
path_cache = p_path;
|
|
||||||
|
|
||||||
if (!path_cache.is_empty()) {
|
|
||||||
ResourceCache::resources[path_cache] = this;
|
|
||||||
}
|
|
||||||
ResourceCache::lock.unlock();
|
|
||||||
|
|
||||||
_resource_path_changed();
|
_resource_path_changed();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
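The `else` branch above shows the main payoff: `ERR_FAIL_MSG` expands to an early `return`, so the old code had to remember to unlock first, and any future early exit added between `lock()` and `unlock()` would silently leak the lock. With the guard, the unlock is implicit on every path. A hedged before/after sketch (hypothetical function, assuming Godot's `Mutex`, `MutexLock`, and error macros are in scope; not code from this commit):

// Before: every early exit needs a matching manual unlock.
void set_value_unsafe(Mutex &p_mutex, bool p_error) {
	p_mutex.lock();
	if (p_error) {
		p_mutex.unlock(); // Forgetting this line deadlocks the next caller.
		ERR_FAIL_MSG("error");
	}
	p_mutex.unlock();
}

// After: the destructor of `lock` also runs on the hidden return inside ERR_FAIL_MSG.
void set_value_safe(Mutex &p_mutex, bool p_error) {
	MutexLock lock(p_mutex);
	if (p_error) {
		ERR_FAIL_MSG("error");
	}
}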
@@ -486,15 +486,13 @@ void Resource::set_as_translation_remapped(bool p_remapped) {
 		return;
 	}
 
-	ResourceCache::lock.lock();
+	MutexLock lock(ResourceCache::lock);
 
 	if (p_remapped) {
 		ResourceLoader::remapped_list.add(&remapped_list);
 	} else {
 		ResourceLoader::remapped_list.remove(&remapped_list);
 	}
-
-	ResourceCache::lock.unlock();
 }
 
 #ifdef TOOLS_ENABLED
@@ -564,14 +562,13 @@ Resource::~Resource() {
 		return;
 	}
 
-	ResourceCache::lock.lock();
+	MutexLock lock(ResourceCache::lock);
 	// Only unregister from the cache if this is the actual resource listed there.
 	// (Other resources can have the same value in `path_cache` if loaded with `CACHE_IGNORE`.)
 	HashMap<String, Resource *>::Iterator E = ResourceCache::resources.find(path_cache);
 	if (likely(E && E->value == this)) {
 		ResourceCache::resources.remove(E);
 	}
-	ResourceCache::lock.unlock();
 }
 
 HashMap<String, Resource *> ResourceCache::resources;
@@ -600,19 +597,21 @@ void ResourceCache::clear() {
 }
 
 bool ResourceCache::has(const String &p_path) {
-	lock.lock();
+	Resource **res = nullptr;
 
-	Resource **res = resources.getptr(p_path);
+	{
+		MutexLock mutex_lock(lock);
 
-	if (res && (*res)->get_reference_count() == 0) {
-		// This resource is in the process of being deleted, ignore its existence.
-		(*res)->path_cache = String();
-		resources.erase(p_path);
-		res = nullptr;
-	}
+		res = resources.getptr(p_path);
 
-	lock.unlock();
+		if (res && (*res)->get_reference_count() == 0) {
+			// This resource is in the process of being deleted, ignore its existence.
+			(*res)->path_cache = String();
+			resources.erase(p_path);
+			res = nullptr;
+		}
+	}
 
 	if (!res) {
 		return false;
 	}
@@ -622,28 +621,27 @@ bool ResourceCache::has(const String &p_path) {
 
 Ref<Resource> ResourceCache::get_ref(const String &p_path) {
 	Ref<Resource> ref;
-	lock.lock();
+	{
+		MutexLock mutex_lock(lock);
+		Resource **res = resources.getptr(p_path);
 
-	Resource **res = resources.getptr(p_path);
+		if (res) {
+			ref = Ref<Resource>(*res);
+		}
 
-	if (res) {
-		ref = Ref<Resource>(*res);
+		if (res && !ref.is_valid()) {
+			// This resource is in the process of being deleted, ignore its existence
+			(*res)->path_cache = String();
+			resources.erase(p_path);
+			res = nullptr;
+		}
 	}
 
-	if (res && !ref.is_valid()) {
-		// This resource is in the process of being deleted, ignore its existence
-		(*res)->path_cache = String();
-		resources.erase(p_path);
-		res = nullptr;
-	}
-
-	lock.unlock();
-
 	return ref;
 }
 
 void ResourceCache::get_cached_resources(List<Ref<Resource>> *p_resources) {
-	lock.lock();
+	MutexLock mutex_lock(lock);
 
 	LocalVector<String> to_remove;
@@ -663,14 +661,9 @@ void ResourceCache::get_cached_resources(List<Ref<Resource>> *p_resources) {
 	for (const String &E : to_remove) {
 		resources.erase(E);
 	}
-
-	lock.unlock();
 }
 
 int ResourceCache::get_cached_resource_count() {
-	lock.lock();
-	int rc = resources.size();
-	lock.unlock();
-
-	return rc;
+	MutexLock mutex_lock(lock);
+	return resources.size();
 }
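Two idioms recur in the `ResourceCache` rewrites. When only part of a function is the critical section, the guard goes in an explicit `{}` block and the result is hoisted into a variable declared before it; when the whole function is the critical section, the function simply returns while the guard is alive (the return value is computed before the destructor releases the mutex). A sketch with hypothetical names, assuming Godot's `Mutex`, `MutexLock`, `HashMap`, and `String` are in scope:

static Mutex cache_mutex;
static HashMap<String, int> cache_map;

// 1. Explicit scope: the lock is released at the closing brace. Only the
//    null-ness of `ptr` is used afterwards; dereferencing it here would race.
bool has_entry(const String &p_key) {
	int *ptr = nullptr;
	{
		MutexLock lock(cache_mutex);
		ptr = cache_map.getptr(p_key);
	}
	return ptr != nullptr;
}

// 2. Whole-function scope: returning under the guard is safe, and shorter
//    than the old "copy, unlock, return copy" dance.
int entry_count() {
	MutexLock lock(cache_mutex);
	return cache_map.size();
}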
@@ -227,28 +227,27 @@ void ResourceFormatLoader::_bind_methods() {
 
 // This should be robust enough to be called redundantly without issues.
 void ResourceLoader::LoadToken::clear() {
-	thread_load_mutex.lock();
-
 	WorkerThreadPool::TaskID task_to_await = 0;
 
-	// User-facing tokens shouldn't be deleted until completely claimed.
-	DEV_ASSERT(user_rc == 0 && user_path.is_empty());
+	{
+		MutexLock thread_load_lock(thread_load_mutex);
+		// User-facing tokens shouldn't be deleted until completely claimed.
+		DEV_ASSERT(user_rc == 0 && user_path.is_empty());
 
 		if (!local_path.is_empty()) { // Empty is used for the special case where the load task is not registered.
 			DEV_ASSERT(thread_load_tasks.has(local_path));
 			ThreadLoadTask &load_task = thread_load_tasks[local_path];
 			if (load_task.task_id && !load_task.awaited) {
 				task_to_await = load_task.task_id;
+			}
+			// Removing a task which is still in progress would be catastrophic.
+			// Tokens must be alive until the task thread function is done.
+			DEV_ASSERT(load_task.status == THREAD_LOAD_FAILED || load_task.status == THREAD_LOAD_LOADED);
+			thread_load_tasks.erase(local_path);
+			local_path.clear();
 		}
-		// Removing a task which is still in progress would be catastrophic.
-		// Tokens must be alive until the task thread function is done.
-		DEV_ASSERT(load_task.status == THREAD_LOAD_FAILED || load_task.status == THREAD_LOAD_LOADED);
-		thread_load_tasks.erase(local_path);
-		local_path.clear();
 	}
 
-	thread_load_mutex.unlock();
-
 	// If task is unused, await it here, locally, now the token data is consistent.
 	if (task_to_await) {
 		PREPARE_FOR_WTP_WAIT
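`LoadToken::clear()` also shows why the guard is scoped to a block rather than the whole function: the wait at the end may block until a worker thread finishes, and that worker may itself need `thread_load_mutex`, so waiting while holding it could deadlock. The shape, sketched with a hypothetical token type and assuming Godot's `WorkerThreadPool` API:

void Token::clear_sketch() {
	WorkerThreadPool::TaskID task_to_await = 0;

	{
		MutexLock lock(state_mutex); // Mutate shared state under the lock...
		task_to_await = pending_task_id; // ...and take a consistent snapshot.
		pending_task_id = 0;
	} // Guard released here.

	// Block only after the mutex is released.
	if (task_to_await) {
		WorkerThreadPool::get_singleton()->wait_for_task_completion(task_to_await);
	}
}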
@@ -265,7 +264,7 @@ Ref<Resource> ResourceLoader::_load(const String &p_path, const String &p_origin
 	const String &original_path = p_original_path.is_empty() ? p_path : p_original_path;
 	load_nesting++;
 	if (load_paths_stack.size()) {
-		thread_load_mutex.lock();
+		MutexLock thread_load_lock(thread_load_mutex);
 		const String &parent_task_path = load_paths_stack.get(load_paths_stack.size() - 1);
 		HashMap<String, ThreadLoadTask>::Iterator E = thread_load_tasks.find(parent_task_path);
 		// Avoid double-tracking, for progress reporting, resources that boil down to a remapped path containing the real payload (e.g., imported resources).
@@ -273,7 +272,6 @@ Ref<Resource> ResourceLoader::_load(const String &p_path, const String &p_origin
 		if (E && !is_remapped_load) {
 			E->value.sub_tasks.insert(p_original_path);
 		}
-		thread_load_mutex.unlock();
 	}
 	load_paths_stack.push_back(original_path);
 
@@ -318,13 +316,13 @@ Ref<Resource> ResourceLoader::_load(const String &p_path, const String &p_origin
 void ResourceLoader::_run_load_task(void *p_userdata) {
 	ThreadLoadTask &load_task = *(ThreadLoadTask *)p_userdata;
 
-	thread_load_mutex.lock();
-	if (cleaning_tasks) {
-		load_task.status = THREAD_LOAD_FAILED;
-		thread_load_mutex.unlock();
-		return;
+	{
+		MutexLock thread_load_lock(thread_load_mutex);
+		if (cleaning_tasks) {
+			load_task.status = THREAD_LOAD_FAILED;
+			return;
+		}
 	}
-	thread_load_mutex.unlock();
 
 	// Thread-safe either if it's the current thread or a brand new one.
 	CallQueue *own_mq_override = nullptr;
@@ -1170,18 +1168,18 @@ String ResourceLoader::path_remap(const String &p_path) {
 }
 
 void ResourceLoader::reload_translation_remaps() {
-	ResourceCache::lock.lock();
-
 	List<Resource *> to_reload;
-	SelfList<Resource> *E = remapped_list.first();
 
-	while (E) {
-		to_reload.push_back(E->self());
-		E = E->next();
+	{
+		MutexLock lock(ResourceCache::lock);
+		SelfList<Resource> *E = remapped_list.first();
+
+		while (E) {
+			to_reload.push_back(E->self());
+			E = E->next();
+		}
 	}
 
-	ResourceCache::lock.unlock();
-
 	//now just make sure to not delete any of these resources while changing locale..
 	while (to_reload.front()) {
 		to_reload.front()->get()->reload_from_file();
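`reload_translation_remaps()` uses the same scoping trick for a different reason: it snapshots the remapped list into a local `List` under the lock, then reloads each entry after the guard is gone, since `reload_from_file()` can take arbitrary time and may touch the cache itself. Sketched with a hypothetical function name, assuming the same members as the code above:

void reload_all_sketch() {
	List<Resource *> to_reload;

	{
		MutexLock lock(ResourceCache::lock);
		// Copy just the pointers while the list can't change under us.
		for (SelfList<Resource> *E = remapped_list.first(); E; E = E->next()) {
			to_reload.push_back(E->self());
		}
	}

	// Heavy work happens outside the critical section.
	for (Resource *res : to_reload) {
		res->reload_from_file();
	}
}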
@@ -1904,7 +1904,7 @@ void Object::set_instance_binding(void *p_token, void *p_binding, const GDExtens
 
 void *Object::get_instance_binding(void *p_token, const GDExtensionInstanceBindingCallbacks *p_callbacks) {
 	void *binding = nullptr;
-	_instance_binding_mutex.lock();
+	MutexLock instance_binding_lock(_instance_binding_mutex);
 	for (uint32_t i = 0; i < _instance_binding_count; i++) {
 		if (_instance_bindings[i].token == p_token) {
 			binding = _instance_bindings[i].binding;
@@ -1935,14 +1935,12 @@ void *Object::get_instance_binding(void *p_token, const GDExtensionInstanceBindi
 		_instance_binding_count++;
 	}
 
-	_instance_binding_mutex.unlock();
-
 	return binding;
 }
 
 bool Object::has_instance_binding(void *p_token) {
 	bool found = false;
-	_instance_binding_mutex.lock();
+	MutexLock instance_binding_lock(_instance_binding_mutex);
 	for (uint32_t i = 0; i < _instance_binding_count; i++) {
 		if (_instance_bindings[i].token == p_token) {
 			found = true;
@@ -1950,14 +1948,12 @@ bool Object::has_instance_binding(void *p_token) {
 		}
 	}
 
-	_instance_binding_mutex.unlock();
-
 	return found;
 }
 
 void Object::free_instance_binding(void *p_token) {
 	bool found = false;
-	_instance_binding_mutex.lock();
+	MutexLock instance_binding_lock(_instance_binding_mutex);
 	for (uint32_t i = 0; i < _instance_binding_count; i++) {
 		if (!found && _instance_bindings[i].token == p_token) {
 			if (_instance_bindings[i].free_callback) {
@@ -1976,7 +1972,6 @@ void Object::free_instance_binding(void *p_token) {
 	if (found) {
 		_instance_binding_count--;
 	}
-	_instance_binding_mutex.unlock();
 }
 
 #ifdef TOOLS_ENABLED
@@ -667,7 +667,7 @@ protected:
 	_FORCE_INLINE_ bool _instance_binding_reference(bool p_reference) {
 		bool can_die = true;
 		if (_instance_bindings) {
-			_instance_binding_mutex.lock();
+			MutexLock instance_binding_lock(_instance_binding_mutex);
 			for (uint32_t i = 0; i < _instance_binding_count; i++) {
 				if (_instance_bindings[i].reference_callback) {
 					if (!_instance_bindings[i].reference_callback(_instance_bindings[i].token, _instance_bindings[i].binding, p_reference)) {
@@ -675,7 +675,6 @@ protected:
 				}
 			}
 		}
-		_instance_binding_mutex.unlock();
 	}
 	return can_die;
 }
@@ -127,9 +127,8 @@ void WorkerThreadPool::_process_task(Task *p_task) {
 
 		if (finished_users == max_users) {
 			// Get rid of the group, because nobody else is using it.
-			task_mutex.lock();
+			MutexLock task_lock(task_mutex);
 			group_allocator.free(p_task->group);
-			task_mutex.unlock();
 		}
 
 		// For groups, tasks get rid of themselves.
@@ -349,17 +348,13 @@ WorkerThreadPool::TaskID WorkerThreadPool::add_task(const Callable &p_action, bo
 }
 
 bool WorkerThreadPool::is_task_completed(TaskID p_task_id) const {
-	task_mutex.lock();
+	MutexLock task_lock(task_mutex);
 	const Task *const *taskp = tasks.getptr(p_task_id);
 	if (!taskp) {
-		task_mutex.unlock();
 		ERR_FAIL_V_MSG(false, "Invalid Task ID"); // Invalid task
 	}
 
-	bool completed = (*taskp)->completed;
-	task_mutex.unlock();
-
-	return completed;
+	return (*taskp)->completed;
 }
 
 Error WorkerThreadPool::wait_for_task_completion(TaskID p_task_id) {
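The `WorkerThreadPool` accessors show both fixes at once: the `ERR_FAIL_V_MSG` early exit no longer needs a manual unlock, and the "copy, unlock, return copy" tail collapses into a single `return` under the guard. A condensed sketch with a hypothetical pool type (note the mutex must be declared `mutable` for a guard to be taken in a `const` method, as WorkerThreadPool's own `task_mutex` must be for these methods to compile):

bool is_done(int p_id) const {
	MutexLock lock(pool_mutex); // Assuming: mutable BinaryMutex pool_mutex;
	const Item *const *item = items.getptr(p_id);
	if (!item) {
		ERR_FAIL_V_MSG(false, "Invalid ID"); // Early return; the guard still unlocks.
	}
	return (*item)->completed; // Computed before the guard's destructor runs.
}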
@@ -522,10 +517,9 @@ void WorkerThreadPool::yield() {
 }
 
 void WorkerThreadPool::notify_yield_over(TaskID p_task_id) {
-	task_mutex.lock();
+	MutexLock task_lock(task_mutex);
 	Task **taskp = tasks.getptr(p_task_id);
 	if (!taskp) {
-		task_mutex.unlock();
 		ERR_FAIL_MSG("Invalid Task ID.");
 	}
 	Task *task = *taskp;
@@ -534,7 +528,6 @@ void WorkerThreadPool::notify_yield_over(TaskID p_task_id) {
 			// This avoids a race condition where a task is created and yield-over called before it's processed.
 			task->pending_notify_yield_over = true;
 		}
-		task_mutex.unlock();
 		return;
 	}
 
@@ -542,8 +535,6 @@ void WorkerThreadPool::notify_yield_over(TaskID p_task_id) {
 	td.yield_is_over = true;
 	td.signaled = true;
 	td.cond_var.notify_one();
-
-	task_mutex.unlock();
 }
 
 WorkerThreadPool::GroupID WorkerThreadPool::_add_group_task(const Callable &p_callable, void (*p_func)(void *, uint32_t), void *p_userdata, BaseTemplateUserdata *p_template_userdata, int p_elements, int p_tasks, bool p_high_priority, const String &p_description) {
@@ -601,26 +592,20 @@ WorkerThreadPool::GroupID WorkerThreadPool::add_group_task(const Callable &p_act
 }
 
 uint32_t WorkerThreadPool::get_group_processed_element_count(GroupID p_group) const {
-	task_mutex.lock();
+	MutexLock task_lock(task_mutex);
 	const Group *const *groupp = groups.getptr(p_group);
 	if (!groupp) {
-		task_mutex.unlock();
 		ERR_FAIL_V_MSG(0, "Invalid Group ID");
 	}
-	uint32_t elements = (*groupp)->completed_index.get();
-	task_mutex.unlock();
-	return elements;
+	return (*groupp)->completed_index.get();
 }
 bool WorkerThreadPool::is_group_task_completed(GroupID p_group) const {
-	task_mutex.lock();
+	MutexLock task_lock(task_mutex);
 	const Group *const *groupp = groups.getptr(p_group);
 	if (!groupp) {
-		task_mutex.unlock();
 		ERR_FAIL_V_MSG(false, "Invalid Group ID");
 	}
-	bool completed = (*groupp)->completed.is_set();
-	task_mutex.unlock();
-	return completed;
+	return (*groupp)->completed.is_set();
 }
 
 void WorkerThreadPool::wait_for_group_task_completion(GroupID p_group) {
@@ -644,15 +629,13 @@ void WorkerThreadPool::wait_for_group_task_completion(GroupID p_group) {
 
 		if (finished_users == max_users) {
 			// All tasks using this group are gone (finished before the group), so clear the group too.
-			task_mutex.lock();
+			MutexLock task_lock(task_mutex);
 			group_allocator.free(group);
-			task_mutex.unlock();
 		}
 	}
 
-	task_mutex.lock(); // This mutex is needed when Physics 2D and/or 3D is selected to run on a separate thread.
+	MutexLock task_lock(task_mutex); // This mutex is needed when Physics 2D and/or 3D is selected to run on a separate thread.
 	groups.erase(p_group);
-	task_mutex.unlock();
 #endif
 }
@@ -191,11 +191,10 @@ StringName::StringName(const StringName &p_name) {
 }
 
 void StringName::assign_static_unique_class_name(StringName *ptr, const char *p_name) {
-	mutex.lock();
+	MutexLock lock(mutex);
 	if (*ptr == StringName()) {
 		*ptr = StringName(p_name, true);
 	}
-	mutex.unlock();
 }
 
 StringName::StringName(const char *p_name, bool p_static) {
@@ -1855,26 +1855,27 @@ void EditorFileSystem::_update_script_classes() {
 		return;
 	}
 
-	update_script_mutex.lock();
+	{
+		MutexLock update_script_lock(update_script_mutex);
 
 		EditorProgress *ep = nullptr;
 		if (update_script_paths.size() > 1) {
 			ep = memnew(EditorProgress("update_scripts_classes", TTR("Registering global classes..."), update_script_paths.size()));
 		}
 
 		int step_count = 0;
 		for (const KeyValue<String, ScriptInfo> &E : update_script_paths) {
 			_register_global_class_script(E.key, E.key, E.value.type, E.value.script_class_name, E.value.script_class_extends, E.value.script_class_icon_path);
 			if (ep) {
 				ep->step(E.value.script_class_name, step_count++, false);
 			}
 		}
 
 		memdelete_notnull(ep);
 
 		update_script_paths.clear();
-	update_script_mutex.unlock();
+	}
 
 	ScriptServer::save_global_classes();
 	EditorNode::get_editor_data().script_class_save_icon_paths();
@@ -1894,7 +1895,7 @@ void EditorFileSystem::_update_script_documentation() {
 		return;
 	}
 
-	update_script_mutex.lock();
+	MutexLock update_script_lock(update_script_mutex);
 
 	EditorProgress *ep = nullptr;
 	if (update_script_paths_documentation.size() > 1) {
@@ -1933,7 +1934,6 @@ void EditorFileSystem::_update_script_documentation() {
 	memdelete_notnull(ep);
 
 	update_script_paths_documentation.clear();
-	update_script_mutex.unlock();
 }
 
 void EditorFileSystem::_process_update_pending() {
@@ -1945,7 +1945,7 @@ void EditorFileSystem::_process_update_pending() {
 }
 
 void EditorFileSystem::_queue_update_script_class(const String &p_path, const String &p_type, const String &p_script_class_name, const String &p_script_class_extends, const String &p_script_class_icon_path) {
-	update_script_mutex.lock();
+	MutexLock update_script_lock(update_script_mutex);
 
 	ScriptInfo si;
 	si.type = p_type;
@@ -1955,8 +1955,6 @@ void EditorFileSystem::_queue_update_script_class(const String &p_path, const St
 	update_script_paths.insert(p_path, si);
 
 	update_script_paths_documentation.insert(p_path);
-
-	update_script_mutex.unlock();
 }
 
 void EditorFileSystem::_update_scene_groups() {
@@ -1970,32 +1968,33 @@ void EditorFileSystem::_update_scene_groups() {
 	}
 	int step_count = 0;
 
-	update_scene_mutex.lock();
-	for (const String &path : update_scene_paths) {
-		ProjectSettings::get_singleton()->remove_scene_groups_cache(path);
+	{
+		MutexLock update_scene_lock(update_scene_mutex);
+		for (const String &path : update_scene_paths) {
+			ProjectSettings::get_singleton()->remove_scene_groups_cache(path);
 
 			int index = -1;
 			EditorFileSystemDirectory *efd = find_file(path, &index);
 
 			if (!efd || index < 0) {
 				// The file was removed.
 				continue;
 			}
 
 			const HashSet<StringName> scene_groups = PackedScene::get_scene_groups(path);
 			if (!scene_groups.is_empty()) {
 				ProjectSettings::get_singleton()->add_scene_groups_cache(path, scene_groups);
 			}
 
 			if (ep) {
 				ep->step(efd->files[index]->file, step_count++, false);
 			}
 		}
 
-	memdelete_notnull(ep);
-	update_scene_paths.clear();
-	update_scene_mutex.unlock();
+		memdelete_notnull(ep);
+		update_scene_paths.clear();
+	}
 
 	ProjectSettings::get_singleton()->save_scene_groups_cache();
 }
@@ -2009,9 +2008,8 @@ void EditorFileSystem::_update_pending_scene_groups() {
 }
 
 void EditorFileSystem::_queue_update_scene_groups(const String &p_path) {
-	update_scene_mutex.lock();
+	MutexLock update_scene_lock(update_scene_mutex);
 	update_scene_paths.insert(p_path);
-	update_scene_mutex.unlock();
 }
 
 void EditorFileSystem::_get_all_scenes(EditorFileSystemDirectory *p_dir, HashSet<String> &r_list) {
@@ -87,57 +87,55 @@ void NavMeshGenerator2D::sync() {
 		return;
 	}
 
-	baking_navmesh_mutex.lock();
-	generator_task_mutex.lock();
+	MutexLock baking_navmesh_lock(baking_navmesh_mutex);
+	{
+		MutexLock generator_task_lock(generator_task_mutex);
 
 		LocalVector<WorkerThreadPool::TaskID> finished_task_ids;
 
 		for (KeyValue<WorkerThreadPool::TaskID, NavMeshGeneratorTask2D *> &E : generator_tasks) {
 			if (WorkerThreadPool::get_singleton()->is_task_completed(E.key)) {
 				WorkerThreadPool::get_singleton()->wait_for_task_completion(E.key);
 				finished_task_ids.push_back(E.key);
 
 				NavMeshGeneratorTask2D *generator_task = E.value;
 				DEV_ASSERT(generator_task->status == NavMeshGeneratorTask2D::TaskStatus::BAKING_FINISHED);
 
 				baking_navmeshes.erase(generator_task->navigation_mesh);
 				if (generator_task->callback.is_valid()) {
 					generator_emit_callback(generator_task->callback);
 				}
 				memdelete(generator_task);
 			}
 		}
 
-	for (WorkerThreadPool::TaskID finished_task_id : finished_task_ids) {
-		generator_tasks.erase(finished_task_id);
+		for (WorkerThreadPool::TaskID finished_task_id : finished_task_ids) {
+			generator_tasks.erase(finished_task_id);
+		}
 	}
-
-	generator_task_mutex.unlock();
-	baking_navmesh_mutex.unlock();
 }
 
 void NavMeshGenerator2D::cleanup() {
-	baking_navmesh_mutex.lock();
-	generator_task_mutex.lock();
+	MutexLock baking_navmesh_lock(baking_navmesh_mutex);
+	{
+		MutexLock generator_task_lock(generator_task_mutex);
 
 		baking_navmeshes.clear();
 
 		for (KeyValue<WorkerThreadPool::TaskID, NavMeshGeneratorTask2D *> &E : generator_tasks) {
 			WorkerThreadPool::get_singleton()->wait_for_task_completion(E.key);
 			NavMeshGeneratorTask2D *generator_task = E.value;
 			memdelete(generator_task);
 		}
-	generator_tasks.clear();
+		generator_tasks.clear();
 
-	generator_rid_rwlock.write_lock();
-	for (NavMeshGeometryParser2D *parser : generator_parsers) {
-		generator_parser_owner.free(parser->self);
+		generator_rid_rwlock.write_lock();
+		for (NavMeshGeometryParser2D *parser : generator_parsers) {
+			generator_parser_owner.free(parser->self);
+		}
+		generator_parsers.clear();
+		generator_rid_rwlock.write_unlock();
 	}
-	generator_parsers.clear();
-	generator_rid_rwlock.write_unlock();
-
-	generator_task_mutex.unlock();
-	baking_navmesh_mutex.unlock();
 }
 
 void NavMeshGenerator2D::finish() {
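The navigation generators hold two mutexes at once, so the rewrite uses an outer guard for `baking_navmesh_mutex` and an inner braced block for `generator_task_mutex`. Because C++ destroys locals in reverse order of construction, the inner guard releases first, preserving the old unlock order (task mutex before navmesh mutex) while keeping the acquisition order consistent between these functions, which is what prevents lock-order deadlocks. A sketch:

void sync_sketch() {
	MutexLock baking_lock(baking_navmesh_mutex); // Acquired first, released last.
	{
		MutexLock task_lock(generator_task_mutex); // Acquired second...
		// ...work that needs both mutexes...
	} // ...released first, at the inner brace.
	// Anything here still holds only baking_navmesh_mutex.
}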
@@ -212,7 +210,7 @@ void NavMeshGenerator2D::bake_from_source_geometry_data_async(Ref<NavigationPoly
 	baking_navmeshes.insert(p_navigation_mesh);
 	baking_navmesh_mutex.unlock();
 
-	generator_task_mutex.lock();
+	MutexLock generator_task_lock(generator_task_mutex);
 	NavMeshGeneratorTask2D *generator_task = memnew(NavMeshGeneratorTask2D);
 	generator_task->navigation_mesh = p_navigation_mesh;
 	generator_task->source_geometry_data = p_source_geometry_data;
@@ -220,14 +218,11 @@ void NavMeshGenerator2D::bake_from_source_geometry_data_async(Ref<NavigationPoly
 	generator_task->status = NavMeshGeneratorTask2D::TaskStatus::BAKING_STARTED;
 	generator_task->thread_task_id = WorkerThreadPool::get_singleton()->add_native_task(&NavMeshGenerator2D::generator_thread_bake, generator_task, NavMeshGenerator2D::baking_use_high_priority_threads, "NavMeshGeneratorBake2D");
 	generator_tasks.insert(generator_task->thread_task_id, generator_task);
-	generator_task_mutex.unlock();
 }
 
 bool NavMeshGenerator2D::is_baking(Ref<NavigationPolygon> p_navigation_polygon) {
-	baking_navmesh_mutex.lock();
-	bool baking = baking_navmeshes.has(p_navigation_polygon);
-	baking_navmesh_mutex.unlock();
-
-	return baking;
+	MutexLock baking_navmesh_lock(baking_navmesh_mutex);
+	return baking_navmeshes.has(p_navigation_polygon);
 }
 
 void NavMeshGenerator2D::generator_thread_bake(void *p_arg) {
@@ -100,57 +100,55 @@ void NavMeshGenerator3D::sync() {
 		return;
 	}
 
-	baking_navmesh_mutex.lock();
-	generator_task_mutex.lock();
+	MutexLock baking_navmesh_lock(baking_navmesh_mutex);
+	{
+		MutexLock generator_task_lock(generator_task_mutex);
 
 		LocalVector<WorkerThreadPool::TaskID> finished_task_ids;
 
 		for (KeyValue<WorkerThreadPool::TaskID, NavMeshGeneratorTask3D *> &E : generator_tasks) {
 			if (WorkerThreadPool::get_singleton()->is_task_completed(E.key)) {
 				WorkerThreadPool::get_singleton()->wait_for_task_completion(E.key);
 				finished_task_ids.push_back(E.key);
 
 				NavMeshGeneratorTask3D *generator_task = E.value;
 				DEV_ASSERT(generator_task->status == NavMeshGeneratorTask3D::TaskStatus::BAKING_FINISHED);
 
 				baking_navmeshes.erase(generator_task->navigation_mesh);
 				if (generator_task->callback.is_valid()) {
 					generator_emit_callback(generator_task->callback);
 				}
 				memdelete(generator_task);
 			}
 		}
 
-	for (WorkerThreadPool::TaskID finished_task_id : finished_task_ids) {
-		generator_tasks.erase(finished_task_id);
+		for (WorkerThreadPool::TaskID finished_task_id : finished_task_ids) {
+			generator_tasks.erase(finished_task_id);
+		}
 	}
-
-	generator_task_mutex.unlock();
-	baking_navmesh_mutex.unlock();
 }
 
 void NavMeshGenerator3D::cleanup() {
-	baking_navmesh_mutex.lock();
-	generator_task_mutex.lock();
+	MutexLock baking_navmesh_lock(baking_navmesh_mutex);
+	{
+		MutexLock generator_task_lock(generator_task_mutex);
 
 		baking_navmeshes.clear();
 
 		for (KeyValue<WorkerThreadPool::TaskID, NavMeshGeneratorTask3D *> &E : generator_tasks) {
 			WorkerThreadPool::get_singleton()->wait_for_task_completion(E.key);
 			NavMeshGeneratorTask3D *generator_task = E.value;
 			memdelete(generator_task);
 		}
-	generator_tasks.clear();
+		generator_tasks.clear();
 
-	generator_rid_rwlock.write_lock();
-	for (NavMeshGeometryParser3D *parser : generator_parsers) {
-		generator_parser_owner.free(parser->self);
+		generator_rid_rwlock.write_lock();
+		for (NavMeshGeometryParser3D *parser : generator_parsers) {
+			generator_parser_owner.free(parser->self);
+		}
+		generator_parsers.clear();
+		generator_rid_rwlock.write_unlock();
 	}
-	generator_parsers.clear();
-	generator_rid_rwlock.write_unlock();
-
-	generator_task_mutex.unlock();
-	baking_navmesh_mutex.unlock();
 }
 
 void NavMeshGenerator3D::finish() {
@@ -226,7 +224,7 @@ void NavMeshGenerator3D::bake_from_source_geometry_data_async(Ref<NavigationMesh
 	baking_navmeshes.insert(p_navigation_mesh);
 	baking_navmesh_mutex.unlock();
 
-	generator_task_mutex.lock();
+	MutexLock generator_task_lock(generator_task_mutex);
 	NavMeshGeneratorTask3D *generator_task = memnew(NavMeshGeneratorTask3D);
 	generator_task->navigation_mesh = p_navigation_mesh;
 	generator_task->source_geometry_data = p_source_geometry_data;
@@ -234,14 +232,11 @@ void NavMeshGenerator3D::bake_from_source_geometry_data_async(Ref<NavigationMesh
 	generator_task->status = NavMeshGeneratorTask3D::TaskStatus::BAKING_STARTED;
 	generator_task->thread_task_id = WorkerThreadPool::get_singleton()->add_native_task(&NavMeshGenerator3D::generator_thread_bake, generator_task, NavMeshGenerator3D::baking_use_high_priority_threads, SNAME("NavMeshGeneratorBake3D"));
 	generator_tasks.insert(generator_task->thread_task_id, generator_task);
-	generator_task_mutex.unlock();
 }
 
 bool NavMeshGenerator3D::is_baking(Ref<NavigationMesh> p_navigation_mesh) {
-	baking_navmesh_mutex.lock();
-	bool baking = baking_navmeshes.has(p_navigation_mesh);
-	baking_navmesh_mutex.unlock();
-
-	return baking;
+	MutexLock baking_navmesh_lock(baking_navmesh_mutex);
+	return baking_navmeshes.has(p_navigation_mesh);
 }
 
 void NavMeshGenerator3D::generator_thread_bake(void *p_arg) {
@@ -138,7 +138,7 @@ void FogMaterial::cleanup_shader() {
 }
 
 void FogMaterial::_update_shader() {
-	shader_mutex.lock();
+	MutexLock shader_lock(shader_mutex);
 	if (shader.is_null()) {
 		shader = RS::get_singleton()->shader_create();
 
@@ -165,7 +165,6 @@ void fog() {
 }
 )");
 	}
-	shader_mutex.unlock();
 }
 
 FogMaterial::FogMaterial() {
@@ -269,7 +269,7 @@ void ProceduralSkyMaterial::cleanup_shader() {
 }
 
 void ProceduralSkyMaterial::_update_shader() {
-	shader_mutex.lock();
+	MutexLock shader_lock(shader_mutex);
 	if (shader_cache[0].is_null()) {
 		for (int i = 0; i < 2; i++) {
 			shader_cache[i] = RS::get_singleton()->shader_create();
@@ -354,7 +354,6 @@ void sky() {
 			i ? "render_mode use_debanding;" : ""));
 		}
 	}
-	shader_mutex.unlock();
 }
 
 ProceduralSkyMaterial::ProceduralSkyMaterial() {
@@ -463,7 +462,7 @@ void PanoramaSkyMaterial::cleanup_shader() {
 }
 
 void PanoramaSkyMaterial::_update_shader() {
-	shader_mutex.lock();
+	MutexLock shader_lock(shader_mutex);
 	if (shader_cache[0].is_null()) {
 		for (int i = 0; i < 2; i++) {
 			shader_cache[i] = RS::get_singleton()->shader_create();
@@ -484,8 +483,6 @@ void sky() {
 			i ? "filter_linear" : "filter_nearest"));
 		}
 	}
-
-	shader_mutex.unlock();
 }
 
 PanoramaSkyMaterial::PanoramaSkyMaterial() {
@@ -692,7 +689,7 @@ void PhysicalSkyMaterial::cleanup_shader() {
 }
 
 void PhysicalSkyMaterial::_update_shader() {
-	shader_mutex.lock();
+	MutexLock shader_lock(shader_mutex);
 	if (shader_cache[0].is_null()) {
 		for (int i = 0; i < 2; i++) {
 			shader_cache[i] = RS::get_singleton()->shader_create();
@@ -785,8 +782,6 @@ void sky() {
 			i ? "render_mode use_debanding;" : ""));
 		}
 	}
-
-	shader_mutex.unlock();
 }
 
 PhysicalSkyMaterial::PhysicalSkyMaterial() {
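The material `_update_shader()` functions are the simplest case: the whole body is the critical section guarding lazy, once-only creation of a shared shader, so a single guard at the top replaces the lock/unlock pair. A sketch of the shape, with a hypothetical `shader_code` string and assuming the same members as the code above:

void update_shader_sketch() {
	MutexLock lock(shader_mutex); // Serializes concurrent first-time callers.
	if (shader.is_null()) {
		// Only the first caller creates the shader; later callers see it non-null.
		shader = RS::get_singleton()->shader_create();
		RS::get_singleton()->shader_set_code(shader, shader_code);
	}
}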