diff --git a/Source/Engine/Core/Delegate.h b/Source/Engine/Core/Delegate.h
index 7e80504e6..1b7a28c48 100644
--- a/Source/Engine/Core/Delegate.h
+++ b/Source/Engine/Core/Delegate.h
@@ -2,48 +2,56 @@
#pragma once
+// Toggles the Delegate implementation type: 0 = mutex + HashSet, 1 = lock-free atomic table
+// [Deprecated on 12.09.2023, expires on 12.09.2024]
+#define DELEGATE_USE_ATOMIC 0
+
#include "Engine/Core/Memory/Allocation.h"
+#include "Engine/Core/Collections/HashFunctions.h"
+#if !DELEGATE_USE_ATOMIC
#include "Engine/Threading/Threading.h"
#include "Engine/Core/Collections/HashSet.h"
+#endif
///
/// The function object that supports binding static, member and lambda functions.
///
template
-class Function
+class Function
{
+public:
friend Delegate;
template
- friend uint32 GetHash(const Function& key);
-public:
+ friend uint32 GetHash(const Function& key);
+
///
/// Signature of the function to call.
///
- typedef ReturnType (*Signature)(Params ...);
+ typedef ReturnType (*Signature)(Params...);
private:
- typedef ReturnType (*StubSignature)(void*, Params ...);
+ typedef ReturnType (*StubSignature)(void*, Params...);
- template
- static ReturnType StaticMethodStub(void*, Params ... params)
+ template
+ static ReturnType StaticMethodStub(void*, Params... params)
{
return (Method)(Forward(params)...);
}
- static ReturnType StaticPointerMethodStub(void* callee, Params ... params)
+ static ReturnType StaticPointerMethodStub(void* callee, Params... params)
{
return reinterpret_cast(callee)(Forward(params)...);
}
- template
- static ReturnType ClassMethodStub(void* callee, Params ... params)
+ template
+ static ReturnType ClassMethodStub(void* callee, Params... params)
{
return (reinterpret_cast(callee)->*Method)(Forward(params)...);
}
- template
- static ReturnType ClassMethodStub(void* callee, Params ... params)
+ template
+ static ReturnType ClassMethodStub(void* callee, Params... params)
{
return (reinterpret_cast(callee)->*Method)(Forward(params)...);
}
@@ -62,6 +70,7 @@ private:
{
Platform::InterlockedIncrement((int64 volatile*)&_lambda->Refs);
}
+
FORCE_INLINE void LambdaDtor()
{
if (Platform::InterlockedDecrement(&_lambda->Refs) == 0)
@@ -70,6 +79,7 @@ private:
Allocator::Free(_lambda);
}
}
+
public:
///
/// Initializes a new instance of the class.
@@ -121,14 +131,14 @@ public:
~Function()
{
if (_lambda)
- LambdaDtor();
+ LambdaDtor();
}
public:
///
/// Binds a static function.
///
- template
+ template
void Bind()
{
if (_lambda)
@@ -142,7 +152,7 @@ public:
/// Binds a member function.
///
/// The object instance.
- template
+ template
void Bind(T* callee)
{
if (_lambda)
@@ -176,8 +186,14 @@ public:
LambdaDtor();
_lambda = (Lambda*)Allocator::Allocate(sizeof(Lambda) + sizeof(T));
_lambda->Refs = 1;
- _lambda->Dtor = [](void* callee) -> void { static_cast(callee)->~T(); };
- _function = [](void* callee, Params ... params) -> ReturnType { return (*static_cast(callee))(Forward(params)...); };
+ _lambda->Dtor = [](void* callee) -> void
+ {
+ static_cast(callee)->~T();
+ };
+ _function = [](void* callee, Params... params) -> ReturnType
+ {
+ return (*static_cast(callee))(Forward(params)...);
+ };
_callee = (byte*)_lambda + sizeof(Lambda);
new(_callee) T(lambda);
}
@@ -208,7 +224,7 @@ public:
///
/// A list of parameters for the function invocation.
/// Function result
- FORCE_INLINE ReturnType operator()(Params ... params) const
+ FORCE_INLINE ReturnType operator()(Params... params) const
{
ASSERT(_function);
return _function(_callee, Forward(params)...);
@@ -260,32 +276,52 @@ public:
///
/// Signature of the function to call.
///
- typedef void (*Signature)(Params ...);
+ typedef void (*Signature)(Params...);
///
/// Template for the function.
///
- using FunctionType = Function;
+ using FunctionType = Function;
protected:
+#if DELEGATE_USE_ATOMIC
+ // Single allocation for the list of bound functions. Thread-safe access via atomic operations. Removing a bound function simply clears its entry, which keeps unbinding safe while an invocation is in progress.
+ intptr volatile _ptr = 0;
+ intptr volatile _size = 0;
+#else
HashSet* _functions = nullptr;
CriticalSection* _locker;
int32* _lockerRefCount;
- typedef void (*StubSignature)(void*, Params ...);
+#endif
+ typedef void (*StubSignature)(void*, Params...);
public:
Delegate()
{
+#if !DELEGATE_USE_ATOMIC
_locker = New();
_lockerRefCount = New();
*_lockerRefCount = 1;
+#endif
}
Delegate(const Delegate& other)
{
+#if DELEGATE_USE_ATOMIC
+ const intptr newSize = other._size;
+ auto newBindings = (FunctionType*)Allocator::Allocate(newSize * sizeof(FunctionType));
+ Platform::MemoryCopy((void*)newBindings, (const void*)other._ptr, newSize * sizeof(FunctionType));
+ for (intptr i = 0; i < newSize; i++)
+ {
+ FunctionType& f = newBindings[i];
+ if (f._function && f._lambda)
+ f.LambdaCtor();
+ }
+ _ptr = (intptr)newBindings;
+ _size = newSize;
+#else
if (other._functions == nullptr)
return;
-
_functions = New>(*other._functions);
for (auto i = _functions->Begin(); i.IsNotEnd(); ++i)
{
@@ -295,20 +331,41 @@ public:
_locker = other._locker;
_lockerRefCount = other._lockerRefCount;
*_lockerRefCount = *_lockerRefCount + 1;
+#endif
}
Delegate(Delegate&& other) noexcept
{
+#if DELEGATE_USE_ATOMIC
+ _ptr = other._ptr;
+ _size = other._size;
+ other._ptr = 0;
+ other._size = 0;
+#else
_functions = other._functions;
_locker = other._locker;
_lockerRefCount = other._lockerRefCount;
other._functions = nullptr;
other._locker = nullptr;
other._lockerRefCount = nullptr;
+#endif
}
~Delegate()
{
+#if DELEGATE_USE_ATOMIC
+ auto ptr = (FunctionType*)_ptr;
+ if (ptr)
+ {
+ while (_size--)
+ {
+ if (ptr->_lambda)
+ ptr->LambdaDtor();
+ ++ptr;
+ }
+ Allocator::Free((void*)_ptr);
+ }
+#else
int32& lockerRefCount = *_lockerRefCount;
lockerRefCount--;
if (lockerRefCount == 0)
@@ -318,7 +375,6 @@ public:
_locker = nullptr;
_lockerRefCount = nullptr;
}
-
if (_functions != nullptr)
{
for (auto i = _functions->Begin(); i.IsNotEnd(); ++i)
@@ -328,6 +384,7 @@ public:
}
Allocator::Free(_functions);
}
+#endif
}
Delegate& operator=(const Delegate& other)
@@ -335,8 +392,15 @@ public:
if (this != &other)
{
UnbindAll();
+#if DELEGATE_USE_ATOMIC
+ const intptr size = Platform::AtomicRead((intptr volatile*)&other._size);
+ FunctionType* bindings = (FunctionType*)Platform::AtomicRead((intptr volatile*)&other._ptr);
+ for (intptr i = 0; i < size; i++)
+ Bind(bindings[i]);
+#else
for (auto i = other._functions->Begin(); i.IsNotEnd(); ++i)
Bind(i->Item);
+#endif
}
return *this;
}
@@ -345,12 +409,19 @@ public:
{
if (this != &other)
{
+#if DELEGATE_USE_ATOMIC
+ _ptr = other._ptr;
+ _size = other._size;
+ other._ptr = 0;
+ other._size = 0;
+#else
_functions = other._functions;
_locker = other._locker;
_lockerRefCount = other._lockerRefCount;
other._functions = nullptr;
other._locker = nullptr;
other._lockerRefCount = nullptr;
+#endif
}
return *this;
}
@@ -359,7 +430,7 @@ public:
///
/// Binds a static function.
///
- template
+ template
void Bind()
{
FunctionType f;
@@ -371,7 +442,7 @@ public:
/// Binds a member function.
///
/// The object instance.
- template
+ template
void Bind(T* callee)
{
FunctionType f;
@@ -407,16 +478,61 @@ public:
/// The function to bind.
void Bind(const FunctionType& f)
{
+#if DELEGATE_USE_ATOMIC
+ const intptr size = Platform::AtomicRead(&_size);
+ FunctionType* bindings = (FunctionType*)Platform::AtomicRead(&_ptr);
+ if (bindings)
+ {
+ // Find the first free slot
+ for (intptr i = 0; i < size; i++)
+ {
+ if (Platform::InterlockedCompareExchange((intptr volatile*)&bindings[i]._function, (intptr)f._function, 0) == 0)
+ {
+ bindings[i]._callee = f._callee;
+ bindings[i]._lambda = f._lambda;
+ if (f._lambda)
+ f.LambdaCtor();
+ return;
+ }
+ }
+ }
+
+ // Failed to find an empty slot (list is empty or too small), so reallocate a larger one
+ const intptr newSize = size ? size * 2 : 32;
+ auto newBindings = (FunctionType*)Allocator::Allocate(newSize * sizeof(FunctionType));
+ Platform::MemoryCopy(newBindings, bindings, size * sizeof(FunctionType));
+ Platform::MemoryClear(newBindings + size, (newSize - size) * sizeof(FunctionType));
+
+ // Insert the function into the first slot past the end of the old list
+ newBindings[size] = f;
+
+ // Set the new list
+ auto oldBindings = (FunctionType*)Platform::InterlockedCompareExchange(&_ptr, (intptr)newBindings, (intptr)bindings);
+ if (oldBindings != bindings)
+ {
+ // Another thread already published a new list, so free this one and retry
+ Allocator::Free(newBindings);
+ Bind(f);
+ }
+ else
+ {
+ // Free previous bindings and update list size
+ Platform::AtomicStore(&_size, newSize);
+ // TODO(review): freeing the old table here risks use-after-free — another thread may have read _ptr earlier and still be iterating the old table (e.g. inside operator()); consider deferred reclamation
+ Allocator::Free(bindings);
+ }
+#else
ScopeLock lock(*_locker);
if (_functions == nullptr)
_functions = New>(32);
_functions->Add(f);
+#endif
}
///
/// Binds a static function (if not bound yet).
///
- template
+ template
void BindUnique()
{
FunctionType f;
@@ -428,7 +544,7 @@ public:
/// Binds a member function (if not bound yet).
///
/// The object instance.
- template
+ template
void BindUnique(T* callee)
{
FunctionType f;
@@ -452,20 +568,30 @@ public:
/// The function to bind.
void BindUnique(const FunctionType& f)
{
- ScopeLock lock(*_locker);
- if (_functions != nullptr)
+ // Skip if already bound
+#if DELEGATE_USE_ATOMIC
+ const intptr size = Platform::AtomicRead(&_size);
+ FunctionType* bindings = (FunctionType*)Platform::AtomicRead(&_ptr);
+ if (bindings)
{
- // Skip if already bound
- if (_functions->Contains(f))
- return;
+ for (intptr i = 0; i < size; i++)
+ {
+ if (Platform::AtomicRead((intptr volatile*)&bindings[i]._callee) == (intptr)f._callee && Platform::AtomicRead((intptr volatile*)&bindings[i]._function) == (intptr)f._function)
+ return;
+ }
}
+#else
+ ScopeLock lock(*_locker);
+ if (_functions && _functions->Contains(f))
+ return;
+#endif
Bind(f);
}
///
/// Unbinds a static function.
///
- template
+ template
void Unbind()
{
FunctionType f;
@@ -477,7 +603,7 @@ public:
/// Unbinds a member function.
///
/// The object instance.
- template
+ template
void Unbind(T* callee)
{
FunctionType f;
@@ -501,10 +627,35 @@ public:
/// The function to unbind.
void Unbind(const FunctionType& f)
{
+#if DELEGATE_USE_ATOMIC
+ // Find slot with that function
+ const intptr size = Platform::AtomicRead(&_size);
+ FunctionType* bindings = (FunctionType*)Platform::AtomicRead(&_ptr);
+ for (intptr i = 0; i < size; i++)
+ {
+ if (Platform::AtomicRead((intptr volatile*)&bindings[i]._callee) == (intptr)f._callee && Platform::AtomicRead((intptr volatile*)&bindings[i]._function) == (intptr)f._function)
+ {
+ if (bindings[i]._lambda)
+ {
+ bindings[i].LambdaDtor();
+ bindings[i]._lambda = nullptr;
+ }
+ Platform::AtomicStore((intptr volatile*)&bindings[i]._callee, 0);
+ Platform::AtomicStore((intptr volatile*)&bindings[i]._function, 0);
+ break;
+ }
+ }
+ if ((FunctionType*)Platform::AtomicRead(&_ptr) != bindings)
+ {
+ // Another thread swapped in a new bindings list, so retry the unbind against the current one
+ Unbind(f);
+ }
+#else
if (_functions == nullptr)
return;
ScopeLock lock(*_locker);
_functions->Remove(f);
+#endif
}
///
@@ -512,6 +663,20 @@ public:
///
void UnbindAll()
{
+#if DELEGATE_USE_ATOMIC
+ const intptr size = Platform::AtomicRead(&_size);
+ FunctionType* bindings = (FunctionType*)Platform::AtomicRead(&_ptr);
+ for (intptr i = 0; i < size; i++)
+ {
+ if (bindings[i]._lambda)
+ {
+ bindings[i].LambdaDtor();
+ bindings[i]._lambda = nullptr;
+ }
+ Platform::AtomicStore((intptr volatile*)&bindings[i]._function, 0);
+ Platform::AtomicStore((intptr volatile*)&bindings[i]._callee, 0);
+ }
+#else
if (_functions == nullptr)
return;
ScopeLock lock(*_locker);
@@ -521,6 +686,7 @@ public:
i->Item.LambdaDtor();
}
_functions->Clear();
+#endif
}
///
@@ -529,10 +695,22 @@ public:
/// The bound functions count.
int32 Count() const
{
+#if DELEGATE_USE_ATOMIC
+ int32 count = 0;
+ const intptr size = Platform::AtomicRead((intptr volatile*)&_size);
+ FunctionType* bindings = (FunctionType*)Platform::AtomicRead((intptr volatile*)&_ptr);
+ for (intptr i = 0; i < size; i++)
+ {
+ if (Platform::AtomicRead((intptr volatile*)&bindings[i]._function) != 0)
+ count++;
+ }
+ return count;
+#else
if (_functions == nullptr)
return 0;
ScopeLock lock(*_locker);
return _functions->Count();
+#endif
}
///
@@ -540,10 +718,14 @@ public:
///
int32 Capacity() const
{
+#if DELEGATE_USE_ATOMIC
+ return (int32)Platform::AtomicRead((intptr volatile*)&_size);
+#else
if (_functions == nullptr)
return 0;
ScopeLock lock(*_locker);
return _functions->Capacity();
+#endif
}
///
@@ -552,8 +734,19 @@ public:
/// true if any function is bound; otherwise, false.
bool IsBinded() const
{
+#if DELEGATE_USE_ATOMIC
+ const intptr size = Platform::AtomicRead((intptr volatile*)&_size);
+ FunctionType* bindings = (FunctionType*)Platform::AtomicRead((intptr volatile*)&_ptr);
+ for (intptr i = 0; i < size; i++)
+ {
+ if (Platform::AtomicRead((intptr volatile*)&bindings[i]._function) != 0)
+ return true;
+ }
+ return false;
+#else
ScopeLock lock(*_locker);
return _functions != nullptr && _functions->Count() > 0;
+#endif
}
///
@@ -564,9 +757,24 @@ public:
/// The amount of written items into the output bindings buffer. Can be equal or less than input bindingsCount capacity.
int32 GetBindings(FunctionType* buffer, int32 bufferSize) const
{
- ScopeLock lock(*_locker);
-
int32 count = 0;
+#if DELEGATE_USE_ATOMIC
+ const intptr size = Platform::AtomicRead((intptr volatile*)&_size);
+ FunctionType* bindings = (FunctionType*)Platform::AtomicRead((intptr volatile*)&_ptr);
+ for (intptr i = 0; i < size && i < bufferSize; i++)
+ {
+ buffer[count]._function = (StubSignature)Platform::AtomicRead((intptr volatile*)&bindings[i]._function);
+ if (buffer[count]._function != nullptr)
+ {
+ buffer[count]._callee = (void*)Platform::AtomicRead((intptr volatile*)&bindings[i]._callee);
+ buffer[count]._lambda = (typename FunctionType::Lambda*)Platform::AtomicRead((intptr volatile*)&bindings[i]._lambda);
+ if (buffer[count]._lambda)
+ buffer[count].LambdaCtor();
+ count++;
+ }
+ }
+#else
+ ScopeLock lock(*_locker);
for (auto i = _functions->Begin(); i.IsNotEnd(); ++i)
{
if (i->Item._function != nullptr)
@@ -579,6 +787,7 @@ public:
count++;
}
}
+#endif
return count;
}
@@ -586,8 +795,20 @@ public:
/// Calls all the bound functions. Supports unbinding of the called functions during invocation.
///
/// A list of parameters for the function invocation.
- void operator()(Params ... params) const
+ void operator()(Params... params) const
{
+#if DELEGATE_USE_ATOMIC
+ const intptr size = Platform::AtomicRead((intptr volatile*)&_size);
+ FunctionType* bindings = (FunctionType*)Platform::AtomicRead((intptr volatile*)&_ptr);
+ for (intptr i = 0; i < size; i++)
+ {
+ auto function = (StubSignature)Platform::AtomicRead((intptr volatile*)&bindings->_function);
+ auto callee = (void*)Platform::AtomicRead((intptr volatile*)&bindings->_callee);
+ if (function != nullptr && function == (StubSignature)Platform::AtomicRead((intptr volatile*)&bindings->_function))
+ function(callee, Forward(params)...);
+ ++bindings;
+ }
+#else
if (_functions == nullptr)
return;
ScopeLock lock(*_locker);
@@ -598,11 +819,12 @@ public:
if (function != nullptr)
function(callee, Forward(params)...);
}
+#endif
}
};
template
-inline uint32 GetHash(const Function& key)
+inline uint32 GetHash(const Function& key)
{
uint32 hash = GetHash((void*)key._callee);
CombineHash(hash, GetHash((void*)key._function));