@@ -1 +1 @@
Subproject commit 111fccbc4676ce1a5999570ca3c12f1918a9e763
Subproject commit 800c1a9abe35986fabb6562178e27d3b17c34b5c
@@ -9,7 +9,7 @@ namespace Svelto.ECS.DataStructures.Unity
/// A collection of <see cref="NativeBag"/> intended to allow one buffer per thread.
/// from: https://github.com/jeffvella/UnityEcsEvents/blob/develop/Runtime/MultiAppendBuffer.cs
/// </summary>
public unsafe struct AtomicRingBuffers:IDisposable
public unsafe struct AtomicNativeBags:IDisposable
{
public const int DefaultThreadIndex = -1;
public const int MinThreadIndex = DefaultThreadIndex;
@@ -24,7 +24,7 @@ namespace Svelto.ECS.DataStructures.Unity
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsInvalidThreadIndex(int index) => index < MinThreadIndex || index > _threadsCount;
public AtomicRingBuffers(Common.Allocator allocator, uint threadsCount)
public AtomicNativeBags(Common.Allocator allocator, uint threadsCount)
{
Allocator = allocator;
_threadsCount = threadsCount;
@@ -33,7 +33,7 @@ namespace Svelto.ECS.DataStructures.Unity
var bufferCount = _threadsCount;
var allocationSize = bufferSize * bufferCount;
var ptr = (byte*)MemoryUtilities.Alloc((uint) allocationSize, (uint) MemoryUtilities.AlignOf<int>(), allocator);
var ptr = (byte*)MemoryUtilities.Alloc<int>((uint) allocationSize, allocator);
MemoryUtilities.MemClear((IntPtr) ptr, (uint) allocationSize);
for (int i = 0; i < bufferCount; i++)
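Throughout this change set the two-argument `MemoryUtilities.Alloc(size, align, allocator)` calls are replaced by a generic `MemoryUtilities.Alloc<T>(size, allocator)` that derives the alignment from `T`. The new overload presumably lives in the bumped subproject and is not shown in this diff; a minimal sketch of what it plausibly wraps, assuming the `Alloc`/`AlignOf` API visible in the removed lines:

```csharp
// Sketch only: the generic overload folds the alignment into the call site,
// assuming the pre-existing Alloc(size, align, allocator) and AlignOf<T>().
public static IntPtr Alloc<T>(uint size, Allocator allocator) where T : unmanaged
{
    // alignment is derived from T once, instead of being repeated by every caller
    return Alloc(size, (uint) AlignOf<T>(), allocator);
}
```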
@@ -73,9 +73,7 @@ namespace Svelto.ECS.DataStructures
unsafe
{
var sizeOf = MemoryUtilities.SizeOf<UnsafeBlob>();
var listData = (UnsafeBlob*) MemoryUtilities.Alloc((uint) sizeOf
, (uint) MemoryUtilities.AlignOf<UnsafeBlob>()
, allocator);
var listData = (UnsafeBlob*) MemoryUtilities.Alloc<UnsafeBlob>((uint) sizeOf, allocator);
//clear to nullify the pointers
MemoryUtilities.MemClear((IntPtr) listData, (uint) sizeOf);
@@ -92,9 +90,7 @@ namespace Svelto.ECS.DataStructures
unsafe
{
var sizeOf = MemoryUtilities.SizeOf<UnsafeBlob>();
var listData = (UnsafeBlob*) MemoryUtilities.Alloc((uint) sizeOf
, (uint) MemoryUtilities.AlignOf<UnsafeBlob>()
, allocator);
var listData = (UnsafeBlob*) MemoryUtilities.Alloc<UnsafeBlob>((uint) sizeOf, allocator);
//clear to nullify the pointers
MemoryUtilities.MemClear((IntPtr) listData, (uint) sizeOf);
@@ -127,14 +123,14 @@ namespace Svelto.ECS.DataStructures
#endif
var sizeOf = MemoryUtilities.SizeOf<T>();
if (_queue->space - sizeOf < 0)
_queue->Realloc((uint) MemoryUtilities.AlignOf<int>(), (uint) ((_queue->capacity + sizeOf) * 1.5f));
_queue->Realloc<int>((uint) ((_queue->capacity + sizeOf) * 1.5f));
return ref _queue->Reserve<T>(out index);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Enqueue<T>(in T item) where T : struct
public void Enqueue<T>(in T item) where T : unmanaged
{
unsafe
{
@@ -144,7 +140,7 @@ namespace Svelto.ECS.DataStructures
#endif
var sizeOf = MemoryUtilities.SizeOf<T>();
if (_queue->space - sizeOf < 0)
_queue->Realloc((uint) MemoryUtilities.AlignOf<int>(), (uint) ((_queue->capacity + sizeOf) * 1.5f));
_queue->Realloc<int>((uint) ((_queue->capacity + sizeOf) * 1.5f));
_queue->Write(item);
}
@@ -163,7 +159,7 @@ namespace Svelto.ECS.DataStructures
}
}
public T Dequeue<T>() where T : struct
public T Dequeue<T>() where T : unmanaged
{
unsafe
{
@@ -1,6 +1,5 @@
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using Svelto.Common;
using Allocator = Svelto.Common.Allocator;
@@ -23,9 +22,9 @@ namespace Svelto.ECS.DataStructures
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("SimpleNativeArray: null-access");
if (hashType != Svelto.Common.TypeHash<T>.hash)
throw new Exception("SimpleNativeArray: not expected type used");
throw new Exception("NativeDynamicArray: null-access");
if (hashType != TypeHash<T>.hash)
throw new Exception("NativeDynamicArray: not expected type used");
#endif
return (uint) (_list->count / MemoryUtilities.SizeOf<T>());
@@ -39,9 +38,9 @@ namespace Svelto.ECS.DataStructures
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("SimpleNativeArray: null-access");
if (hashType != Svelto.Common.TypeHash<T>.hash)
throw new Exception("SimpleNativeArray: not expected type used");
throw new Exception("NativeDynamicArray: null-access");
if (hashType != TypeHash<T>.hash)
throw new Exception("NativeDynamicArray: not expected type used");
#endif
return (uint) (_list->capacity / MemoryUtilities.SizeOf<T>());
@@ -57,19 +56,16 @@ namespace Svelto.ECS.DataStructures
rtnStruc.hashType = TypeHash<T>.hash;
#endif
var sizeOf = MemoryUtilities.SizeOf<T>();
var alignOf = MemoryUtilities.AlignOf<T>();
uint pointerSize = (uint) MemoryUtilities.SizeOf<UnsafeArray>();
UnsafeArray* listData =
(UnsafeArray*) MemoryUtilities.Alloc(pointerSize
, (uint) MemoryUtilities.AlignOf<UnsafeArray>(), allocator);
(UnsafeArray*) MemoryUtilities.Alloc<UnsafeArray>(pointerSize, allocator);
//clear to nullify the pointers
MemoryUtilities.MemClear((IntPtr) listData, pointerSize);
listData->allocator = allocator;
listData->Realloc((uint) alignOf, (uint) (newLength * sizeOf));
listData->Realloc<T>((uint) (newLength * sizeOf));
rtnStruc._list = listData;
@@ -84,11 +80,11 @@ namespace Svelto.ECS.DataStructures
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("SimpleNativeArray: null-access");
if (hashType != Svelto.Common.TypeHash<T>.hash)
throw new Exception("SimpleNativeArray: not expected type used");
throw new Exception("NativeDynamicArray: null-access");
if (hashType != TypeHash<T>.hash)
throw new Exception("NativeDynamicArray: not expected type used");
if (index >= Count<T>())
throw new Exception($"SimpleNativeArray: out of bound access, index {index} count {Count<T>()}");
throw new Exception($"NativeDynamicArray: out of bound access, index {index} count {Count<T>()}");
#endif
return ref _list->Get<T>(index);
}
@@ -101,11 +97,11 @@ namespace Svelto.ECS.DataStructures
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("SimpleNativeArray: null-access");
if (hashType != Svelto.Common.TypeHash<T>.hash)
throw new Exception("SimpleNativeArray: not expected type used");
throw new Exception("NativeDynamicArray: null-access");
if (hashType != TypeHash<T>.hash)
throw new Exception("NativeDynamicArray: not expected type used");
if (index >= Capacity<T>())
throw new Exception($"SimpleNativeArray: out of bound access, index {index} count {Count<T>()}");
throw new Exception($"NativeDynamicArray: out of bound access, index {index} count {Count<T>()}");
#endif
_list->Set(index, value);
}
@@ -113,11 +109,12 @@ namespace Svelto.ECS.DataStructures
public unsafe void Dispose()
{
if (_list != null)
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("NativeDynamicArray: null-access");
#endif
_list->Dispose();
_list = null;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -127,19 +124,34 @@ namespace Svelto.ECS.DataStructures
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("SimpleNativeArray: null-access");
if (hashType != Svelto.Common.TypeHash<T>.hash)
throw new Exception("SimpleNativeArray: not expected type used");
throw new Exception("NativeDynamicArray: null-access");
if (hashType != TypeHash<T>.hash)
throw new Exception("NativeDynamicArray: not expected type used");
#endif
var structSize = (uint) MemoryUtilities.SizeOf<T>();
if (_list->space - (int)structSize < 0)
_list->Realloc((uint) MemoryUtilities.AlignOf<T>(), (uint) ((Count<T>() + 1) * structSize * 1.5f));
_list->Realloc<T>((uint) ((Count<T>() + 1) * structSize * 1.5f));
//the idea is, considering the wrap, a read pointer must always be behind a writer pointer
#if DEBUG && !PROFILE_SVELTO
_list->Add(item);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void AddWithoutGrow<T>(in T item) where T : unmanaged
{
unsafe
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("NativeDynamicArray: null-access");
if (hashType != TypeHash<T>.hash)
throw new Exception("NativeDynamicArray: not expected type used");
var structSize = (uint) MemoryUtilities.SizeOf<T>();
if (_list->space - (int)structSize < 0)
throw new Exception("no writing authorized");
throw new Exception("NativeDynamicArray: no writing authorized");
#endif
_list->Add(item);
}
@@ -152,7 +164,7 @@ namespace Svelto.ECS.DataStructures
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("SimpleNativeArray: null-access");
throw new Exception("NativeDynamicArray: null-access");
#endif
_list->Clear();
}
@@ -162,9 +174,9 @@ namespace Svelto.ECS.DataStructures
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("SimpleNativeArray: null-access");
if (hashType != Svelto.Common.TypeHash<T>.hash)
throw new Exception("SimpleNativeArray: not expected type used");
throw new Exception("NativeDynamicArray: null-access");
if (hashType != TypeHash<T>.hash)
throw new Exception("NativeDynamicArray: not expected type used");
#endif
return (T*) _list->ptr;
@@ -176,18 +188,17 @@ namespace Svelto.ECS.DataStructures
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("SimpleNativeArray: null-access");
if (hashType != Svelto.Common.TypeHash<T>.hash)
throw new Exception("SimpleNativeArray: not expected type used");
throw new Exception("NativeDynamicArray: null-access");
if (hashType != TypeHash<T>.hash)
throw new Exception("NativeDynamicArray: not expected type used");
#endif
var ret = new T[Count<T>()];
var handle = GCHandle.Alloc(ret, GCHandleType.Pinned);
Buffer.MemoryCopy(_list->ptr, (void*) handle.AddrOfPinnedObject(), _list->count, _list->count);
handle.Free();
fixed (void * handle = ret)
{
Buffer.MemoryCopy(_list->ptr, handle, _list->count, _list->count);
}
return ret;
}
@@ -199,18 +210,17 @@ namespace Svelto.ECS.DataStructures
{
#if DEBUG && !PROFILE_SVELTO
if (_list == null)
throw new Exception("SimpleNativeArray: null-access");
if (hashType != Svelto.Common.TypeHash<T>.hash)
throw new Exception("SimpleNativeArray: not expected type used");
throw new Exception("NativeDynamicArray: null-access");
if (hashType != TypeHash<T>.hash)
throw new Exception("NativeDynamicArray: not expected type used");
#endif
var ret = new T[Capacity<T>()];
var handle = GCHandle.Alloc(ret, GCHandleType.Pinned);
Buffer.MemoryCopy(_list->ptr, (void*) handle.AddrOfPinnedObject(), _list->capacity, _list->capacity);
handle.Free();
fixed (void * handle = ret)
{
Buffer.MemoryCopy(_list->ptr, handle, _list->capacity, _list->capacity);
}
return ret;
}
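Both copies above swap a `GCHandle.Alloc(..., GCHandleType.Pinned)`/`Free` pair for a `fixed` statement. The `fixed` block pins the managed array only for the duration of the copy and releases the pin even if the copy throws, so the handle can no longer leak. The pattern in isolation, as a sketch with hypothetical names:

```csharp
// Hypothetical helper illustrating the pinning pattern used above:
// `fixed` pins `destinationArray` only for the scope of the block and
// unpins automatically, even when Buffer.MemoryCopy throws.
static unsafe int[] CopyToManaged(byte* source, uint byteCount)
{
    var destinationArray = new int[byteCount / sizeof(int)];
    fixed (int* destination = destinationArray)
    {
        Buffer.MemoryCopy(source, destination, byteCount, byteCount);
    }
    return destinationArray;
}
```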
@@ -11,7 +11,7 @@ namespace Svelto.ECS.DataStructures
#endif
unsafe int* data;
public static implicit operator SharedNativeInt(int t)
public static SharedNativeInt Create(int t)
{
unsafe
{
@@ -23,7 +23,7 @@ namespace Svelto.ECS.DataStructures
}
}
public static explicit operator int(SharedNativeInt t)
public static implicit operator int(SharedNativeInt t)
{
unsafe
{
@@ -40,27 +40,38 @@ namespace Svelto.ECS.DataStructures
{
unsafe
{
if (data != null)
{
Marshal.FreeHGlobal((IntPtr) data);
data = null;
}
#if DEBUG && !PROFILE_SVELTO
if (data == null)
throw new Exception("disposing already disposed data");
#endif
Marshal.FreeHGlobal((IntPtr) data);
data = null;
}
}
public void Decrement()
public int Decrement()
{
unsafe
{
Interlocked.Decrement(ref *data);
#if DEBUG && !PROFILE_SVELTO
if (data == null)
throw new Exception("null-access");
#endif
return Interlocked.Decrement(ref *data);
}
}
public void Increment()
public int Increment()
{
unsafe
{
Interlocked.Increment(ref *data);
#if DEBUG && !PROFILE_SVELTO
if (data == null)
throw new Exception("null-access");
#endif
return Interlocked.Increment(ref *data);
}
}
}
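The conversion operators trade places deliberately: the allocating direction loses its implicit operator and becomes an explicit `Create`, reading the value becomes implicit, and `Increment`/`Decrement` now surface the post-operation value that `Interlocked` already computes. A usage sketch under the new signatures (names are illustrative):

```csharp
// Allocation is now an explicit, visible act rather than a silent conversion.
var counter = SharedNativeInt.Create(0);

// The Interlocked return value is exposed, so check-and-act needs no second read.
if (counter.Increment() == 1)
{
    // first writer wins
}

int current = counter; // reading is the implicit conversion now

counter.Dispose();     // a second Dispose throws in DEBUG && !PROFILE_SVELTO builds
```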
@@ -42,9 +42,12 @@ namespace Svelto.ECS.Internal
}
catch (Exception e)
{
throw new
TypeSafeDictionaryException("trying to add an EntityComponent with the same ID more than once Entity: ".FastConcat(typeof(TValue).ToString()).FastConcat(", group ").FastConcat(groupId).FastConcat(", id ").FastConcat(tuple.Key),
e);
Svelto.Console.LogException(e,
"trying to add an EntityComponent with the same ID more than once Entity: "
.FastConcat(typeof(TValue).ToString()).FastConcat(", group ").FastConcat(groupId)
.FastConcat(", id ").FastConcat(tuple.Key));
throw;
}
}
}
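Instead of wrapping the original exception in a `TypeSafeDictionaryException`, the catch block now logs the contextual message and rethrows with a bare `throw;`, which preserves the original stack trace (`throw e;` would reset it). The shape of the pattern, reduced to a hypothetical minimum:

```csharp
try
{
    dictionary.Add(key, value); // hypothetical duplicate-key insert
}
catch (Exception e)
{
    // attach the context the raw exception lacks, then rethrow unchanged;
    // the bare `throw;` keeps the original stack trace intact
    Svelto.Console.LogException(e, "duplicate key: " + key);
    throw;
}
```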
@@ -7,8 +7,10 @@ namespace Svelto.ECS.DataStructures
struct UnsafeArray : IDisposable
{
internal unsafe byte* ptr => _ptr;
//expressed in bytes
internal uint capacity { get; private set; }
internal uint capacity => _capacity;
//expressed in bytes
internal uint count => _writeIndex;
//expressed in bytes
@@ -42,7 +44,7 @@ namespace Svelto.ECS.DataStructures
uint writeIndex = (uint) (index * sizeOf);
#if DEBUG && !PROFILE_SVELTO
if (capacity < writeIndex + sizeOf)
if (_capacity < writeIndex + sizeOf)
throw new Exception("no writing authorized");
#endif
T* buffer = (T*) ptr;
@@ -71,27 +73,27 @@ namespace Svelto.ECS.DataStructures
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal void Realloc(uint alignOf, uint newCapacity)
internal void Realloc<T>(uint newCapacity) where T : unmanaged
{
unsafe
{
byte* newPointer = null;
#if DEBUG && !PROFILE_SVELTO
if (capacity > 0 && newCapacity <= capacity)
if (_capacity > 0 && newCapacity <= _capacity)
throw new Exception("new capacity must be bigger than current");
#endif
if (newCapacity >= 0)
{
newPointer = (byte*) MemoryUtilities.Alloc(newCapacity, alignOf, allocator);
newPointer = (byte*) MemoryUtilities.Alloc<T>(newCapacity, allocator);
if (count > 0)
MemoryUtilities.MemCpy((IntPtr) newPointer, (IntPtr) ptr, count);
Unsafe.CopyBlock(newPointer, ptr, count);
}
if (ptr != null)
MemoryUtilities.Free((IntPtr) ptr, allocator);
_ptr = newPointer;
capacity = newCapacity;
_capacity = newCapacity;
}
}
@@ -105,7 +107,7 @@ namespace Svelto.ECS.DataStructures
_ptr = null;
_writeIndex = 0;
capacity = 0;
_capacity = 0;
}
}
@@ -120,5 +122,6 @@ namespace Svelto.ECS.DataStructures
#endif
unsafe byte* _ptr;
uint _writeIndex;
uint _capacity;
}
}
@@ -1,6 +1,7 @@
using System;
using System.Runtime.CompilerServices;
using Svelto.Common;
using Unity.Collections.LowLevel.Unsafe;
namespace Svelto.ECS.DataStructures
{
@@ -12,6 +13,7 @@ namespace Svelto.ECS.DataStructures
/// <summary>
/// Note: this must work inside burst, so it must follow burst restrictions
/// Note: All the svelto native structures
/// </summary>
struct UnsafeBlob : IDisposable
{
@@ -31,7 +33,7 @@ namespace Svelto.ECS.DataStructures
#endif
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal void Write<T>(in T item) where T : struct
internal void Write<T>(in T item) where T : unmanaged
{
unsafe
{
@@ -42,33 +44,38 @@ namespace Svelto.ECS.DataStructures
if (space - (int)structSize < 0)
throw new Exception("no writing authorized");
#endif
var head = _writeIndex % capacity;
var writeHead = _writeIndex % capacity;
if (head + structSize <= capacity)
if (writeHead + structSize <= capacity)
{
Unsafe.Write(ptr + head, item);
Unsafe.Write(_ptr + writeHead, item);
}
else
//copy with wrap, will start to copy and wrap for the remainder
{
var byteCountToEnd = capacity - head;
//need a copy to be sure that the GC won't move the data around
T copyItem = item;
void* asPointer = Unsafe.AsPointer(ref copyItem);
MemoryUtilities.MemCpy((IntPtr) (ptr + head), (IntPtr) asPointer, byteCountToEnd);
var restCount = structSize - byteCountToEnd;
//todo: check the difference between unaligned and standard
MemoryUtilities.MemCpy((IntPtr) ptr, (IntPtr) ((byte *)asPointer + byteCountToEnd), restCount);
var byteCountToEnd = capacity - writeHead;
fixed (T* readFrom = &item)
{
//read and copy the first portion of item until the end of the stream
Unsafe.CopyBlock(_ptr + writeHead, readFrom, byteCountToEnd);
var restCount = structSize - byteCountToEnd;
//read and copy the remainder
var @from = (byte*) readFrom;
Unsafe.CopyBlock(_ptr, @from + byteCountToEnd, restCount);
}
}
uint paddedStructSize = (uint) Align4(structSize);
uint paddedStructSize = Align4(structSize);
_writeIndex += paddedStructSize;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal void WriteUnaligned<T>(in T item) where T : struct
internal void WriteUnaligned<T>(in T item) where T : unmanaged
{
unsafe
{
@@ -82,15 +89,18 @@ namespace Svelto.ECS.DataStructures
var pointer = _writeIndex % capacity;
if (pointer + structSize <= capacity)
Unsafe.Write(ptr + pointer, item);
Unsafe.Write(_ptr + pointer, item);
else
{
var byteCount = capacity - pointer;
T copyItem = item;
var asPointer = Unsafe.AsPointer(ref copyItem);
Unsafe.CopyBlock(ptr + pointer, asPointer, byteCount);
var restCount = structSize - byteCount;
Unsafe.CopyBlock(ptr, (byte *)asPointer + byteCount, restCount);
fixed (T* readFrom = &item)
{
Unsafe.CopyBlockUnaligned(_ptr + pointer, readFrom, byteCount);
var restCount = structSize - byteCount;
var @from = (byte*) readFrom;
Unsafe.CopyBlockUnaligned(_ptr, @from + byteCount, restCount);
}
}
_writeIndex += structSize;
@@ -98,7 +108,7 @@ namespace Svelto.ECS.DataStructures
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal T Read<T>() where T : struct
internal T Read<T>() where T : unmanaged
{
unsafe
{
@@ -111,7 +121,7 @@ namespace Svelto.ECS.DataStructures
throw new Exception("unexpected read");
#endif
var head = _readIndex % capacity;
uint paddedStructSize = (uint) Align4(structSize);
uint paddedStructSize = Align4(structSize);
_readIndex += paddedStructSize;
if (_readIndex == _writeIndex)
@@ -125,20 +135,18 @@ namespace Svelto.ECS.DataStructures
if (head + paddedStructSize <= capacity)
{
return Unsafe.Read<T>(ptr + head);
return Unsafe.Read<T>(_ptr + head);
}
else
{
T item = default;
var byteCountToEnd = capacity - head;
var asPointer = Unsafe.AsPointer(ref item);
//todo: check the difference between unaligned and standard
MemoryUtilities.MemCpy((IntPtr) asPointer, (IntPtr) (ptr + head), byteCountToEnd);
var restCount = structSize - byteCountToEnd;
MemoryUtilities.MemCpy((IntPtr) ((byte *)asPointer + byteCountToEnd), (IntPtr) ptr, restCount);
return item;
}
T item = default;
T* destination = &item;
var byteCountToEnd = capacity - head;
Unsafe.CopyBlock(destination, _ptr + head, byteCountToEnd);
var restCount = structSize - byteCountToEnd;
Unsafe.CopyBlock((byte*) destination + byteCountToEnd, ptr, restCount);
return item;
}
}
@@ -149,18 +157,18 @@ namespace Svelto.ECS.DataStructures
{
var sizeOf = (uint) MemoryUtilities.SizeOf<T>();
T* buffer = (T *)(byte*) (ptr + _writeIndex);
T* buffer = (T *)(_ptr + _writeIndex);
#if DEBUG && !PROFILE_SVELTO
if (_writeIndex > capacity) throw new Exception($"can't reserve if the writeIndex wrapped around the capacity, writeIndex {_writeIndex} capacity {capacity}");
if (_writeIndex + sizeOf > capacity) throw new Exception("out of bound reserving");
#endif
index = new UnsafeArrayIndex()
index = new UnsafeArrayIndex
{
capacity = capacity
, index = _writeIndex
};
var align4 = (uint) Align4(sizeOf);
var align4 = Align4(sizeOf);
_writeIndex += align4;
return ref buffer[0];
@@ -176,14 +184,14 @@ namespace Svelto.ECS.DataStructures
var size = MemoryUtilities.SizeOf<T>();
if (index.index + size > capacity) throw new Exception($"out of bound access, index {index.index} size {size} capacity {capacity}");
#endif
T* buffer = (T*) (byte*)(ptr + index.index);
T* buffer = (T*) (_ptr + index.index);
return ref buffer[0];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal void Realloc(uint alignOf, uint newCapacity)
internal void Realloc<T>(uint newCapacity) where T : unmanaged
{
unsafe
{
@@ -197,7 +205,7 @@ namespace Svelto.ECS.DataStructures
#endif
if (newCapacity > 0)
{
newPointer = (byte*) MemoryUtilities.Alloc(newCapacity, alignOf, allocator);
newPointer = (byte*) MemoryUtilities.Alloc<T>(newCapacity, allocator);
if (size > 0)
{
var readerHead = _readIndex % capacity;
@@ -206,7 +214,8 @@ namespace Svelto.ECS.DataStructures
if (readerHead < writerHead)
{
//copy to the new pointer, from the reader position
MemoryUtilities.MemCpy((IntPtr) newPointer, (IntPtr) (ptr + readerHead), _writeIndex - _readIndex);
uint currentSize = _writeIndex - _readIndex;
Unsafe.CopyBlock(newPointer, _ptr + readerHead, currentSize);
}
//the assumption is that if size > 0 (so readerPointer and writerPointer are not the same)
//writerHead wrapped and reached readerHead. so I have to copy from readerHead to the end
@@ -214,15 +223,15 @@ namespace Svelto.ECS.DataStructures
else
{
var byteCountToEnd = capacity - readerHead;
MemoryUtilities.MemCpy((IntPtr) newPointer, (IntPtr) (ptr + readerHead), byteCountToEnd);
MemoryUtilities.MemCpy((IntPtr) (newPointer + byteCountToEnd), (IntPtr) ptr, writerHead);
Unsafe.CopyBlock(newPointer, _ptr + readerHead, byteCountToEnd);
Unsafe.CopyBlock(newPointer + byteCountToEnd, _ptr, writerHead);
}
}
}
if (ptr != null)
MemoryUtilities.Free((IntPtr) ptr, allocator);
if (_ptr != null)
MemoryUtilities.Free((IntPtr) _ptr, allocator);
_writeIndex = size;
_readIndex = 0;
@@ -237,8 +246,8 @@ namespace Svelto.ECS.DataStructures
{
unsafe
{
if (ptr != null)
MemoryUtilities.Free((IntPtr) ptr, allocator);
if (_ptr != null)
MemoryUtilities.Free((IntPtr) _ptr, allocator);
_ptr = null;
_writeIndex = 0;
@@ -259,7 +268,7 @@ namespace Svelto.ECS.DataStructures
}
#if UNITY_ECS
[global::Unity.Collections.LowLevel.Unsafe.NativeDisableUnsafePtrRestriction]
[NativeDisableUnsafePtrRestriction]
#endif
unsafe byte* _ptr;
uint _writeIndex, _readIndex;
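Both `Write` and `Read` advance their indices by `Align4(structSize)`, so every item in the blob starts on a 4-byte boundary and the read cursor stays in lockstep with the write cursor. The helper itself is outside these hunks; a plausible one-liner consistent with its call sites (an assumption, not the verified source):

```csharp
// Hypothetical reconstruction of Align4, matching how it is called above:
// round a byte count up to the next multiple of 4.
static uint Align4(uint value)
{
    return (value + 3u) & ~3u;
}
```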
@@ -60,9 +60,11 @@ namespace Svelto.ECS
{
throw new NotImplementedException("can't run this until I add the checks!");
#pragma warning disable 162
_enginesRoot.Target.QueueEntitySubmitOperation(
new EntitySubmitOperation(EntitySubmitOperationType.SwapGroup, new EGID(0, fromGroupID),
new EGID(0, toGroupID)));
#pragma warning restore 162
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -291,6 +291,16 @@ namespace Svelto.ECS
}
}
public QueryGroups CreateQueryGroup<T>() where T : IEntityComponent
{
return new QueryGroups(FindGroups<T>());
}
public bool FoundInGroups<T1>() where T1 : IEntityComponent
{
return _groupsPerEntity.ContainsKey(TypeRefWrapper<T1>.wrapper);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
T[] QueryEntitiesAndIndexInternal<T>(EGID entityGID, out uint index) where T : struct, IEntityComponent
{
@@ -344,12 +354,16 @@ namespace Svelto.ECS
{
internal static readonly T[] emptyArray = new T[0];
}
internal FasterDictionary<uint, ITypeSafeDictionary> FindGroups<T1>() where T1 : unmanaged, IEntityComponent
internal FasterDictionary<uint, ITypeSafeDictionary> FindGroups<T1>() where T1 : IEntityComponent
{
if (_groupsPerEntity.ContainsKey(TypeRefWrapper<T1>.wrapper) == false)
return _emptyDictionary;
return _groupsPerEntity[TypeRefWrapper<T1>.wrapper];
}
readonly FasterDictionary<uint, ITypeSafeDictionary> _emptyDictionary = new FasterDictionary<uint, ITypeSafeDictionary>();
readonly EntitiesStream _entityStream;
//grouped set of entity views, this is the standard way to handle entity views entity views are grouped per
@@ -10,7 +10,7 @@ namespace Svelto.ECS
{
public EntityCollection(T[] array, uint count) : this()
{
_buffer.Set(array);
_buffer.Set(array, count);
_count = count;
}
@@ -35,13 +35,12 @@ namespace Svelto.ECS
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public NB<NT> ToNativeBuffer<NT>() where NT : unmanaged, T
{
return new NB<NT>(Unsafe.As<NT[]>(_buffer.ToManagedArray()), _count);
return new NB<NT>(_buffer.Pin(), _count, _buffer.capacity);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public MB<T> ToBuffer(out uint count)
public MB<T> ToBuffer()
{
count = _count;
return _buffer;
}
@@ -120,7 +119,7 @@ namespace Svelto.ECS
public BT<MB<T1>, MB<T2>> ToBuffers()
{
var bufferTuple = new BT<MB<T1>, MB<T2>>
(_array1.ToBuffer(out _), _array2.ToBuffer(out _), count);
(_array1.ToBuffer(), _array2.ToBuffer(), count);
return bufferTuple;
}
@@ -213,7 +212,7 @@ namespace Svelto.ECS
public BT<MB<T1>, MB<T2>, MB<T3>> ToBuffers()
{
var bufferTuple = new BT<MB<T1>, MB<T2>, MB<T3>>
(_array1.ToBuffer(out _), _array2.ToBuffer(out _), _array3.ToBuffer(out _), count);
(_array1.ToBuffer(), _array2.ToBuffer(), _array3.ToBuffer(), count);
return bufferTuple;
}
@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using Svelto.DataStructures;
namespace Svelto.ECS
@@ -9,7 +9,14 @@ namespace Svelto.ECS
ExclusiveGroupStruct[] groups)
where T1 : unmanaged, IEntityComponent where T2 : unmanaged, IEntityComponent
{
return new NativeGroupsEnumerable<T1, T2>(db, groups);
return new NativeGroupsEnumerable<T1, T2>(db, groups, (uint)groups.Length);
}
public static NativeGroupsEnumerable<T1, T2> NativeGroupsIterator<T1, T2>(this EntitiesDB db,
FasterList<ExclusiveGroupStruct> groups)
where T1 : unmanaged, IEntityComponent where T2 : unmanaged, IEntityComponent
{
return new NativeGroupsEnumerable<T1, T2>(db, groups, groups.count);
}
public static NativeGroupsEnumerable<T1, T2, T3> NativeGroupsIterator
@@ -1,17 +1,29 @@
using System;
using Svelto.DataStructures;
using Svelto.ECS.Internal;
namespace Svelto.ECS
{
public struct NativeAllGroupsEnumerable<T1> where T1 : unmanaged, IEntityComponent
public readonly struct NativeAllGroupsEnumerable<T1> where T1 : unmanaged, IEntityComponent
{
public NativeAllGroupsEnumerable(EntitiesDB db)
{
_db = db;
}
public struct NativeGroupsIterator
{
public struct CurrentGroup: IDisposable
{
public NB<T1> buffer;
public ExclusiveGroupStruct group;
public void Dispose()
{
buffer.Dispose();
}
}
public NativeGroupsIterator(EntitiesDB db) : this()
{
_db = db.FindGroups<T1>().GetEnumerator();
@@ -28,8 +40,9 @@ namespace Svelto.ECS
if (typeSafeDictionary.Count == 0) continue;
_array = new EntityCollection<T1>(typeSafeDictionary.GetValuesArray(out var count), count)
_array.buffer = new EntityCollection<T1>(typeSafeDictionary.GetValuesArray(out var count), count)
.ToNativeBuffer<T1>();
_array.@group = new ExclusiveGroupStruct(group.Key);
return true;
}
@@ -41,11 +54,10 @@ namespace Svelto.ECS
{
}
public NB<T1> Current => _array;
readonly FasterDictionary<uint, ITypeSafeDictionary>.FasterDictionaryKeyValueEnumerator _db;
public CurrentGroup Current => _array;
NB<T1> _array;
FasterDictionary<uint, ITypeSafeDictionary>.FasterDictionaryKeyValueEnumerator _db;
CurrentGroup _array;
}
public NativeGroupsIterator GetEnumerator()
@@ -101,7 +113,7 @@ namespace Svelto.ECS
public BT<NB<T1>, NB<T2>> Current => _array;
readonly FasterDictionary<uint, ITypeSafeDictionary>.FasterDictionaryKeyValueEnumerator _db;
FasterDictionary<uint, ITypeSafeDictionary>.FasterDictionaryKeyValueEnumerator _db;
BT<NB<T1>, NB<T2>> _array;
}
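`Current` now yields a `CurrentGroup` instead of the bare buffer, pairing the `NB<T1>` with the group it came from; it is disposable because `ToNativeBuffer` now pins a managed array (`_buffer.Pin()`) that must be released. A consumption sketch, with a hypothetical `Position` component and an `entitiesDB` instance assumed in scope:

```csharp
foreach (var current in new NativeAllGroupsEnumerable<Position>(entitiesDB))
{
    using (current) // releases the pinned native buffer for this group
    {
        NB<Position> positions = current.buffer;
        ExclusiveGroupStruct group = current.group;
        // ... process this group's positions ...
    }
}
```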
@@ -1,12 +1,14 @@
using DBC.ECS;
using Svelto.DataStructures;
namespace Svelto.ECS
{
public struct NativeGroupsEnumerable<T1, T2, T3, T4>
where T1 : unmanaged, IEntityComponent where T2 : unmanaged, IEntityComponent
where T3 : unmanaged, IEntityComponent where T4 : unmanaged, IEntityComponent
public readonly struct NativeGroupsEnumerable<T1, T2, T3, T4> where T1 : unmanaged, IEntityComponent
where T2 : unmanaged, IEntityComponent
where T3 : unmanaged, IEntityComponent
where T4 : unmanaged, IEntityComponent
{
readonly EntitiesDB _db;
readonly EntitiesDB _db;
readonly ExclusiveGroupStruct[] _groups;
public NativeGroupsEnumerable(EntitiesDB db, ExclusiveGroupStruct[] groups)
@@ -29,16 +31,20 @@ namespace Svelto.ECS
//attention, the while is necessary to skip empty groups
while (++_indexGroup < _groups.Length)
{
var entityCollection = _entitiesDB.QueryEntities<T1, T2, T3>(_groups[_indexGroup]);
if (entityCollection.count == 0) continue;
var entityCollection1 = _entitiesDB.QueryEntities<T1, T2, T3>(_groups[_indexGroup]);
if (entityCollection1.count == 0)
continue;
var entityCollection2 = _entitiesDB.QueryEntities<T4>(_groups[_indexGroup]);
if (entityCollection2.count == 0) continue;
DBC.ECS.Check.Assert(entityCollection.count == entityCollection2.count, "congratulations, you found a bug in Svelto, please report it");
if (entityCollection2.count == 0)
continue;
Check.Assert(entityCollection1.count == entityCollection2.count
, "congratulations, you found a bug in Svelto, please report it");
BT<NB<T1>, NB<T2>, NB<T3>> array = entityCollection.ToNativeBuffers<T1, T2, T3>();
NB<T4> array2 = entityCollection2.ToNativeBuffer<T4>();
_array= new BT<NB<T1>, NB<T2>, NB<T3>, NB<T4>>(array.buffer1, array.buffer2, array.buffer3, array2, entityCollection.count);
var array = entityCollection1.ToNativeBuffers<T1, T2, T3>();
var array2 = entityCollection2.ToNativeBuffer<T4>();
_array = new BT<NB<T1>, NB<T2>, NB<T3>, NB<T4>>(array.buffer1, array.buffer2, array.buffer3, array2
, entityCollection1.count);
break;
}
@@ -51,18 +57,19 @@ namespace Svelto.ECS
readonly ExclusiveGroupStruct[] _groups;
int _indexGroup;
int _indexGroup;
BT<NB<T1>, NB<T2>, NB<T3>, NB<T4>> _array;
readonly EntitiesDB _entitiesDB;
readonly EntitiesDB _entitiesDB;
}
public NativeGroupsIterator GetEnumerator() { return new NativeGroupsIterator(_db, _groups); }
}
public struct NativeGroupsEnumerable<T1, T2, T3>
where T1 : unmanaged, IEntityComponent where T2 : unmanaged, IEntityComponent where T3 : unmanaged, IEntityComponent
public readonly struct NativeGroupsEnumerable<T1, T2, T3> where T1 : unmanaged, IEntityComponent
where T2 : unmanaged, IEntityComponent
where T3 : unmanaged, IEntityComponent
{
readonly EntitiesDB _db;
readonly EntitiesDB _db;
readonly ExclusiveGroupStruct[] _groups;
public NativeGroupsEnumerable(EntitiesDB db, ExclusiveGroupStruct[] groups)
@@ -86,7 +93,8 @@ namespace Svelto.ECS
while (++_indexGroup < _groups.Length)
{
var entityCollection = _entitiesDB.QueryEntities<T1, T2, T3>(_groups[_indexGroup]);
if (entityCollection.count == 0) continue;
if (entityCollection.count == 0)
continue;
_array = entityCollection.ToNativeBuffers<T1, T2, T3>();
break;
@@ -109,20 +117,29 @@ namespace Svelto.ECS
public NativeGroupsIterator GetEnumerator() { return new NativeGroupsIterator(_db, _groups); }
}
public struct NativeGroupsEnumerable<T1, T2> where T1 : unmanaged, IEntityComponent where T2 : unmanaged, IEntityComponent
public struct NativeGroupsEnumerable<T1, T2> where T1 : unmanaged, IEntityComponent
where T2 : unmanaged, IEntityComponent
{
public NativeGroupsEnumerable(EntitiesDB db, ExclusiveGroupStruct[] groups)
public NativeGroupsEnumerable(EntitiesDB db, ExclusiveGroupStruct[] groups, uint groupsLength)
{
_db = db;
_db = db;
_groups = groups;
_groupsLength = groupsLength;
}
public NativeGroupsEnumerable(EntitiesDB db, FasterList<ExclusiveGroupStruct> groups, uint groupsLength)
{
_db = db;
_groups = groups.ToArrayFast(out _);
_groupsLength = groupsLength;
}
public struct NativeGroupsIterator
{
public NativeGroupsIterator(EntitiesDB db, ExclusiveGroupStruct[] groups) : this()
{
_db = db;
_groups = groups;
_db = db;
_groups = groups;
_indexGroup = -1;
}
@@ -132,7 +149,8 @@ namespace Svelto.ECS
while (++_indexGroup < _groups.Length)
{
var entityCollection = _db.QueryEntities<T1, T2>(_groups[_indexGroup]);
if (entityCollection.count == 0) continue;
if (entityCollection.count == 0)
continue;
_array = entityCollection.ToNativeBuffers<T1, T2>();
break;
@@ -141,29 +159,24 @@ namespace Svelto.ECS
return _indexGroup < _groups.Length;
}
public void Reset()
{
_indexGroup = -1;
}
public void Reset() { _indexGroup = -1; }
public BT<NB<T1>, NB<T2>> Current => _array;
readonly EntitiesDB _db;
readonly EntitiesDB _db;
readonly ExclusiveGroupStruct[] _groups;
int _indexGroup;
BT<NB<T1>, NB<T2>> _array;
}
public NativeGroupsIterator GetEnumerator()
{
return new NativeGroupsIterator(_db, _groups);
}
public NativeGroupsIterator GetEnumerator() { return new NativeGroupsIterator(_db, _groups); }
readonly EntitiesDB _db;
readonly EntitiesDB _db;
readonly ExclusiveGroupStruct[] _groups;
readonly uint _groupsLength;
}
public struct NativeGroupsEnumerable<T1> where T1 : unmanaged, IEntityComponent
{
public NativeGroupsEnumerable(EntitiesDB db, ExclusiveGroupStruct[] groups)
@@ -187,7 +200,8 @@ namespace Svelto.ECS
while (++_indexGroup < _groups.Length)
{
var entityCollection = _db.QueryEntities<T1>(_groups[_indexGroup]);
if (entityCollection.count == 0) continue;
if (entityCollection.count == 0)
continue;
_array = entityCollection.ToNativeBuffer<T1>();
break;
@@ -196,26 +210,20 @@ namespace Svelto.ECS
return _indexGroup < _groups.Length;
}
public void Reset()
{
_indexGroup = -1;
}
public void Reset() { _indexGroup = -1; }
public NB<T1> Current => _array;
readonly EntitiesDB _db;
readonly EntitiesDB _db;
readonly ExclusiveGroupStruct[] _groups;
int _indexGroup;
int _indexGroup;
NB<T1> _array;
}
public NativeGroupsIterator GetEnumerator()
{
return new NativeGroupsIterator(_db, _groups);
}
public NativeGroupsIterator GetEnumerator() { return new NativeGroupsIterator(_db, _groups); }
readonly EntitiesDB _db;
readonly EntitiesDB _db;
readonly ExclusiveGroupStruct[] _groups;
}
}
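Combined with the new `FasterList<ExclusiveGroupStruct>` overload of `NativeGroupsIterator`, the group lists produced by `CreateQueryGroup`/`QueryGroups` can feed the iterator directly, without an intermediate array. A sketch with hypothetical `Position`/`Velocity` components and an `entitiesDB` instance assumed in scope:

```csharp
// `entitiesDB` and the component types are assumptions for illustration.
FasterList<ExclusiveGroupStruct> groups = entitiesDB.CreateQueryGroup<Position>().groups;

foreach (var buffers in entitiesDB.NativeGroupsIterator<Position, Velocity>(groups))
{
    NB<Position> positions  = buffers.buffer1; // field names as used in the hunks above
    NB<Velocity> velocities = buffers.buffer2;
    // ... per-group processing ...
}
```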
@@ -10,14 +10,14 @@ namespace Svelto.ECS
public partial class EnginesRoot
{
//todo: I very likely don't need to create one for each native entity factory, the same can be reused
readonly AtomicRingBuffers _addOperationQueue =
new AtomicRingBuffers(Common.Allocator.Persistent, JobsUtility.MaxJobThreadCount + 1);
readonly AtomicNativeBags _addOperationQueue =
new AtomicNativeBags(Common.Allocator.Persistent, JobsUtility.MaxJobThreadCount + 1);
readonly AtomicRingBuffers _removeOperationQueue =
new AtomicRingBuffers(Common.Allocator.Persistent, JobsUtility.MaxJobThreadCount + 1);
readonly AtomicNativeBags _removeOperationQueue =
new AtomicNativeBags(Common.Allocator.Persistent, JobsUtility.MaxJobThreadCount + 1);
readonly AtomicRingBuffers _swapOperationQueue =
new AtomicRingBuffers(Common.Allocator.Persistent, JobsUtility.MaxJobThreadCount + 1);
readonly AtomicNativeBags _swapOperationQueue =
new AtomicNativeBags(Common.Allocator.Persistent, JobsUtility.MaxJobThreadCount + 1);
NativeEntityRemove ProvideNativeEntityRemoveQueue<T>() where T : IEntityDescriptor, new()
{
@@ -142,10 +142,10 @@ namespace Svelto.ECS
public readonly struct NativeEntityRemove
{
readonly AtomicRingBuffers _removeQueue;
readonly AtomicNativeBags _removeQueue;
readonly uint _indexRemove;
internal NativeEntityRemove(AtomicRingBuffers EGIDsToRemove, uint indexRemove)
internal NativeEntityRemove(AtomicNativeBags EGIDsToRemove, uint indexRemove)
{
_removeQueue = EGIDsToRemove;
_indexRemove = indexRemove;
@@ -162,10 +162,10 @@ namespace Svelto.ECS
public readonly struct NativeEntitySwap
{
readonly AtomicRingBuffers _swapQueue;
readonly AtomicNativeBags _swapQueue;
readonly uint _indexSwap;
internal NativeEntitySwap(AtomicRingBuffers EGIDsToSwap, uint indexSwap)
internal NativeEntitySwap(AtomicNativeBags EGIDsToSwap, uint indexSwap)
{
_swapQueue = EGIDsToSwap;
_indexSwap = indexSwap;
@@ -188,10 +188,10 @@ namespace Svelto.ECS
public readonly struct NativeEntityFactory
{
readonly AtomicRingBuffers _addOperationQueue;
readonly AtomicNativeBags _addOperationQueue;
readonly uint _index;
internal NativeEntityFactory(AtomicRingBuffers addOperationQueue, uint index)
internal NativeEntityFactory(AtomicNativeBags addOperationQueue, uint index)
{
_index = index;
_addOperationQueue = addOperationQueue;
@@ -1,4 +1,8 @@
#if UNITY_ECS
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Threading;
using Svelto.DataStructures;
using Svelto.ECS.DataStructures;
@@ -30,14 +34,61 @@ namespace Svelto.ECS
{
void FillFromByteArray(EntityComponentInitializer init, NativeBag buffer);
}
static class UnmanagedTypeExtensions
{
private static Dictionary<Type, bool> cachedTypes =
new Dictionary<Type, bool>();
public static bool IsUnManaged<T>() { return typeof(T).IsUnManaged(); }
public static bool IsUnManaged(this Type t)
{
var result = false;
if (cachedTypes.ContainsKey(t))
return cachedTypes[t];
else if (t.IsPrimitive || t.IsPointer || t.IsEnum)
result = true;
else if (t.IsGenericType || !t.IsValueType)
result = false;
else
result = t.GetFields(BindingFlags.Public |
BindingFlags.NonPublic | BindingFlags.Instance)
.All(x => x.FieldType.IsUnManaged());
cachedTypes.Add(t, result);
return result;
}
}
delegate void ForceUnmanagedCast<T>(EntityComponentInitializer init, NativeBag buffer) where T : struct, IEntityComponent;
class Filler<T>: IFiller where T : struct, IEntityComponent
{
static readonly ForceUnmanagedCast<T> _action;
static Filler()
{
var method = typeof(Trick).GetMethod(nameof(Trick.ForceUnmanaged)).MakeGenericMethod(typeof(T));
_action = (ForceUnmanagedCast<T>) Delegate.CreateDelegate(typeof(ForceUnmanagedCast<T>), method);
}
//it's an internal interface
void IFiller.FillFromByteArray(EntityComponentInitializer init, NativeBag buffer)
{
var component = buffer.Dequeue<T>();
DBC.ECS.Check.Require(UnmanagedTypeExtensions.IsUnManaged<T>() == true, "invalid type used");
_action(init, buffer);
}
static class Trick
{
public static void ForceUnmanaged<U>(EntityComponentInitializer init, NativeBag buffer) where U : unmanaged, IEntityComponent
{
var component = buffer.Dequeue<U>();
init.Init(component);
init.Init(component);
}
}
}
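`Filler<T>` only carries a `struct` constraint, but the dequeue-and-init path needs `unmanaged`; the static constructor bridges the two once per closed type by binding `Trick.ForceUnmanaged<T>` through `MakeGenericMethod` plus `Delegate.CreateDelegate`, while `IsUnManaged` guards the cast at runtime with a cached reflection walk. Illustrative checks against that walk:

```csharp
// Expected results of the reflection walk above (illustrative, not exhaustive):
bool a = typeof(int).IsUnManaged();                   // true: primitive
bool b = typeof(string).IsUnManaged();                // false: reference type
bool c = UnmanagedTypeExtensions.IsUnManaged<EGID>(); // true: all fields are value types
```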
@@ -48,7 +99,7 @@ namespace Svelto.ECS
internal static void Register<T>(IFiller entityBuilder) where T : struct, IEntityComponent
{
var location = EntityComponentID<T>.ID.Data = GlobalTypeID.NextID<T>();
TYPE_IDS.Add(location, entityBuilder);
TYPE_IDS.AddAt(location, entityBuilder);
}
internal static IFiller GetTypeFromID(uint typeId)
@@ -6,16 +6,16 @@ namespace Svelto.ECS
{
public readonly struct NativeEGIDMapper<T>:IDisposable where T : unmanaged, IEntityComponent
{
readonly NativeFasterDictionaryStruct<uint, T> map;
readonly NativeFasterDictionary<uint, T> map;
public ExclusiveGroupStruct groupID { get; }
public NativeEGIDMapper(ExclusiveGroupStruct groupStructId, NativeFasterDictionaryStruct<uint, T> toNative):this()
public NativeEGIDMapper(ExclusiveGroupStruct groupStructId, NativeFasterDictionary<uint, T> toNative):this()
{
groupID = groupStructId;
map = toNative;
}
public uint Count => map.Count;
public uint Count => map.count;
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ref T Entity(uint entityID)
@@ -48,7 +48,7 @@ namespace Svelto.ECS
{
if (map.TryFindIndex(entityID, out index))
{
return new NB<T>(map.unsafeValues, map.Count);
return new NB<T>(map.unsafeValues, map.count, map.capacity);
}
throw new ECSException("Entity not found");
@@ -58,7 +58,7 @@ namespace Svelto.ECS
{
if (map.TryFindIndex(entityID, out index))
{
array = new NB<T>(map.unsafeValues, map.Count);
array = new NB<T>(map.unsafeValues, map.count, map.capacity);
return true;
}
@@ -73,7 +73,7 @@ namespace Svelto.ECS
public bool Exists(uint idEntityId)
{
return map.Count > 0 && map.TryFindIndex(idEntityId, out _);
return map.count > 0 && map.TryFindIndex(idEntityId, out _);
}
}
}
@@ -0,0 +1,38 @@
using Svelto.DataStructures;
using Svelto.ECS.Internal;
namespace Svelto.ECS
{
public struct QueryGroups
{
public readonly FasterList<ExclusiveGroupStruct> groups;
public QueryGroups(FasterDictionary<uint, ITypeSafeDictionary> findGroups)
{
var findGroupsCount = findGroups.count;
groups = new FasterList<ExclusiveGroupStruct>(findGroupsCount);
foreach (var keyvalue in findGroups)
{
groups.Add(new ExclusiveGroupStruct(keyvalue.Key));
}
}
public QueryGroups Except(ExclusiveGroupStruct[] groupsToIgnore)
{
var groupsCount = groups.count;
for (int i = 0; i < groupsToIgnore.Length; i++)
{
for (int j = 0; j < groupsCount; j++)
if (groupsToIgnore[i] == groups[j])
{
groups.UnorderedRemoveAt(j);
j--;
groupsCount--;
}
}
return this;
}
}
}
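`Except` filters the discovered groups in place: `UnorderedRemoveAt` swaps the last element into the hole, so the exclusion costs no allocation but does not preserve discovery order (hence the `j--`/`groupsCount--` bookkeeping to re-examine the swapped-in element). A usage sketch, with hypothetical group constants and an `entitiesDB` instance assumed in scope:

```csharp
// DisabledGroup and PooledGroup are hypothetical ExclusiveGroupStruct values.
QueryGroups query = entitiesDB.CreateQueryGroup<Position>()   // every group holding Position
                              .Except(new[] { DisabledGroup, PooledGroup });

// query.groups now holds the surviving groups, in no guaranteed order
```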
@@ -2,7 +2,7 @@ using System;
namespace Svelto.ECS.Internal
{
public delegate void SetEGIDWithoutBoxingActionCast<T>(ref T target, EGID egid) where T : struct, IEntityComponent;
delegate void SetEGIDWithoutBoxingActionCast<T>(ref T target, EGID egid) where T : struct, IEntityComponent;
static class SetEGIDWithoutBoxing<T> where T : struct, IEntityComponent
{