From 8722a9920c1f6119bf6e769cba270e63097f8e25 Mon Sep 17 00:00:00 2001
From: chai <215380520@qq.com>
Date: Thu, 23 May 2024 10:08:29 +0800
Subject: + astar project
---
.../Utilities/JobDependencyTracker.cs | 799 +++++++++++++++++++++
1 file changed, 799 insertions(+)
create mode 100644 Other/AstarPathfindingDemo/Packages/com.arongranberg.astar/Utilities/JobDependencyTracker.cs
diff --git a/Other/AstarPathfindingDemo/Packages/com.arongranberg.astar/Utilities/JobDependencyTracker.cs b/Other/AstarPathfindingDemo/Packages/com.arongranberg.astar/Utilities/JobDependencyTracker.cs
new file mode 100644
index 0000000..19bb3b9
--- /dev/null
+++ b/Other/AstarPathfindingDemo/Packages/com.arongranberg.astar/Utilities/JobDependencyTracker.cs
@@ -0,0 +1,799 @@
+// #define DEBUG_JOBS
+namespace Pathfinding.Jobs {
+ using System.Reflection;
+ using Unity.Collections;
+ using Unity.Jobs;
+ using System.Collections.Generic;
+ using Unity.Collections.LowLevel.Unsafe;
+ using Pathfinding.Util;
+ using System.Runtime.InteropServices;
+ using System.Diagnostics;
+
+ /// <summary>
+ /// Disable the check that prevents jobs from including uninitialized native arrays open for reading.
+ ///
+ /// Sometimes jobs have to include a readable native array that starts out uninitialized.
+ /// The job might for example write to it and later read from it in the same job.
+ ///
+ /// See:
+ /// </summary>
+ class DisableUninitializedReadCheckAttribute : System.Attribute {
+ }
+
+ public interface IArenaDisposable {
+ void DisposeWith(DisposeArena arena);
+ }
+
+ /// Convenient collection of items that can be disposed together
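+ ///
+ /// A minimal usage sketch (illustrative only; the array name below is hypothetical):
+ /// <code>
+ /// var arena = new DisposeArena();
+ /// var weights = new NativeArray<float>(128, Allocator.Persistent);
+ /// arena.Add(weights);
+ /// // ... run jobs that use the array, then release everything in one call ...
+ /// arena.DisposeAll();
+ /// </code>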
+ public class DisposeArena {
+ List<NativeArray<byte> > buffer;
+ List<NativeList<byte> > buffer2;
+ List<NativeQueue<byte> > buffer3;
+ List<GCHandle> gcHandles;
+
+ public void Add<T>(NativeArray<T> data) where T : unmanaged {
+ if (buffer == null) buffer = ListPool<NativeArray<byte> >.Claim();
+ buffer.Add(data.Reinterpret<byte>(UnsafeUtility.SizeOf<T>()));
+ }
+
+ public void Add<T>(NativeList<T> data) where T : unmanaged {
+ // SAFETY: This is safe because NativeList<T> and NativeList<byte> have the same memory layout.
+ var byteList = Unity.Collections.LowLevel.Unsafe.UnsafeUtility.As<NativeList<T>, NativeList<byte> >(ref data);
+ if (buffer2 == null) buffer2 = ListPool<NativeList<byte> >.Claim();
+ buffer2.Add(byteList);
+ }
+
+ public void Add<T>(NativeQueue<T> data) where T : unmanaged {
+ // SAFETY: This is safe because NativeQueue<T> and NativeQueue<byte> have the same memory layout.
+ var byteList = Unity.Collections.LowLevel.Unsafe.UnsafeUtility.As<NativeQueue<T>, NativeQueue<byte> >(ref data);
+ if (buffer3 == null) buffer3 = ListPool<NativeQueue<byte> >.Claim();
+ buffer3.Add(byteList);
+ }
+
+ public void Remove<T>(NativeArray<T> data) where T : unmanaged {
+ if (buffer == null) return;
+ unsafe {
+ var ptr = NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(data);
+ for (int i = 0; i < buffer.Count; i++) {
+ if (NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(buffer[i]) == ptr) {
+ buffer.RemoveAtSwapBack(i);
+ return;
+ }
+ }
+ }
+ }
+
+ public void Add<T>(T data) where T : IArenaDisposable {
+ data.DisposeWith(this);
+ }
+
+ public void Add (GCHandle handle) {
+ if (gcHandles == null) gcHandles = ListPool<GCHandle>.Claim();
+ gcHandles.Add(handle);
+ }
+
+ /// <summary>
+ /// Dispose all items in the arena.
+ /// This also clears the arena and makes it available for reuse.
+ /// </summary>
+ public void DisposeAll () {
+ UnityEngine.Profiling.Profiler.BeginSample("Disposing");
+ if (buffer != null) {
+ for (int i = 0; i < buffer.Count; i++) buffer[i].Dispose();
+ ListPool<NativeArray<byte> >.Release(ref buffer);
+ }
+ if (buffer2 != null) {
+ for (int i = 0; i < buffer2.Count; i++) buffer2[i].Dispose();
+ ListPool<NativeList<byte> >.Release(ref buffer2);
+ }
+ if (buffer3 != null) {
+ for (int i = 0; i < buffer3.Count; i++) buffer3[i].Dispose();
+ ListPool<NativeQueue<byte> >.Release(ref buffer3);
+ }
+ if (gcHandles != null) {
+ for (int i = 0; i < gcHandles.Count; i++) gcHandles[i].Free();
+ ListPool<GCHandle>.Release(ref gcHandles);
+ }
+ UnityEngine.Profiling.Profiler.EndSample();
+ }
+ }
+
+ // TODO: Remove or use?
+ public struct JobHandleWithMainThreadWork<T> where T : struct {
+ JobDependencyTracker tracker;
+ IEnumerator<(JobHandle, T)> coroutine;
+
+ public JobHandleWithMainThreadWork (IEnumerator<(JobHandle, T)> handles, JobDependencyTracker tracker) {
+ this.coroutine = handles;
+ this.tracker = tracker;
+ }
+
+ public void Complete () {
+ tracker.timeSlice = TimeSlice.Infinite;
+ while (coroutine.MoveNext()) {
+ coroutine.Current.Item1.Complete();
+ }
+ }
+
+ public System.Collections.Generic.IEnumerable<T?> CompleteTimeSliced (float maxMillisPerStep) {
+ tracker.timeSlice = TimeSlice.MillisFromNow(maxMillisPerStep);
+ while (true) {
+ if (!coroutine.MoveNext()) yield break;
+ if (maxMillisPerStep < float.PositiveInfinity) {
+ while (!coroutine.Current.Item1.IsCompleted) {
+ yield return null;
+ tracker.timeSlice = TimeSlice.MillisFromNow(maxMillisPerStep);
+ }
+ }
+ coroutine.Current.Item1.Complete();
+ yield return coroutine.Current.Item2;
+ tracker.timeSlice = TimeSlice.MillisFromNow(maxMillisPerStep);
+ }
+ }
+ }
+
+ enum LinearDependencies : byte {
+ Check,
+ Enabled,
+ Disabled,
+ }
+
+ /// <summary>
+ /// Automatic dependency tracking for the Unity Job System.
+ ///
+ /// Uses reflection to find the [ReadOnly] and [WriteOnly] attributes on job data struct fields.
+ /// These are used to automatically figure out dependencies between jobs.
+ ///
+ /// A job that reads from an array depends on the last job that wrote to that array.
+ /// A job that writes to an array depends on the last job that wrote to the array as well as all jobs that read from the array.
+ ///
+ /// <code>
+ /// struct ExampleJob : IJob {
+ /// public NativeArray<int> someData;
+ ///
+ /// public void Execute () {
+ /// // Do something
+ /// }
+ /// }
+ ///
+ /// void Start () {
+ /// var tracker = new JobDependencyTracker();
+ /// var data = new NativeArray<int>(100, Allocator.TempJob);
+ /// var job1 = new ExampleJob {
+ /// someData = data
+ /// }.Schedule(tracker);
+ ///
+ /// var job2 = new ExampleJob {
+ /// someData = data
+ /// }.Schedule(tracker);
+ ///
+ /// // job2 automatically depends on job1 because they both require read/write access to the data array
+ /// }
+ /// </code>
+ ///
+ /// See:
+ /// </summary>
+ public class JobDependencyTracker : IAstarPooledObject {
+ internal List<NativeArraySlot> slots = ListPool<NativeArraySlot>.Claim();
+ DisposeArena arena;
+ internal NativeArray<JobHandle> dependenciesScratchBuffer;
+ LinearDependencies linearDependencies;
+ internal TimeSlice timeSlice = TimeSlice.Infinite;
+
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ ~JobDependencyTracker() {
+ if (dependenciesScratchBuffer.IsCreated) {
+ UnityEngine.Debug.LogError("JobDependencyTracker was not disposed. This will cause a memory leak. Please call Dispose on the JobDependencyTracker when you are done with it.");
+ }
+ }
+#endif
+
+ public bool forceLinearDependencies {
+ get {
+ if (linearDependencies == LinearDependencies.Check) SetLinearDependencies(false);
+ return linearDependencies == LinearDependencies.Enabled;
+ }
+ }
+
+ internal struct JobInstance {
+ public JobHandle handle;
+ public int hash;
+#if DEBUG_JOBS
+ public string name;
+#endif
+ }
+
+ internal struct NativeArraySlot {
+ public long hash;
+ public JobInstance lastWrite;
+ public List<JobInstance> lastReads;
+ public bool initialized;
+ public bool hasWrite;
+ }
+
+ // Note: burst compiling even an empty job can avoid the overhead of going from unmanaged to managed code.
+ /* [BurstCompile]
+ struct JobDispose<T> : IJob where T : struct {
+ [DeallocateOnJobCompletion]
+ [DisableUninitializedReadCheck]
+ public NativeArray<T> data;
+
+ public void Execute () {
+ }
+ }*/
+
+ struct JobRaycastCommandDummy : IJob {
+ [ReadOnly]
+ public NativeArray<UnityEngine.RaycastCommand> commands;
+ [WriteOnly]
+ public NativeArray<UnityEngine.RaycastHit> results;
+
+ public void Execute () {}
+ }
+
+#if UNITY_2022_2_OR_NEWER
+ struct JobOverlapCapsuleCommandDummy : IJob {
+ [ReadOnly]
+ public NativeArray<UnityEngine.OverlapCapsuleCommand> commands;
+ [WriteOnly]
+ public NativeArray<UnityEngine.ColliderHit> results;
+
+ public void Execute () {}
+ }
+
+ struct JobOverlapSphereCommandDummy : IJob {
+ [ReadOnly]
+ public NativeArray<UnityEngine.OverlapSphereCommand> commands;
+ [WriteOnly]
+ public NativeArray<UnityEngine.ColliderHit> results;
+
+ public void Execute () {}
+ }
+#endif
+
+ /// <summary>
+ /// JobHandle that represents a dependency for all jobs.
+ /// All native arrays that are written to (and have been tracked by this tracker) will have their final results in them
+ /// when the returned job handle is complete.
+ /// </summary>
+ public JobHandle AllWritesDependency {
+ get {
+ var handles = new NativeArray<JobHandle>(slots.Count, Allocator.Temp);
+ for (int i = 0; i < slots.Count; i++) handles[i] = slots[i].lastWrite.handle;
+ var dependencies = JobHandle.CombineDependencies(handles);
+ handles.Dispose();
+ return dependencies;
+ }
+ }
+
+ bool supportsMultithreading {
+ get {
+#if UNITY_WEBGL
+ return false;
+#else
+ return Unity.Jobs.LowLevel.Unsafe.JobsUtility.JobWorkerCount > 0;
+#endif
+ }
+ }
+
+ /// <summary>
+ /// Disable dependency tracking and just run jobs one after the other.
+ /// This may be faster in some cases since dependency tracking has some overhead.
+ /// </summary>
+ public void SetLinearDependencies (bool linearDependencies) {
+ if (!supportsMultithreading) linearDependencies = true;
+
+ if (linearDependencies) {
+ AllWritesDependency.Complete();
+ }
+ this.linearDependencies = linearDependencies ? LinearDependencies.Enabled : LinearDependencies.Disabled;
+ }
+
+ public NativeArray<T> NewNativeArray<T>(int length, Allocator allocator, NativeArrayOptions options = NativeArrayOptions.ClearMemory) where T : unmanaged {
+ var res = new NativeArray<T>(length, allocator, options);
+ Track(res, options == NativeArrayOptions.ClearMemory);
+ return res;
+ }
+
+ public void Track<T>(NativeArray<T> array, bool initialized = true) where T : unmanaged {
+ unsafe {
+ slots.Add(new NativeArraySlot {
+ hash = (long)NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(array),
+ lastWrite = default,
+ lastReads = ListPool<JobInstance>.Claim(),
+ initialized = initialized,
+ });
+ }
+ if (this.arena == null) this.arena = new DisposeArena();
+ arena.Add(array);
+ }
+
+ /// <summary>
+ /// Makes the given array not be disposed when this tracker is disposed.
+ /// This is useful if you want to keep the array around after the tracker has been disposed.
+ /// The array will still be tracked for the purposes of automatic dependency management.
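+ ///
+ /// A rough sketch of the intended pattern (names are illustrative):
+ /// <code>
+ /// var result = tracker.NewNativeArray<int>(100, Allocator.Persistent);
+ /// // ... schedule jobs that write to the array via the tracker ...
+ /// tracker.Persist(result);
+ /// // the array now survives when the tracker is disposed, and must be disposed manually
+ /// </code>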
+ /// </summary>
+ public void Persist<T>(NativeArray<T> array) where T : unmanaged {
+ if (this.arena == null) return;
+ arena.Remove(array);
+ }
+
+ /// <summary>
+ /// Schedules a raycast batch command.
+ /// Like RaycastCommand.ScheduleBatch, but dependencies are tracked automatically.
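+ ///
+ /// A rough usage sketch (the command and result arrays are hypothetical):
+ /// <code>
+ /// var commands = tracker.NewNativeArray<UnityEngine.RaycastCommand>(64, Allocator.Persistent);
+ /// var results = tracker.NewNativeArray<UnityEngine.RaycastHit>(64, Allocator.Persistent);
+ /// // ... schedule a job that fills the commands array ...
+ /// var handle = tracker.ScheduleBatch(commands, results, 32);
+ /// </code>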
+ /// </summary>
+ public JobHandle ScheduleBatch (NativeArray<UnityEngine.RaycastCommand> commands, NativeArray<UnityEngine.RaycastHit> results, int minCommandsPerJob) {
+ if (forceLinearDependencies) {
+ UnityEngine.RaycastCommand.ScheduleBatch(commands, results, minCommandsPerJob).Complete();
+ return default;
+ }
+
+ // Create a dummy structure to allow the analyzer to determine how the job reads/writes data
+ var dummy = new JobRaycastCommandDummy { commands = commands, results = results };
+ var dependencies = JobDependencyAnalyzer<JobRaycastCommandDummy>.GetDependencies(ref dummy, this);
+ var job = UnityEngine.RaycastCommand.ScheduleBatch(commands, results, minCommandsPerJob, dependencies);
+
+ JobDependencyAnalyzer<JobRaycastCommandDummy>.Scheduled(ref dummy, this, job);
+ return job;
+ }
+
+#if UNITY_2022_2_OR_NEWER
+ /// <summary>
+ /// Schedules an overlap capsule batch command.
+ /// Like OverlapCapsuleCommand.ScheduleBatch, but dependencies are tracked automatically.
+ /// </summary>
+ public JobHandle ScheduleBatch (NativeArray<UnityEngine.OverlapCapsuleCommand> commands, NativeArray<UnityEngine.ColliderHit> results, int minCommandsPerJob) {
+ if (forceLinearDependencies) {
+ UnityEngine.OverlapCapsuleCommand.ScheduleBatch(commands, results, minCommandsPerJob, 1).Complete();
+ return default;
+ }
+
+ // Create a dummy structure to allow the analyzer to determine how the job reads/writes data
+ var dummy = new JobOverlapCapsuleCommandDummy { commands = commands, results = results };
+ var dependencies = JobDependencyAnalyzer<JobOverlapCapsuleCommandDummy>.GetDependencies(ref dummy, this);
+ var job = UnityEngine.OverlapCapsuleCommand.ScheduleBatch(commands, results, minCommandsPerJob, 1, dependencies);
+
+ JobDependencyAnalyzer<JobOverlapCapsuleCommandDummy>.Scheduled(ref dummy, this, job);
+ return job;
+ }
+
+ /// <summary>
+ /// Schedules an overlap sphere batch command.
+ /// Like OverlapSphereCommand.ScheduleBatch, but dependencies are tracked automatically.
+ /// </summary>
+ public JobHandle ScheduleBatch (NativeArray<UnityEngine.OverlapSphereCommand> commands, NativeArray<UnityEngine.ColliderHit> results, int minCommandsPerJob) {
+ if (forceLinearDependencies) {
+ UnityEngine.OverlapSphereCommand.ScheduleBatch(commands, results, minCommandsPerJob, 1).Complete();
+ return default;
+ }
+
+ // Create a dummy structure to allow the analyzer to determine how the job reads/writes data
+ var dummy = new JobOverlapSphereCommandDummy { commands = commands, results = results };
+ var dependencies = JobDependencyAnalyzer<JobOverlapSphereCommandDummy>.GetDependencies(ref dummy, this);
+ var job = UnityEngine.OverlapSphereCommand.ScheduleBatch(commands, results, minCommandsPerJob, 1, dependencies);
+
+ JobDependencyAnalyzer<JobOverlapSphereCommandDummy>.Scheduled(ref dummy, this, job);
+ return job;
+ }
+#endif
+
+ /// Frees the GCHandle when the JobDependencyTracker is disposed
+ public void DeferFree (GCHandle handle, JobHandle dependsOn) {
+ if (this.arena == null) this.arena = new DisposeArena();
+ this.arena.Add(handle);
+ }
+
+#if DEBUG_JOBS
+ internal void JobReadsFrom (JobHandle job, long nativeArrayHash, int jobHash, string jobName)
+#else
+ internal void JobReadsFrom (JobHandle job, long nativeArrayHash, int jobHash)
+#endif
+ {
+ for (int j = 0; j < slots.Count; j++) {
+ var slot = slots[j];
+ if (slot.hash == nativeArrayHash) {
+ // If the job only reads from the array then we just add this job to the list of readers
+ slot.lastReads.Add(new JobInstance {
+ handle = job,
+ hash = jobHash,
+#if DEBUG_JOBS
+ name = jobName,
+#endif
+ });
+ break;
+ }
+ }
+ }
+
+#if DEBUG_JOBS
+ internal void JobWritesTo (JobHandle job, long nativeArrayHash, int jobHash, string jobName)
+#else
+ internal void JobWritesTo (JobHandle job, long nativeArrayHash, int jobHash)
+#endif
+ {
+ for (int j = 0; j < slots.Count; j++) {
+ var slot = slots[j];
+ if (slot.hash == nativeArrayHash) {
+ // If the job writes to the array then this job is now the last writer
+ slot.lastWrite = new JobInstance {
+ handle = job,
+ hash = jobHash,
+#if DEBUG_JOBS
+ name = jobName,
+#endif
+ };
+ slot.lastReads.Clear();
+ // The array no longer contains uninitialized data.
+ // Parts of it may still be uninitialized if the job doesn't write to everything, but that's something that this class cannot track.
+ slot.initialized = true;
+ slot.hasWrite = true;
+ slots[j] = slot;
+ break;
+ }
+ }
+ }
+
+ /// <summary>
+ /// Disposes this tracker.
+ /// This will pool all used lists which makes the GC happy.
+ ///
+ /// Note: It is necessary to call this method to avoid memory leaks if you are using the DeferDispose method. But it's a good thing to do otherwise as well.
+ /// It is automatically called if you are using the ObjectPool.Release method.
+ /// </summary>
+ void Dispose () {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS && UNITY_2022_2_OR_NEWER
+ // Note: This can somehow fail in Unity 2021 and 2022.1, even when calling Complete on all jobs
+ UnityEngine.Assertions.Assert.IsTrue(AllWritesDependency.IsCompleted);
+#endif
+ for (int i = 0; i < slots.Count; i++) ListPool<JobInstance>.Release(slots[i].lastReads);
+
+ slots.Clear();
+ if (arena != null) arena.DisposeAll();
+ linearDependencies = LinearDependencies.Check;
+ if (dependenciesScratchBuffer.IsCreated) dependenciesScratchBuffer.Dispose();
+ }
+
+ public void ClearMemory () {
+ AllWritesDependency.Complete();
+ Dispose();
+ }
+
+ void IAstarPooledObject.OnEnterPool () {
+ Dispose();
+ }
+ }
+
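+ /// <summary>
+ /// Deadline for time-sliced work, expressed as a Stopwatch timestamp.
+ /// <see cref="expired"/> becomes true once Stopwatch.GetTimestamp() passes <see cref="endTick"/>.
+ /// </summary>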
+ public struct TimeSlice {
+ public long endTick;
+ public static readonly TimeSlice Infinite = new TimeSlice { endTick = long.MaxValue };
+ public bool expired => Stopwatch.GetTimestamp() > endTick;
+
+ public static TimeSlice MillisFromNow (float millis) => new TimeSlice { endTick = Stopwatch.GetTimestamp() + (long)(millis * 10000) };
+ }
+
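+ /// <summary>
+ /// A job that can run in chunks on the main thread, checking a <see cref="TimeSlice"/> between chunks.
+ ///
+ /// A minimal sketch of an implementation (the job and its field are hypothetical):
+ /// <code>
+ /// struct CountingJob : IJobTimeSliced {
+ /// public int itemsLeft;
+ ///
+ /// public void Execute () {
+ /// Execute(TimeSlice.Infinite);
+ /// }
+ ///
+ /// public bool Execute (TimeSlice timeSlice) {
+ /// while (itemsLeft > 0) {
+ /// itemsLeft--; // process one item here
+ /// if (timeSlice.expired) return false;
+ /// }
+ /// return true;
+ /// }
+ /// }
+ /// </code>
+ /// </summary>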
+ public interface IJobTimeSliced : IJob {
+ /// <summary>
+ /// Returns true if the job completed.
+ /// If false is returned this job may be called again until the job completes.
+ /// </summary>
+ bool Execute(TimeSlice timeSlice);
+ }
+
+ /// Extension methods for IJob and related interfaces
+ public static class IJobExtensions {
+ struct ManagedJob : IJob {
+ public GCHandle handle;
+
+ public void Execute () {
+ ((IJob)handle.Target).Execute();
+ handle.Free();
+ }
+ }
+
+ struct ManagedActionJob : IJob {
+ public GCHandle handle;
+
+ public void Execute () {
+ ((System.Action)handle.Target)();
+ handle.Free();
+ }
+ }
+
+ /// <summary>
+ /// Schedule a job with automatic dependency tracking.
+ /// You need to have "using Pathfinding.Util" in your script to be able to use this extension method.
+ ///
+ /// See:
+ /// </summary>
+ // TODO: Compare performance impact by using ref this, and ScheduleByRef
+ public static JobHandle Schedule<T>(this T data, JobDependencyTracker tracker) where T : struct, IJob {
+ if (tracker.forceLinearDependencies) {
+ data.Run();
+ return default;
+ } else {
+ var job = data.Schedule(JobDependencyAnalyzer<T>.GetDependencies(ref data, tracker));
+ JobDependencyAnalyzer<T>.Scheduled(ref data, tracker, job);
+ return job;
+ }
+ }
+
+ /// Schedules a job with automatic dependency tracking
+ public static JobHandle ScheduleBatch<T>(this T data, int arrayLength, int minIndicesPerJobCount, JobDependencyTracker tracker, JobHandle additionalDependency = default) where T : struct, IJobParallelForBatched {
+ if (tracker.forceLinearDependencies) {
+ additionalDependency.Complete();
+ //data.ScheduleBatch(arrayLength, minIndicesPerJobCount, additionalDependency).Complete();
+ data.RunBatch(arrayLength);
+ return default;
+ } else {
+ var job = data.ScheduleBatch(arrayLength, minIndicesPerJobCount, JobDependencyAnalyzer<T>.GetDependencies(ref data, tracker, additionalDependency));
+
+ JobDependencyAnalyzer<T>.Scheduled(ref data, tracker, job);
+ return job;
+ }
+ }
+
+ /// Schedules a managed job to run in the job system
+ public static JobHandle ScheduleManaged<T>(this T data, JobHandle dependsOn) where T : struct, IJob {
+ return new ManagedJob { handle = GCHandle.Alloc(data) }.Schedule(dependsOn);
+ }
+
+ /// Schedules a managed job to run in the job system
+ public static JobHandle ScheduleManaged (this System.Action data, JobHandle dependsOn) {
+ return new ManagedActionJob {
+ handle = GCHandle.Alloc(data)
+ }.Schedule(dependsOn);
+ }
+
+ public static JobHandle GetDependencies<T>(this T data, JobDependencyTracker tracker) where T : struct, IJob {
+ if (tracker.forceLinearDependencies) return default;
+ else return JobDependencyAnalyzer<T>.GetDependencies(ref data, tracker);
+ }
+
+ /// <summary>
+ /// Executes this job in the main thread using a coroutine.
+ /// Usage:
+ /// - 1. Optionally schedule some other jobs before this one (using the dependency tracker)
+ /// - 2. Call job.ExecuteMainThreadJob(tracker)
+ /// - 3. Iterate over the enumerator until it is finished. Call handle.Complete on all yielded job handles. Usually this only yields once, but if you use the wrapper it will
+ /// yield once for every time slice.
+ /// - 4. Continue scheduling other jobs.
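+ ///
+ /// A rough sketch of the above steps (MyTimeSlicedJob is a hypothetical IJobTimeSliced; tracker is an existing JobDependencyTracker):
+ /// <code>
+ /// var job = new MyTimeSlicedJob();
+ /// var it = job.ExecuteMainThreadJob(tracker);
+ /// while (it.MoveNext()) it.Current.Complete();
+ /// // continue scheduling other jobs here
+ /// </code>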
+ ///
+ /// You must not schedule other jobs (that may touch the same data) while executing this job.
+ ///
+ /// See:
+ /// </summary>
+ public static IEnumerator<JobHandle> ExecuteMainThreadJob<T>(this T data, JobDependencyTracker tracker) where T : struct, IJobTimeSliced {
+ if (tracker.forceLinearDependencies) {
+ UnityEngine.Profiling.Profiler.BeginSample("Main Thread Work");
+ data.Execute();
+ UnityEngine.Profiling.Profiler.EndSample();
+ yield break;
+ }
+
+ var dependsOn = JobDependencyAnalyzer<T>.GetDependencies(ref data, tracker);
+ yield return dependsOn;
+
+ while (true) {
+ UnityEngine.Profiling.Profiler.BeginSample("Main Thread Work");
+ var didComplete = data.Execute(tracker.timeSlice);
+ UnityEngine.Profiling.Profiler.EndSample();
+ if (didComplete) yield break;
+ else yield return new JobHandle();
+ }
+ }
+ }
+
+ internal static class JobDependencyAnalyzerAssociated {
+ internal static UnityEngine.Profiling.CustomSampler getDependenciesSampler = UnityEngine.Profiling.CustomSampler.Create("GetDependencies");
+ internal static UnityEngine.Profiling.CustomSampler iteratingSlotsSampler = UnityEngine.Profiling.CustomSampler.Create("IteratingSlots");
+ internal static UnityEngine.Profiling.CustomSampler initSampler = UnityEngine.Profiling.CustomSampler.Create("Init");
+ internal static UnityEngine.Profiling.CustomSampler combineSampler = UnityEngine.Profiling.CustomSampler.Create("Combining");
+ internal static int[] tempJobDependencyHashes = new int[16];
+ internal static int jobCounter = 1;
+ }
+
+ struct JobDependencyAnalyzer<T> where T : struct {
+ static ReflectionData reflectionData;
+
+ /// Offset to the m_Buffer field inside each NativeArray
+ // Note: Due to a Unity bug we have to calculate this for NativeArray<int> instead of NativeArray<>. NativeArray<> will return an incorrect value (-16) when using IL2CPP.
+ static readonly int BufferOffset = UnsafeUtility.GetFieldOffset(typeof(NativeArray<int>).GetField("m_Buffer", BindingFlags.Instance | BindingFlags.NonPublic));
+ static readonly int SpanPtrOffset = UnsafeUtility.GetFieldOffset(typeof(UnsafeSpan<int>).GetField("ptr", BindingFlags.Instance | BindingFlags.NonPublic));
+ struct ReflectionData {
+ public int[] fieldOffsets;
+ public bool[] writes;
+ public bool[] checkUninitializedRead;
+ public string[] fieldNames;
+
+ public void Build () {
+ // Find the byte offsets within the struct to all m_Buffer fields in all the native arrays in the struct
+ var fields = new List<int>();
+ var writes = new List<bool>();
+ var reads = new List<bool>();
+ var names = new List<string>();
+
+ Build(typeof(T), fields, writes, reads, names, 0, false, false, false);
+ this.fieldOffsets = fields.ToArray();
+ this.writes = writes.ToArray();
+ this.fieldNames = names.ToArray();
+ this.checkUninitializedRead = reads.ToArray();
+ }
+
+ void Build (System.Type type, List<int> fields, List<bool> writes, List<bool> reads, List<string> names, int offset, bool forceReadOnly, bool forceWriteOnly, bool forceDisableUninitializedCheck) {
+ foreach (var field in type.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)) {
+ if (field.FieldType.IsGenericType && field.FieldType.GetGenericTypeDefinition() == typeof(NativeArray<>)) {
+ // Handle NativeArrays
+ fields.Add(offset + UnsafeUtility.GetFieldOffset(field) + BufferOffset);
+ writes.Add(!forceReadOnly && field.GetCustomAttribute(typeof(ReadOnlyAttribute)) == null);
+ reads.Add(!forceWriteOnly && !forceDisableUninitializedCheck && field.GetCustomAttribute(typeof(WriteOnlyAttribute)) == null && field.GetCustomAttribute(typeof(DisableUninitializedReadCheckAttribute)) == null);
+ names.Add(field.Name);
+ } else if (field.FieldType.IsGenericType && field.FieldType.GetGenericTypeDefinition() == typeof(UnsafeSpan<>)) {
+ // Handle UnsafeSpans
+ fields.Add(offset + UnsafeUtility.GetFieldOffset(field) + SpanPtrOffset);
+ writes.Add(!forceReadOnly && field.GetCustomAttribute(typeof(ReadOnlyAttribute)) == null);
+ reads.Add(!forceWriteOnly && !forceDisableUninitializedCheck && field.GetCustomAttribute(typeof(WriteOnlyAttribute)) == null && field.GetCustomAttribute(typeof(DisableUninitializedReadCheckAttribute)) == null);
+ names.Add(field.Name);
+ } else if (!field.FieldType.IsPrimitive && field.FieldType.IsValueType && !field.FieldType.IsEnum) {
+ // Recurse to handle nested types
+ bool readOnly = field.GetCustomAttribute(typeof(ReadOnlyAttribute)) != null;
+ bool writeOnly = field.GetCustomAttribute(typeof(WriteOnlyAttribute)) != null;
+ bool disableUninitializedCheck = field.GetCustomAttribute(typeof(DisableUninitializedReadCheckAttribute)) != null;
+ Build(field.FieldType, fields, writes, reads, names, offset + UnsafeUtility.GetFieldOffset(field), readOnly, writeOnly, disableUninitializedCheck);
+ }
+ }
+ }
+ }
+
+ static void initReflectionData () {
+ if (reflectionData.fieldOffsets == null) {
+ reflectionData.Build();
+ }
+ }
+
+ static bool HasHash (int[] hashes, int hash, int count) {
+ for (int i = 0; i < count; i++) if (hashes[i] == hash) return true;
+ return false;
+ }
+
+ /// Returns the dependencies for the given job.
+ /// <param name="data">Job data. Must be allocated on the stack.</param>
+ /// <param name="tracker">The tracker to use for dependency tracking.</param>
+ public static JobHandle GetDependencies (ref T data, JobDependencyTracker tracker) {
+ return GetDependencies(ref data, tracker, default, false);
+ }
+
+ public static JobHandle GetDependencies (ref T data, JobDependencyTracker tracker, JobHandle additionalDependency) {
+ return GetDependencies(ref data, tracker, additionalDependency, true);
+ }
+
+ static JobHandle GetDependencies (ref T data, JobDependencyTracker tracker, JobHandle additionalDependency, bool useAdditionalDependency) {
+ //JobDependencyAnalyzerAssociated.getDependenciesSampler.Begin();
+ //JobDependencyAnalyzerAssociated.initSampler.Begin();
+ if (!tracker.dependenciesScratchBuffer.IsCreated) tracker.dependenciesScratchBuffer = new NativeArray<JobHandle>(16, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
+ var dependencies = tracker.dependenciesScratchBuffer;
+ var slots = tracker.slots;
+ var dependencyHashes = JobDependencyAnalyzerAssociated.tempJobDependencyHashes;
+
+ int numDependencies = 0;
+
+ //JobDependencyAnalyzerAssociated.initSampler.End();
+ initReflectionData();
+#if DEBUG_JOBS
+ string dependenciesDebug = "";
+#endif
+ unsafe {
+ // Note: data is a struct. It is stored on the stack and can thus not be moved by the GC.
+ // Therefore we do not need to pin it first.
+ // It is guaranteed to be stored on the stack since the Schedule method takes the data parameter by value and not by reference.
+ byte* dataPtr = (byte*)UnsafeUtility.AddressOf(ref data);
+
+ var offsets = reflectionData.fieldOffsets;
+ for (int i = 0; i < offsets.Length; i++) {
+ // This is the internal value of the m_Buffer field of the NativeArray
+ void* nativeArrayBufferPtr = *(void**)(dataPtr + offsets[i]);
+
+ // Use the pointer as a hash to uniquely identify a NativeArray
+ var hash = (long)nativeArrayBufferPtr;
+
+ //JobDependencyAnalyzerAssociated.iteratingSlotsSampler.Begin();
+ for (int j = 0; j <= slots.Count; j++) {
+ // No slot found. Add a new one
+ if (j == slots.Count) {
+ slots.Add(new JobDependencyTracker.NativeArraySlot {
+ hash = hash,
+ lastWrite = default,
+ lastReads = ListPool<JobDependencyTracker.JobInstance>.Claim(),
+ initialized = true, // We don't know anything about the array, so assume it contains initialized data. JobDependencyTracker.NewNativeArray should be used otherwise.
+ hasWrite = false,
+ });
+ }
+
+ // Check if we know about this NativeArray yet
+ var slot = slots[j];
+ if (slot.hash == hash) {
+ if (reflectionData.checkUninitializedRead[i] && !slot.initialized) {
+ throw new System.InvalidOperationException("A job tries to read from the native array " + typeof(T).Name + "." + reflectionData.fieldNames[i] + " which contains uninitialized data");
+ }
+
+ if (slot.hasWrite && !HasHash(dependencyHashes, slot.lastWrite.hash, numDependencies)) {
+ // Reads/writes always depend on the last write to the native array
+ dependencies[numDependencies] = slot.lastWrite.handle;
+ dependencyHashes[numDependencies] = slot.lastWrite.hash;
+ numDependencies++;
+ if (numDependencies >= dependencies.Length) throw new System.Exception("Too many dependencies for job");
+#if DEBUG_JOBS
+ dependenciesDebug += slot.lastWrite.name + " ";
+#endif
+ }
+
+ // If we want to write to the array we additionally depend on all previous reads of the array
+ if (reflectionData.writes[i]) {
+ for (int q = 0; q < slot.lastReads.Count; q++) {
+ if (!HasHash(dependencyHashes, slot.lastReads[q].hash, numDependencies)) {
+ dependencies[numDependencies] = slot.lastReads[q].handle;
+ dependencyHashes[numDependencies] = slot.lastReads[q].hash;
+ numDependencies++;
+ if (numDependencies >= dependencies.Length) throw new System.Exception("Too many dependencies for job");
+#if DEBUG_JOBS
+ dependenciesDebug += slot.lastReads[q].name + " ";
+#endif
+ }
+ }
+ }
+ break;
+ }
+ }
+ //JobDependencyAnalyzerAssociated.iteratingSlotsSampler.End();
+ }
+
+ if (useAdditionalDependency) {
+ dependencies[numDependencies] = additionalDependency;
+ numDependencies++;
+#if DEBUG_JOBS
+ dependenciesDebug += "[additional dependency]";
+#endif
+ }
+
+#if DEBUG_JOBS
+ UnityEngine.Debug.Log(typeof(T) + " depends on " + dependenciesDebug);
+#endif
+
+ // JobDependencyAnalyzerAssociated.getDependenciesSampler.End();
+ if (numDependencies == 0) {
+ return default;
+ } else if (numDependencies == 1) {
+ return dependencies[0];
+ } else {
+ //JobDependencyAnalyzerAssociated.combineSampler.Begin();
+ return JobHandle.CombineDependencies(dependencies.Slice(0, numDependencies));
+ //JobDependencyAnalyzerAssociated.combineSampler.End();
+ }
+ }
+ }
+
+ internal static void Scheduled (ref T data, JobDependencyTracker tracker, JobHandle job) {
+ unsafe {
+ int jobHash = JobDependencyAnalyzerAssociated.jobCounter++;
+ // Note: data is a struct. It is stored on the stack and can thus not be moved by the GC.
+ // Therefore we do not need to pin it first.
+ // It is guaranteed to be stored on the stack since the Schedule method takes the data parameter by value and not by reference.
+ byte* dataPtr = (byte*)UnsafeUtility.AddressOf(ref data);
+ for (int i = 0; i < reflectionData.fieldOffsets.Length; i++) {
+ // This is the internal value of the m_Buffer field of the NativeArray
+ void* nativeArrayBufferPtr = *(void**)(dataPtr + reflectionData.fieldOffsets[i]);
+
+ // Use the pointer as a hash to uniquely identify a NativeArray
+ var hash = (long)nativeArrayBufferPtr;
+#if DEBUG_JOBS
+ if (reflectionData.writes[i]) tracker.JobWritesTo(job, hash, jobHash, typeof(T).Name);
+ else tracker.JobReadsFrom(job, hash, jobHash, typeof(T).Name);
+#else
+ if (reflectionData.writes[i]) tracker.JobWritesTo(job, hash, jobHash);
+ else tracker.JobReadsFrom(job, hash, jobHash);
+#endif
+ }
+ }
+ }
+ }
+}
--