summaryrefslogtreecommitdiff
path: root/UnityCollection/Assets/Infrastructure/CollectionsExt/IndexedSet.cs
diff options
context:
space:
mode:
authorchai <chaifix@163.com>2021-06-12 20:10:36 +0800
committerchai <chaifix@163.com>2021-06-12 20:10:36 +0800
commit8d9978acfa77fb98dd517e5b3795add8244d3d9b (patch)
tree13678bc7d3c44413f87d77d7dfa3fbe7affe6d45 /UnityCollection/Assets/Infrastructure/CollectionsExt/IndexedSet.cs
parent5145dd09cab55a896d510482f2e22f74b4d4b76f (diff)
*misc
Diffstat (limited to 'UnityCollection/Assets/Infrastructure/CollectionsExt/IndexedSet.cs')
-rw-r--r--UnityCollection/Assets/Infrastructure/CollectionsExt/IndexedSet.cs162
1 files changed, 162 insertions, 0 deletions
diff --git a/UnityCollection/Assets/Infrastructure/CollectionsExt/IndexedSet.cs b/UnityCollection/Assets/Infrastructure/CollectionsExt/IndexedSet.cs
new file mode 100644
index 0000000..9a9d865
--- /dev/null
+++ b/UnityCollection/Assets/Infrastructure/CollectionsExt/IndexedSet.cs
@@ -0,0 +1,162 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+
namespace CollectionsExt
{
    // Combines a List<T> (contiguous storage, fast iteration) with a
    // Dictionary<T, int> index cache (O(1) membership / position lookup).
    // Trades memory for speed: duplicate checks and removals that would be
    // O(n) on a plain List become O(1) here.
    //
    // Supports:
    //  1. Fast iteration
    //  2. Fast insertion and removal
    //  3. Unique membership (via AddUnique)
    //  4. Sequential access
    //  5. Index-based access
    internal class IndexedSet<T> : IList<T>
    {
        //This is a container that gives:
        // - Unique items
        // - Fast random removal
        // - Fast unique inclusion to the end
        // - Sequential access
        //Downsides:
        // - Uses more memory
        // - Ordering is not persistent
        // - Not Serialization Friendly.

        //We use a Dictionary to speed up list lookup, this makes it cheaper to guarantee no duplicates (set)
        //When removing we move the last item to the removed item position, this way we only need to update the index cache of a single item. (fast removal)
        //Order of the elements is not guaranteed. A removal will change the order of the items.

        // Backing list: element storage (order is unstable across removals).
        readonly List<T> m_List = new List<T>();
        // Index cache: item -> its current position in m_List.
        readonly Dictionary<T, int> m_Dictionary = new Dictionary<T, int>();

        // Appends item at the end.
        // Throws ArgumentException (from Dictionary.Add) if item is already present.
        public void Add(T item)
        {
            m_List.Add(item);
            m_Dictionary.Add(item, m_List.Count - 1);
        }

        // Appends item only when not already present; returns true if it was added.
        public bool AddUnique(T item)
        {
            if (m_Dictionary.ContainsKey(item))
                return false;

            m_List.Add(item);
            m_Dictionary.Add(item, m_List.Count - 1);

            return true;
        }

        // O(1) lookup + swap-removal. Returns false when item is absent.
        public bool Remove(T item)
        {
            int index;
            if (!m_Dictionary.TryGetValue(item, out index))
                return false;

            RemoveAt(index);
            return true;
        }

        // FIX: previously threw NotImplementedException, which made foreach
        // over this collection unusable. Delegate to the backing list.
        public IEnumerator<T> GetEnumerator()
        {
            return m_List.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        public void Clear()
        {
            m_List.Clear();
            m_Dictionary.Clear();
        }

        // O(1) membership test via the index cache.
        public bool Contains(T item)
        {
            return m_Dictionary.ContainsKey(item);
        }

        public void CopyTo(T[] array, int arrayIndex)
        {
            m_List.CopyTo(array, arrayIndex);
        }

        public int Count { get { return m_List.Count; } }
        public bool IsReadOnly { get { return false; } }

        // Returns the item's current position, or -1 when absent.
        // FIX: the original returned 0 for missing items because TryGetValue
        // overwrites the out parameter with default(int) on failure, discarding
        // the -1 initializer.
        public int IndexOf(T item)
        {
            int index;
            return m_Dictionary.TryGetValue(item, out index) ? index : -1;
        }

        public void Insert(int index, T item)
        {
            //We could support this, but the semantics would be weird. Order is not guaranteed..
            throw new NotSupportedException("Random Insertion is semantically invalid, since this structure does not guarantee ordering.");
        }

        // Swap-removal: move the last element into the vacated slot so only one
        // dictionary entry needs updating. This is why ordering is not stable.
        public void RemoveAt(int index)
        {
            T item = m_List[index];
            m_Dictionary.Remove(item);
            int lastIndex = m_List.Count - 1;
            if (index == lastIndex)
            {
                m_List.RemoveAt(index);
            }
            else
            {
                T lastItem = m_List[lastIndex];
                m_List[index] = lastItem;
                m_Dictionary[lastItem] = index;
                m_List.RemoveAt(lastIndex);
            }
        }

        public T this[int index]
        {
            get { return m_List[index]; }
            set
            {
                T item = m_List[index];
                m_Dictionary.Remove(item);
                m_List[index] = value;
                // FIX: the original re-added the OLD item here instead of the new
                // value, leaving the index cache pointing at a replaced element
                // (the original author flagged this in a comment).
                m_Dictionary.Add(value, index);
            }
        }

        // Removes every item matching the predicate. Note that Remove uses
        // swap-removal, so the element at position i may change after a hit;
        // i is only advanced on a miss.
        public void RemoveAll(Predicate<T> match)
        {
            //I guess this could be optmized by instead of removing the items from the list immediatly,
            //We move them to the end, and then remove all in one go.
            //But I don't think this is going to be the bottleneck, so leaving as is for now.
            int i = 0;
            while (i < m_List.Count)
            {
                T item = m_List[i];
                if (match(item))
                    Remove(item);
                else
                    i++;
            }
        }

        //Sorts the internal list, this makes the exposed index accessor sorted as well.
        //But note that any insertion or deletion, can unorder the collection again.
        public void Sort(Comparison<T> sortLayoutFunction)
        {
            //There might be better ways to sort and keep the dictionary index up to date.
            m_List.Sort(sortLayoutFunction);
            //Rebuild the dictionary index so lookups stay consistent with the new order.
            for (int i = 0; i < m_List.Count; ++i)
            {
                T item = m_List[i];
                m_Dictionary[item] = i;
            }
        }
    }
}