/** \file outliner_treehash.c
 *  Tree-hash tailored for storing duplicated outliner tree-store elements.
 */
/* SPDX-License-Identifier: GPL-2.0-or-later */

#include <stdlib.h>
#include <string.h>

#include "BLI_ghash.h"
#include "BLI_mempool.h"
#include "BLI_utildefines.h"

#include "DNA_outliner_types.h"

#include "BKE_outliner_treehash.h"

#include "MEM_guardedalloc.h"

22 typedef struct TseGroup {
24  /* Index of last used #TreeStoreElem item, to speed up search for another one. */
25  int lastused;
26  /* Counter used to reduce the amount of 'rests' of `lastused` index, otherwise search for unused
27  * item is exponential and becomes critically slow when there are a lot of items in the group. */
29  /* Number of items currently in use. */
30  int size;
31  /* Number of items currently allocated. */
32  int allocated;
34 
/* Only allow reset of #TseGroup.lastused counter to 0 once every 10k searches. */
#define TSEGROUP_LASTUSED_RESET_VALUE 10000
37 
38 /* Allocate structure for TreeStoreElements;
39  * Most of elements in treestore have no duplicates,
40  * so there is no need to preallocate memory for more than one pointer */
42 {
43  TseGroup *tse_group = MEM_mallocN(sizeof(TseGroup), "TseGroup");
44  tse_group->elems = MEM_mallocN(sizeof(TreeStoreElem *), "TseGroupElems");
45  tse_group->size = 0;
46  tse_group->allocated = 1;
47  tse_group->lastused = 0;
48  return tse_group;
49 }
50 
51 static void tse_group_add_element(TseGroup *tse_group, TreeStoreElem *elem)
52 {
53  if (UNLIKELY(tse_group->size == tse_group->allocated)) {
54  tse_group->allocated *= 2;
55  tse_group->elems = MEM_reallocN(tse_group->elems,
56  sizeof(TreeStoreElem *) * tse_group->allocated);
57  }
58  tse_group->elems[tse_group->size] = elem;
59  tse_group->lastused = tse_group->size;
60  tse_group->size++;
61 }
62 
63 static void tse_group_remove_element(TseGroup *tse_group, TreeStoreElem *elem)
64 {
65  int min_allocated = MAX2(1, tse_group->allocated / 2);
66  BLI_assert(tse_group->allocated == 1 || (tse_group->allocated % 2) == 0);
67 
68  tse_group->size--;
69  BLI_assert(tse_group->size >= 0);
70  for (int i = 0; i < tse_group->size; i++) {
71  if (tse_group->elems[i] == elem) {
72  memcpy(tse_group->elems[i],
73  tse_group->elems[i + 1],
74  (tse_group->size - (i + 1)) * sizeof(TreeStoreElem *));
75  break;
76  }
77  }
78 
79  if (UNLIKELY(tse_group->size > 0 && tse_group->size <= min_allocated)) {
80  tse_group->allocated = min_allocated;
81  tse_group->elems = MEM_reallocN(tse_group->elems,
82  sizeof(TreeStoreElem *) * tse_group->allocated);
83  }
84 }
85 
86 static void tse_group_free(TseGroup *tse_group)
87 {
88  MEM_freeN(tse_group->elems);
89  MEM_freeN(tse_group);
90 }
91 
92 static unsigned int tse_hash(const void *ptr)
93 {
94  const TreeStoreElem *tse = ptr;
95  union {
96  short h_pair[2];
97  unsigned int u_int;
98  } hash;
99 
100  BLI_assert((tse->type != TSE_SOME_ID) || !tse->nr);
101 
102  hash.h_pair[0] = tse->type;
103  hash.h_pair[1] = tse->nr;
104 
105  hash.u_int ^= BLI_ghashutil_ptrhash(tse->id);
106 
107  return hash.u_int;
108 }
109 
110 static bool tse_cmp(const void *a, const void *b)
111 {
112  const TreeStoreElem *tse_a = a;
113  const TreeStoreElem *tse_b = b;
114  return tse_a->type != tse_b->type || tse_a->nr != tse_b->nr || tse_a->id != tse_b->id;
115 }
116 
117 static void fill_treehash(void *treehash, BLI_mempool *treestore)
118 {
119  TreeStoreElem *tselem;
120  BLI_mempool_iter iter;
121  BLI_mempool_iternew(treestore, &iter);
122 
123  BLI_assert(treehash);
124 
125  while ((tselem = BLI_mempool_iterstep(&iter))) {
126  BKE_outliner_treehash_add_element(treehash, tselem);
127  }
128 }
129 
131 {
132  GHash *treehash = BLI_ghash_new_ex(tse_hash, tse_cmp, "treehash", BLI_mempool_len(treestore));
133  fill_treehash(treehash, treestore);
134  return treehash;
135 }
136 
137 static void free_treehash_group(void *key)
138 {
139  tse_group_free(key);
140 }
141 
143 {
144  GHashIterator gh_iter;
145 
146  GHASH_ITER (gh_iter, treehash) {
147  TseGroup *group = BLI_ghashIterator_getValue(&gh_iter);
148  group->lastused = 0;
149  group->lastused_reset_count = 0;
150  }
151 }
152 
154 {
155  BLI_assert(treehash);
156 
158  fill_treehash(treehash, treestore);
159  return treehash;
160 }
161 
163 {
164  TseGroup *group;
165  void **val_p;
166 
167  if (!BLI_ghash_ensure_p(treehash, elem, &val_p)) {
168  *val_p = tse_group_create();
169  }
170  group = *val_p;
171  tse_group_add_element(group, elem);
172 }
173 
175 {
176  TseGroup *group = BLI_ghash_lookup(treehash, elem);
177 
178  BLI_assert(group != NULL);
179  if (group->size <= 1) {
180  /* one element -> remove group completely */
181  BLI_ghash_remove(treehash, elem, NULL, free_treehash_group);
182  }
183  else {
184  tse_group_remove_element(group, elem);
185  }
186 }
187 
188 static TseGroup *BKE_outliner_treehash_lookup_group(GHash *th, short type, short nr, struct ID *id)
189 {
190  TreeStoreElem tse_template;
191  tse_template.type = type;
192  tse_template.nr = (type == TSE_SOME_ID) ? 0 : nr; /* we're picky! :) */
193  tse_template.id = id;
194 
195  BLI_assert(th);
196 
197  return BLI_ghash_lookup(th, &tse_template);
198 }
199 
201  short type,
202  short nr,
203  struct ID *id)
204 {
205  TseGroup *group;
206 
207  BLI_assert(treehash);
208 
209  group = BKE_outliner_treehash_lookup_group(treehash, type, nr, id);
210  if (group) {
211  /* Find unused element, with optimization to start from previously
212  * found element assuming we do repeated lookups. */
213  int size = group->size;
214  int offset = group->lastused;
215 
216  for (int i = 0; i < size; i++, offset++) {
217  /* Once at the end of the array of items, in most cases it just means that all items are
218  * used, so only check the whole array once every TSEGROUP_LASTUSED_RESET_VALUE times. */
219  if (offset >= size) {
221  group->lastused_reset_count++;
222  group->lastused = group->size - 1;
223  break;
224  }
225  group->lastused_reset_count = 0;
226  offset = 0;
227  }
228 
229  if (!group->elems[offset]->used) {
230  group->lastused = offset;
231  return group->elems[offset];
232  }
233  }
234  }
235  return NULL;
236 }
237 
239  short type,
240  short nr,
241  struct ID *id)
242 {
243  TseGroup *group;
244 
245  BLI_assert(treehash);
246 
247  group = BKE_outliner_treehash_lookup_group(treehash, type, nr, id);
248  return group ? group->elems[0] : NULL;
249 }
250 
251 void BKE_outliner_treehash_free(void *treehash)
252 {
253  BLI_assert(treehash);
254 
256 }
#define BLI_assert(a)
Definition: BLI_assert.h:46
unsigned int BLI_ghashutil_ptrhash(const void *key)
BLI_INLINE void * BLI_ghashIterator_getValue(GHashIterator *ghi) ATTR_WARN_UNUSED_RESULT
Definition: BLI_ghash.h:302
#define GHASH_ITER(gh_iter_, ghash_)
Definition: BLI_ghash.h:321
void * BLI_ghash_lookup(const GHash *gh, const void *key) ATTR_WARN_UNUSED_RESULT
Definition: BLI_ghash.c:734
GHash * BLI_ghash_new_ex(GHashHashFP hashfp, GHashCmpFP cmpfp, const char *info, unsigned int nentries_reserve) ATTR_MALLOC ATTR_WARN_UNUSED_RESULT
Definition: BLI_ghash.c:681
bool BLI_ghash_remove(GHash *gh, const void *key, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
Definition: BLI_ghash.c:790
void BLI_ghash_free(GHash *gh, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
Definition: BLI_ghash.c:863
void BLI_ghash_clear_ex(GHash *gh, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp, unsigned int nentries_reserve)
Definition: BLI_ghash.c:845
bool BLI_ghash_ensure_p(GHash *gh, void *key, void ***r_val) ATTR_WARN_UNUSED_RESULT
Definition: BLI_ghash.c:755
void BLI_mempool_iternew(BLI_mempool *pool, BLI_mempool_iter *iter) ATTR_NONNULL()
Definition: BLI_mempool.c:498
void * BLI_mempool_iterstep(BLI_mempool_iter *iter) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL()
Definition: BLI_mempool.c:577
int BLI_mempool_len(const BLI_mempool *pool) ATTR_NONNULL(1)
Definition: BLI_mempool.c:434
#define MAX2(a, b)
#define UNLIKELY(x)
#define LIKELY(x)
@ TSE_SOME_ID
_GL_VOID GLfloat value _GL_VOID_RET _GL_VOID const GLuint GLboolean *residences _GL_BOOL_RET _GL_VOID GLsizei GLfloat GLfloat GLfloat GLfloat const GLubyte *bitmap _GL_VOID_RET _GL_VOID GLenum type
Read Guarded memory(de)allocation.
#define MEM_reallocN(vmemh, len)
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
Definition: btDbvt.cpp:52
ccl_gpu_kernel_postfix ccl_global float int int int int float bool int offset
void(* MEM_freeN)(void *vmemh)
Definition: mallocn.c:27
void *(* MEM_mallocN)(size_t len, const char *str)
Definition: mallocn.c:33
static unsigned a[3]
Definition: RandGen.cpp:78
static const pxr::TfToken b("b", pxr::TfToken::Immortal)
#define hash
Definition: noise.c:153
static void tse_group_free(TseGroup *tse_group)
static void tse_group_add_element(TseGroup *tse_group, TreeStoreElem *elem)
static unsigned int tse_hash(const void *ptr)
static TseGroup * BKE_outliner_treehash_lookup_group(GHash *th, short type, short nr, struct ID *id)
void BKE_outliner_treehash_clear_used(void *treehash)
static TseGroup * tse_group_create(void)
#define TSEGROUP_LASTUSED_RESET_VALUE
void * BKE_outliner_treehash_create_from_treestore(BLI_mempool *treestore)
static void free_treehash_group(void *key)
TreeStoreElem * BKE_outliner_treehash_lookup_unused(void *treehash, short type, short nr, struct ID *id)
static void tse_group_remove_element(TseGroup *tse_group, TreeStoreElem *elem)
struct TseGroup TseGroup
static bool tse_cmp(const void *a, const void *b)
void * BKE_outliner_treehash_rebuild_from_treestore(void *treehash, BLI_mempool *treestore)
void BKE_outliner_treehash_free(void *treehash)
TreeStoreElem * BKE_outliner_treehash_lookup_any(void *treehash, short type, short nr, struct ID *id)
static void fill_treehash(void *treehash, BLI_mempool *treestore)
void BKE_outliner_treehash_add_element(void *treehash, TreeStoreElem *elem)
void BKE_outliner_treehash_remove_element(void *treehash, TreeStoreElem *elem)
Definition: DNA_ID.h:368
int lastused_reset_count
TreeStoreElem ** elems
PointerRNA * ptr
Definition: wm_files.c:3480