Blender V3.3
kdtree_impl.h
/* SPDX-License-Identifier: GPL-2.0-or-later */

#include "MEM_guardedalloc.h"

#include "BLI_kdtree_impl.h"
#include "BLI_math.h"
#include "BLI_strict_flags.h"
#include "BLI_utildefines.h"

#define _CONCAT_AUX(MACRO_ARG1, MACRO_ARG2) MACRO_ARG1##MACRO_ARG2
#define _CONCAT(MACRO_ARG1, MACRO_ARG2) _CONCAT_AUX(MACRO_ARG1, MACRO_ARG2)
#define BLI_kdtree_nd_(id) _CONCAT(KDTREE_PREFIX_ID, _##id)

typedef struct KDTreeNode_head {
  uint left, right;
  float co[KD_DIMS];
  int index;
} KDTreeNode_head;

typedef struct KDTreeNode {
  uint left, right;
  float co[KD_DIMS];
  int index;
  uint d; /* range is only (0..KD_DIMS - 1) */
} KDTreeNode;

struct KDTree {
  KDTreeNode *nodes;
  uint nodes_len;
  uint root;
#ifdef DEBUG
  bool is_balanced;        /* ensure we call balance first */
  uint nodes_len_capacity; /* max size of the tree */
#endif
};

#define KD_STACK_INIT 100     /* initial size for array (on the stack) */
#define KD_NEAR_ALLOC_INC 100 /* alloc increment for collecting nearest */
#define KD_FOUND_ALLOC_INC 50 /* alloc increment for collecting nearest */

#define KD_NODE_UNSET ((uint)-1)

#define KD_NODE_ROOT_IS_INIT ((uint)-2)

/* -------------------------------------------------------------------- */

static void copy_vn_vn(float v0[KD_DIMS], const float v1[KD_DIMS])
{
  for (uint j = 0; j < KD_DIMS; j++) {
    v0[j] = v1[j];
  }
}

static float len_squared_vnvn(const float v0[KD_DIMS], const float v1[KD_DIMS])
{
  float d = 0.0f;
  for (uint j = 0; j < KD_DIMS; j++) {
    d += square_f(v0[j] - v1[j]);
  }
  return d;
}

static float len_squared_vnvn_cb(const float co_kdtree[KD_DIMS],
                                 const float co_search[KD_DIMS],
                                 const void *UNUSED(user_data))
{
  return len_squared_vnvn(co_kdtree, co_search);
}

KDTree *BLI_kdtree_nd_(new)(uint nodes_len_capacity)
{
  KDTree *tree;

  tree = MEM_mallocN(sizeof(KDTree), "KDTree");
  tree->nodes = MEM_mallocN(sizeof(KDTreeNode) * nodes_len_capacity, "KDTreeNode");
  tree->nodes_len = 0;
  tree->root = KD_NODE_ROOT_IS_INIT;

#ifdef DEBUG
  tree->is_balanced = false;
  tree->nodes_len_capacity = nodes_len_capacity;
#endif

  return tree;
}

void BLI_kdtree_nd_(free)(KDTree *tree)
{
  if (tree) {
    MEM_freeN(tree->nodes);
    MEM_freeN(tree);
  }
}

void BLI_kdtree_nd_(insert)(KDTree *tree, int index, const float co[KD_DIMS])
{
  KDTreeNode *node = &tree->nodes[tree->nodes_len++];

#ifdef DEBUG
  BLI_assert(tree->nodes_len <= tree->nodes_len_capacity);
#endif

  /* NOTE: array isn't calloc'd,
   * need to initialize all struct members */

  node->left = node->right = KD_NODE_UNSET;
  copy_vn_vn(node->co, co);
  node->index = index;
  node->d = 0;

#ifdef DEBUG
  tree->is_balanced = false;
#endif
}

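/* Note on the balancing scheme (added commentary, not part of the original file):
 * `kdtree_balance` partitions `nodes[0..nodes_len)` in place around the median along
 * `axis` (a Hoare-style quick-select), makes that median element the subtree root,
 * then recurses into the lower and upper halves with the next axis
 * (`(axis + 1) % KD_DIMS`). `ofs` is the offset of this sub-array within
 * `tree->nodes`, so the returned value is a global node index, or KD_NODE_UNSET for
 * an empty range. */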
static uint kdtree_balance(KDTreeNode *nodes, uint nodes_len, uint axis, const uint ofs)
{
  KDTreeNode *node;
  float co;
  uint left, right, median, i, j;

  if (nodes_len <= 0) {
    return KD_NODE_UNSET;
  }
  else if (nodes_len == 1) {
    return 0 + ofs;
  }

  /* Quick-sort style sorting around median. */
  left = 0;
  right = nodes_len - 1;
  median = nodes_len / 2;

  while (right > left) {
    co = nodes[right].co[axis];
    i = left - 1;
    j = right;

    while (1) {
      while (nodes[++i].co[axis] < co) { /* pass */
      }
      while (nodes[--j].co[axis] > co && j > left) { /* pass */
      }

      if (i >= j) {
        break;
      }

      SWAP(KDTreeNode_head, *(KDTreeNode_head *)&nodes[i], *(KDTreeNode_head *)&nodes[j]);
    }

    SWAP(KDTreeNode_head, *(KDTreeNode_head *)&nodes[i], *(KDTreeNode_head *)&nodes[right]);
    if (i >= median) {
      right = i - 1;
    }
    if (i <= median) {
      left = i + 1;
    }
  }

  /* Set node and sort sub-nodes. */
  node = &nodes[median];
  node->d = axis;
  axis = (axis + 1) % KD_DIMS;
  node->left = kdtree_balance(nodes, median, axis, ofs);
  node->right = kdtree_balance(
      nodes + median + 1, (nodes_len - (median + 1)), axis, (median + 1) + ofs);

  return median + ofs;
}

void BLI_kdtree_nd_(balance)(KDTree *tree)
{
  if (tree->root != KD_NODE_ROOT_IS_INIT) {
    for (uint i = 0; i < tree->nodes_len; i++) {
      tree->nodes[i].left = KD_NODE_UNSET;
      tree->nodes[i].right = KD_NODE_UNSET;
    }
  }

  tree->root = kdtree_balance(tree->nodes, tree->nodes_len, 0, 0);

#ifdef DEBUG
  tree->is_balanced = true;
#endif
}

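/* Illustrative usage (added commentary, not part of the original file). This header is
 * instantiated for a fixed dimension via KD_DIMS / KDTREE_PREFIX_ID, e.g. the 3D variant
 * exposes BLI_kdtree_3d_*. A typical build/query cycle, assuming the 3D instantiation and
 * a caller-provided `float (*points)[3]` array:
 *
 *   KDTree_3d *tree = BLI_kdtree_3d_new(points_len);
 *   for (uint i = 0; i < points_len; i++) {
 *     BLI_kdtree_3d_insert(tree, (int)i, points[i]);
 *   }
 *   BLI_kdtree_3d_balance(tree);  // required before any lookup (see `is_balanced`)
 *
 *   KDTreeNearest_3d nearest;
 *   if (BLI_kdtree_3d_find_nearest(tree, query_co, &nearest) != -1) {
 *     // nearest.index, nearest.dist and nearest.co are valid here
 *   }
 *   BLI_kdtree_3d_free(tree);
 */
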
static uint *realloc_nodes(uint *stack, uint *stack_len_capacity, const bool is_alloc)
{
  uint *stack_new = MEM_mallocN((*stack_len_capacity + KD_NEAR_ALLOC_INC) * sizeof(uint),
                                "KDTree.treestack");
  memcpy(stack_new, stack, *stack_len_capacity * sizeof(uint));
  // memset(stack_new + *stack_len_capacity, 0, sizeof(uint) * KD_NEAR_ALLOC_INC);
  if (is_alloc) {
    MEM_freeN(stack);
  }
  *stack_len_capacity += KD_NEAR_ALLOC_INC;
  return stack_new;
}

int BLI_kdtree_nd_(find_nearest)(const KDTree *tree,
                                 const float co[KD_DIMS],
                                 KDTreeNearest *r_nearest)
{
  const KDTreeNode *nodes = tree->nodes;
  const KDTreeNode *root, *min_node;
  uint *stack, stack_default[KD_STACK_INIT];
  float min_dist, cur_dist;
  uint stack_len_capacity, cur = 0;

#ifdef DEBUG
  BLI_assert(tree->is_balanced == true);
#endif

  if (UNLIKELY(tree->root == KD_NODE_UNSET)) {
    return -1;
  }

  stack = stack_default;
  stack_len_capacity = KD_STACK_INIT;

  root = &nodes[tree->root];
  min_node = root;
  min_dist = len_squared_vnvn(root->co, co);

  if (co[root->d] < root->co[root->d]) {
    if (root->right != KD_NODE_UNSET) {
      stack[cur++] = root->right;
    }
    if (root->left != KD_NODE_UNSET) {
      stack[cur++] = root->left;
    }
  }
  else {
    if (root->left != KD_NODE_UNSET) {
      stack[cur++] = root->left;
    }
    if (root->right != KD_NODE_UNSET) {
      stack[cur++] = root->right;
    }
  }

  while (cur--) {
    const KDTreeNode *node = &nodes[stack[cur]];

    cur_dist = node->co[node->d] - co[node->d];

    if (cur_dist < 0.0f) {
      cur_dist = -cur_dist * cur_dist;

      if (-cur_dist < min_dist) {
        cur_dist = len_squared_vnvn(node->co, co);
        if (cur_dist < min_dist) {
          min_dist = cur_dist;
          min_node = node;
        }
        if (node->left != KD_NODE_UNSET) {
          stack[cur++] = node->left;
        }
      }
      if (node->right != KD_NODE_UNSET) {
        stack[cur++] = node->right;
      }
    }
    else {
      cur_dist = cur_dist * cur_dist;

      if (cur_dist < min_dist) {
        cur_dist = len_squared_vnvn(node->co, co);
        if (cur_dist < min_dist) {
          min_dist = cur_dist;
          min_node = node;
        }
        if (node->right != KD_NODE_UNSET) {
          stack[cur++] = node->right;
        }
      }
      if (node->left != KD_NODE_UNSET) {
        stack[cur++] = node->left;
      }
    }
    if (UNLIKELY(cur + KD_DIMS > stack_len_capacity)) {
      stack = realloc_nodes(stack, &stack_len_capacity, stack_default != stack);
    }
  }

  if (r_nearest) {
    r_nearest->index = min_node->index;
    r_nearest->dist = sqrtf(min_dist);
    copy_vn_vn(r_nearest->co, min_node->co);
  }

  if (stack != stack_default) {
    MEM_freeN(stack);
  }

  return min_node->index;
}

int BLI_kdtree_nd_(find_nearest_cb)(
    const KDTree *tree,
    const float co[KD_DIMS],
    int (*filter_cb)(void *user_data, int index, const float co[KD_DIMS], float dist_sq),
    void *user_data,
    KDTreeNearest *r_nearest)
{
  const KDTreeNode *nodes = tree->nodes;
  const KDTreeNode *min_node = NULL;

  uint *stack, stack_default[KD_STACK_INIT];
  float min_dist = FLT_MAX, cur_dist;
  uint stack_len_capacity, cur = 0;

#ifdef DEBUG
  BLI_assert(tree->is_balanced == true);
#endif

  if (UNLIKELY(tree->root == KD_NODE_UNSET)) {
    return -1;
  }

  stack = stack_default;
  stack_len_capacity = ARRAY_SIZE(stack_default);

#define NODE_TEST_NEAREST(node) \
  { \
    const float dist_sq = len_squared_vnvn((node)->co, co); \
    if (dist_sq < min_dist) { \
      const int result = filter_cb(user_data, (node)->index, (node)->co, dist_sq); \
      if (result == 1) { \
        min_dist = dist_sq; \
        min_node = node; \
      } \
      else if (result == 0) { \
        /* pass */ \
      } \
      else { \
        BLI_assert(result == -1); \
        goto finally; \
      } \
    } \
  } \
  ((void)0)

  stack[cur++] = tree->root;

  while (cur--) {
    const KDTreeNode *node = &nodes[stack[cur]];

    cur_dist = node->co[node->d] - co[node->d];

    if (cur_dist < 0.0f) {
      cur_dist = -cur_dist * cur_dist;

      if (-cur_dist < min_dist) {
        NODE_TEST_NEAREST(node);

        if (node->left != KD_NODE_UNSET) {
          stack[cur++] = node->left;
        }
      }
      if (node->right != KD_NODE_UNSET) {
        stack[cur++] = node->right;
      }
    }
    else {
      cur_dist = cur_dist * cur_dist;

      if (cur_dist < min_dist) {
        NODE_TEST_NEAREST(node);

        if (node->right != KD_NODE_UNSET) {
          stack[cur++] = node->right;
        }
      }
      if (node->left != KD_NODE_UNSET) {
        stack[cur++] = node->left;
      }
    }
    if (UNLIKELY(cur + KD_DIMS > stack_len_capacity)) {
      stack = realloc_nodes(stack, &stack_len_capacity, stack_default != stack);
    }
  }

#undef NODE_TEST_NEAREST

finally:
  if (stack != stack_default) {
    MEM_freeN(stack);
  }

  if (min_node) {
    if (r_nearest) {
      r_nearest->index = min_node->index;
      r_nearest->dist = sqrtf(min_dist);
      copy_vn_vn(r_nearest->co, min_node->co);
    }

    return min_node->index;
  }
  else {
    return -1;
  }
}

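/* Sketch of a `filter_cb` for the function above (added commentary, not part of the
 * original file): return 1 to accept a candidate, 0 to skip it, -1 to abort the whole
 * search. For example, skipping one point by its index, assuming the 3D instantiation
 * and a hypothetical `skip_index` passed through `user_data`:
 *
 *   static int filter_skip_index(void *user_data, int index, const float co[3], float dist_sq)
 *   {
 *     const int *skip_index = user_data;
 *     return (index == *skip_index) ? 0 : 1;
 *   }
 */
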
/* Insert `index` into `nearest`, keeping the array sorted by ascending `dist` and
 * dropping the farthest entry once the array is at capacity. */
static void nearest_ordered_insert(KDTreeNearest *nearest,
                                   uint *nearest_len,
                                   const uint nearest_len_capacity,
                                   const int index,
                                   const float dist,
                                   const float co[KD_DIMS])
{
  uint i;

  if (*nearest_len < nearest_len_capacity) {
    (*nearest_len)++;
  }

  for (i = *nearest_len - 1; i > 0; i--) {
    if (dist >= nearest[i - 1].dist) {
      break;
    }
    else {
      nearest[i] = nearest[i - 1];
    }
  }

  nearest[i].index = index;
  nearest[i].dist = dist;
  copy_vn_vn(nearest[i].co, co);
}

int BLI_kdtree_nd_(find_nearest_n_with_len_squared_cb)(
    const KDTree *tree,
    const float co[KD_DIMS],
    KDTreeNearest r_nearest[],
    const uint nearest_len_capacity,
    float (*len_sq_fn)(const float co_search[KD_DIMS],
                       const float co_test[KD_DIMS],
                       const void *user_data),
    const void *user_data)
{
  const KDTreeNode *nodes = tree->nodes;
  const KDTreeNode *root;
  uint *stack, stack_default[KD_STACK_INIT];
  float cur_dist;
  uint stack_len_capacity, cur = 0;
  uint i, nearest_len = 0;

#ifdef DEBUG
  BLI_assert(tree->is_balanced == true);
#endif

  if (UNLIKELY((tree->root == KD_NODE_UNSET) || nearest_len_capacity == 0)) {
    return 0;
  }

  if (len_sq_fn == NULL) {
    len_sq_fn = len_squared_vnvn_cb;
    BLI_assert(user_data == NULL);
  }

  stack = stack_default;
  stack_len_capacity = ARRAY_SIZE(stack_default);

  root = &nodes[tree->root];

  cur_dist = len_sq_fn(co, root->co, user_data);
  nearest_ordered_insert(
      r_nearest, &nearest_len, nearest_len_capacity, root->index, cur_dist, root->co);

  if (co[root->d] < root->co[root->d]) {
    if (root->right != KD_NODE_UNSET) {
      stack[cur++] = root->right;
    }
    if (root->left != KD_NODE_UNSET) {
      stack[cur++] = root->left;
    }
  }
  else {
    if (root->left != KD_NODE_UNSET) {
      stack[cur++] = root->left;
    }
    if (root->right != KD_NODE_UNSET) {
      stack[cur++] = root->right;
    }
  }

  while (cur--) {
    const KDTreeNode *node = &nodes[stack[cur]];

    cur_dist = node->co[node->d] - co[node->d];

    if (cur_dist < 0.0f) {
      cur_dist = -cur_dist * cur_dist;

      if (nearest_len < nearest_len_capacity || -cur_dist < r_nearest[nearest_len - 1].dist) {
        cur_dist = len_sq_fn(co, node->co, user_data);

        if (nearest_len < nearest_len_capacity || cur_dist < r_nearest[nearest_len - 1].dist) {
          nearest_ordered_insert(
              r_nearest, &nearest_len, nearest_len_capacity, node->index, cur_dist, node->co);
        }

        if (node->left != KD_NODE_UNSET) {
          stack[cur++] = node->left;
        }
      }
      if (node->right != KD_NODE_UNSET) {
        stack[cur++] = node->right;
      }
    }
    else {
      cur_dist = cur_dist * cur_dist;

      if (nearest_len < nearest_len_capacity || cur_dist < r_nearest[nearest_len - 1].dist) {
        cur_dist = len_sq_fn(co, node->co, user_data);
        if (nearest_len < nearest_len_capacity || cur_dist < r_nearest[nearest_len - 1].dist) {
          nearest_ordered_insert(
              r_nearest, &nearest_len, nearest_len_capacity, node->index, cur_dist, node->co);
        }

        if (node->right != KD_NODE_UNSET) {
          stack[cur++] = node->right;
        }
      }
      if (node->left != KD_NODE_UNSET) {
        stack[cur++] = node->left;
      }
    }
    if (UNLIKELY(cur + KD_DIMS > stack_len_capacity)) {
      stack = realloc_nodes(stack, &stack_len_capacity, stack_default != stack);
    }
  }

  for (i = 0; i < nearest_len; i++) {
    r_nearest[i].dist = sqrtf(r_nearest[i].dist);
  }

  if (stack != stack_default) {
    MEM_freeN(stack);
  }

  return (int)nearest_len;
}

int BLI_kdtree_nd_(find_nearest_n)(const KDTree *tree,
                                   const float co[KD_DIMS],
                                   KDTreeNearest r_nearest[],
                                   uint nearest_len_capacity)
{
  return BLI_kdtree_nd_(find_nearest_n_with_len_squared_cb)(
      tree, co, r_nearest, nearest_len_capacity, NULL, NULL);
}

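/* Illustrative call (added commentary, not part of the original file), assuming the 3D
 * instantiation. The return value is how many of the requested neighbors were found;
 * results come back sorted nearest-first with `dist` already un-squared:
 *
 *   KDTreeNearest_3d nearest[10];
 *   const int found = BLI_kdtree_3d_find_nearest_n(tree, query_co, nearest, ARRAY_SIZE(nearest));
 *   for (int i = 0; i < found; i++) {
 *     // nearest[i].index, nearest[i].dist, nearest[i].co
 *   }
 */
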
static int nearest_cmp_dist(const void *a, const void *b)
{
  const KDTreeNearest *kda = a;
  const KDTreeNearest *kdb = b;

  if (kda->dist < kdb->dist) {
    return -1;
  }
  else if (kda->dist > kdb->dist) {
    return 1;
  }
  else {
    return 0;
  }
}
static void nearest_add_in_range(KDTreeNearest **r_nearest,
                                 uint nearest_index,
                                 uint *nearest_len_capacity,
                                 const int index,
                                 const float dist,
                                 const float co[KD_DIMS])
{
  KDTreeNearest *to;

  if (UNLIKELY(nearest_index >= *nearest_len_capacity)) {
    *r_nearest = MEM_reallocN_id(
        *r_nearest, (*nearest_len_capacity += KD_FOUND_ALLOC_INC) * sizeof(KDTreeNode), __func__);
  }

  to = (*r_nearest) + nearest_index;

  to->index = index;
  to->dist = sqrtf(dist);
  copy_vn_vn(to->co, co);
}

int BLI_kdtree_nd_(range_search_with_len_squared_cb)(
    const KDTree *tree,
    const float co[KD_DIMS],
    KDTreeNearest **r_nearest,
    const float range,
    float (*len_sq_fn)(const float co_search[KD_DIMS],
                       const float co_test[KD_DIMS],
                       const void *user_data),
    const void *user_data)
{
  const KDTreeNode *nodes = tree->nodes;
  uint *stack, stack_default[KD_STACK_INIT];
  KDTreeNearest *nearest = NULL;
  const float range_sq = range * range;
  float dist_sq;
  uint stack_len_capacity, cur = 0;
  uint nearest_len = 0, nearest_len_capacity = 0;

#ifdef DEBUG
  BLI_assert(tree->is_balanced == true);
#endif

  if (UNLIKELY(tree->root == KD_NODE_UNSET)) {
    return 0;
  }

  if (len_sq_fn == NULL) {
    len_sq_fn = len_squared_vnvn_cb;
    BLI_assert(user_data == NULL);
  }

  stack = stack_default;
  stack_len_capacity = ARRAY_SIZE(stack_default);

  stack[cur++] = tree->root;

  while (cur--) {
    const KDTreeNode *node = &nodes[stack[cur]];

    if (co[node->d] + range < node->co[node->d]) {
      if (node->left != KD_NODE_UNSET) {
        stack[cur++] = node->left;
      }
    }
    else if (co[node->d] - range > node->co[node->d]) {
      if (node->right != KD_NODE_UNSET) {
        stack[cur++] = node->right;
      }
    }
    else {
      dist_sq = len_sq_fn(co, node->co, user_data);
      if (dist_sq <= range_sq) {
        nearest_add_in_range(
            &nearest, nearest_len++, &nearest_len_capacity, node->index, dist_sq, node->co);
      }

      if (node->left != KD_NODE_UNSET) {
        stack[cur++] = node->left;
      }
      if (node->right != KD_NODE_UNSET) {
        stack[cur++] = node->right;
      }
    }

    if (UNLIKELY(cur + KD_DIMS > stack_len_capacity)) {
      stack = realloc_nodes(stack, &stack_len_capacity, stack_default != stack);
    }
  }

  if (stack != stack_default) {
    MEM_freeN(stack);
  }

  if (nearest_len) {
    qsort(nearest, nearest_len, sizeof(KDTreeNearest), nearest_cmp_dist);
  }

  *r_nearest = nearest;

  return (int)nearest_len;
}

int BLI_kdtree_nd_(range_search)(const KDTree *tree,
                                 const float co[KD_DIMS],
                                 KDTreeNearest **r_nearest,
                                 float range)
{
  return BLI_kdtree_nd_(range_search_with_len_squared_cb)(tree, co, r_nearest, range, NULL, NULL);
}

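/* Illustrative call (added commentary, not part of the original file), assuming the 3D
 * instantiation. The result array is allocated internally (see `nearest_add_in_range`)
 * and sorted by distance; the caller owns it and frees it with MEM_freeN:
 *
 *   KDTreeNearest_3d *results = NULL;
 *   const int found = BLI_kdtree_3d_range_search(tree, query_co, &results, 0.5f);
 *   for (int i = 0; i < found; i++) {
 *     // results[i].index, results[i].dist, results[i].co
 *   }
 *   if (results) {
 *     MEM_freeN(results);
 *   }
 */
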
void BLI_kdtree_nd_(range_search_cb)(
    const KDTree *tree,
    const float co[KD_DIMS],
    float range,
    bool (*search_cb)(void *user_data, int index, const float co[KD_DIMS], float dist_sq),
    void *user_data)
{
  const KDTreeNode *nodes = tree->nodes;

  uint *stack, stack_default[KD_STACK_INIT];
  float range_sq = range * range, dist_sq;
  uint stack_len_capacity, cur = 0;

#ifdef DEBUG
  BLI_assert(tree->is_balanced == true);
#endif

  if (UNLIKELY(tree->root == KD_NODE_UNSET)) {
    return;
  }

  stack = stack_default;
  stack_len_capacity = ARRAY_SIZE(stack_default);

  stack[cur++] = tree->root;

  while (cur--) {
    const KDTreeNode *node = &nodes[stack[cur]];

    if (co[node->d] + range < node->co[node->d]) {
      if (node->left != KD_NODE_UNSET) {
        stack[cur++] = node->left;
      }
    }
    else if (co[node->d] - range > node->co[node->d]) {
      if (node->right != KD_NODE_UNSET) {
        stack[cur++] = node->right;
      }
    }
    else {
      dist_sq = len_squared_vnvn(node->co, co);
      if (dist_sq <= range_sq) {
        if (search_cb(user_data, node->index, node->co, dist_sq) == false) {
          goto finally;
        }
      }

      if (node->left != KD_NODE_UNSET) {
        stack[cur++] = node->left;
      }
      if (node->right != KD_NODE_UNSET) {
        stack[cur++] = node->right;
      }
    }

    if (UNLIKELY(cur + KD_DIMS > stack_len_capacity)) {
      stack = realloc_nodes(stack, &stack_len_capacity, stack_default != stack);
    }
  }

finally:
  if (stack != stack_default) {
    MEM_freeN(stack);
  }
}

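/* Sketch of a `search_cb` for the function above (added commentary, not part of the
 * original file): return true to keep iterating, false to stop the search early. A
 * hypothetical callback that collects at most `max_count` hits, assuming the 3D
 * instantiation:
 *
 *   struct CollectData { int *indices; int count, max_count; };
 *
 *   static bool collect_cb(void *user_data, int index, const float co[3], float dist_sq)
 *   {
 *     struct CollectData *data = user_data;
 *     if (data->count < data->max_count) {
 *       data->indices[data->count++] = index;
 *     }
 *     return data->count < data->max_count;
 *   }
 */
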
static uint *kdtree_order(const KDTree *tree)
{
  const KDTreeNode *nodes = tree->nodes;
  uint *order = MEM_mallocN(sizeof(uint) * tree->nodes_len, __func__);
  for (uint i = 0; i < tree->nodes_len; i++) {
    order[nodes[i].index] = i;
  }
  return order;
}

/* -------------------------------------------------------------------- */

struct DeDuplicateParams {
  /* Static */
  const KDTreeNode *nodes;
  float range;
  float range_sq;
  int *duplicates;
  int *duplicates_found;

  /* Per Search */
  float search_co[KD_DIMS];
  int search;
};

static void deduplicate_recursive(const struct DeDuplicateParams *p, uint i)
{
  const KDTreeNode *node = &p->nodes[i];
  if (p->search_co[node->d] + p->range <= node->co[node->d]) {
    if (node->left != KD_NODE_UNSET) {
      deduplicate_recursive(p, node->left);
    }
  }
  else if (p->search_co[node->d] - p->range >= node->co[node->d]) {
    if (node->right != KD_NODE_UNSET) {
      deduplicate_recursive(p, node->right);
    }
  }
  else {
    if ((p->search != node->index) && (p->duplicates[node->index] == -1)) {
      if (len_squared_vnvn(node->co, p->search_co) <= p->range_sq) {
        p->duplicates[node->index] = (int)p->search;
        *p->duplicates_found += 1;
      }
    }
    if (node->left != KD_NODE_UNSET) {
      deduplicate_recursive(p, node->left);
    }
    if (node->right != KD_NODE_UNSET) {
      deduplicate_recursive(p, node->right);
    }
  }
}

int BLI_kdtree_nd_(calc_duplicates_fast)(const KDTree *tree,
                                         const float range,
                                         bool use_index_order,
                                         int *duplicates)
{
  int found = 0;
  struct DeDuplicateParams p = {
      .nodes = tree->nodes,
      .range = range,
      .range_sq = square_f(range),
      .duplicates = duplicates,
      .duplicates_found = &found,
  };

  if (use_index_order) {
    uint *order = kdtree_order(tree);
    for (uint i = 0; i < tree->nodes_len; i++) {
      const uint node_index = order[i];
      const int index = (int)i;
      if (ELEM(duplicates[index], -1, index)) {
        p.search = index;
        copy_vn_vn(p.search_co, tree->nodes[node_index].co);
        int found_prev = found;
        deduplicate_recursive(&p, tree->root);
        if (found != found_prev) {
          /* Prevent chains of doubles. */
          duplicates[index] = index;
        }
      }
    }
    MEM_freeN(order);
  }
  else {
    for (uint i = 0; i < tree->nodes_len; i++) {
      const uint node_index = i;
      const int index = p.nodes[node_index].index;
      if (ELEM(duplicates[index], -1, index)) {
        p.search = index;
        copy_vn_vn(p.search_co, tree->nodes[node_index].co);
        int found_prev = found;
        deduplicate_recursive(&p, tree->root);
        if (found != found_prev) {
          /* Prevent chains of doubles. */
          duplicates[index] = index;
        }
      }
    }
  }
  return found;
}

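/* Usage note (added commentary, not part of the original file): `duplicates` must hold
 * one element per point and be pre-filled with -1. Only entries still set to -1 (or to
 * their own index) are used as search sources; each detected duplicate entry is
 * overwritten with the index of the point it duplicates, and the return value is the
 * number of entries remapped this way. */
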
/* -------------------------------------------------------------------- */

static int kdtree_cmp_bool(const bool a, const bool b)
{
  if (a == b) {
    return 0;
  }
  return b ? -1 : 1;
}

static int kdtree_node_cmp_deduplicate(const void *n0_p, const void *n1_p)
{
  const KDTreeNode *n0 = n0_p;
  const KDTreeNode *n1 = n1_p;
  for (uint j = 0; j < KD_DIMS; j++) {
    if (n0->co[j] < n1->co[j]) {
      return -1;
    }
    else if (n0->co[j] > n1->co[j]) {
      return 1;
    }
  }

  if (n0->d != KD_DIMS && n1->d != KD_DIMS) {
    /* Two nodes share identical `co`
     * Both are still valid.
     * Cast away `const` and tag one of them as invalid. */
    ((KDTreeNode *)n1)->d = KD_DIMS;
  }

  /* Keep sorting until each unique value has one and only one valid node. */
  return kdtree_cmp_bool(n0->d == KD_DIMS, n1->d == KD_DIMS);
}

int BLI_kdtree_nd_(deduplicate)(KDTree *tree)
{
#ifdef DEBUG
  tree->is_balanced = false;
#endif
  qsort(tree->nodes, (size_t)tree->nodes_len, sizeof(*tree->nodes), kdtree_node_cmp_deduplicate);
  uint j = 0;
  for (uint i = 0; i < tree->nodes_len; i++) {
    if (tree->nodes[i].d != KD_DIMS) {
      if (i != j) {
        tree->nodes[j] = tree->nodes[i];
      }
      j++;
    }
  }
  tree->nodes_len = j;
  return (int)tree->nodes_len;
}

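/* Note (added commentary, not part of the original file): `deduplicate` reorders and
 * shrinks the node array via `qsort`, so any previous balancing is invalidated (see the
 * DEBUG `is_balanced` flag above); call `balance` again before running queries. */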