#define KDOP_TREE_TYPE 4
#define KDOP_AXIS_LEN 14
#define BLI_STACK_PAIR_LEN (2 * KDOP_TREE_TYPE)
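/* Hedged sketch, not verbatim from this file: the constants above appear to configure the
 * 14-DOP BVH trees used for the overlap queries (tree_type 4, 14 bounding-axis planes) and to
 * size the per-thread stacks that collect overlapping pairs; roughly along these lines
 * (the `0.0f` epsilon and the `verts_act_len` bound are assumptions): */
BVHTree *tree_verts_act = BLI_bvhtree_new(verts_act_len, 0.0f, KDOP_TREE_TYPE, KDOP_AXIS_LEN);
BLI_Stack *pair_stack[BLI_STACK_PAIR_LEN] = {NULL};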
BMVert *v_test = ELEM((*e_iter)->v1, e_next->v1, e_next->v2) ? (*e_iter)->v2 : (*e_iter)->v1;
int verts_len = data->edgenet_len - 1;
for (int i = verts_len; i--; e_iter++) {
const float test_edgenet_range_on_face_normal = max - min;
data->best_edgenet_range_on_face_normal = test_edgenet_range_on_face_normal;
data->r_best_face = f;
float *v_a_co = data[0];
float *v_a_b_dir = data[1];
const float range_min = -FLT_EPSILON;
const float range_max = 1.0f + FLT_EPSILON;
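/* Hedged reading: the factor below is accepted when it lies in the unit interval padded by
 * FLT_EPSILON, i.e. the projected point falls on the edge segment or negligibly outside it. */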
if (IN_RANGE(lambda_b, range_min, range_max)) {
return IN_RANGE(lambda_b, range_min, range_max);
.best_edgenet_range_on_face_normal = FLT_MAX,
if (data.r_best_face) {
float no[3], min = FLT_MAX, max = -FLT_MAX;
if (face_range_on_normal < data.best_edgenet_range_on_face_normal) {
r_pair_elem->vert = v;
int *r_data_cut_edges_len,
r_pair_elem->edge = e;
r_pair_elem->lambda = lambda;
int *data_cut_edges_len,
float dist_sq_vert_factor;
dist_sq_vert_factor = lambda;
dist_sq_vert_factor = 1.0f - lambda;
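/* Hedged reading: the factor is the cut's fractional distance from the nearer edge endpoint;
 * when the resulting distance falls inside the merge threshold, the intersection is presumably
 * treated as hitting that endpoint rather than cutting the edge. */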
if (dist_sq_vert < data_dist_sq) {
if (dist_sq < data_dist_sq) {
if (index_a < index_b) {
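/* Hedged note: in the self-overlap callbacks each pair is reported from both sides, so only
 * the ordered half (index_a < index_b) appears to be processed. */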
float co[3], dir[3], lambda;
pair[0] = pair_tmp[0];
pair[1] = pair_tmp[1];
const float dir_a[3],
const float dir_b[3],
float dist_sq_va_factor, dist_sq_vb_factor;
if (lambda_a < 0.5f) {
dist_sq_va_factor = lambda_a;
dist_sq_va_factor = 1.0f - lambda_a;
if (lambda_b < 0.5f) {
dist_sq_vb_factor = lambda_b;
dist_sq_vb_factor = 1.0f - lambda_b;
if (e_a_v != e_b_v) {
if (dist_sq_va < data->dist_sq || dist_sq_vb < data->dist_sq) {
float near_a[3], near_b[3];
if (dist_sq < data->dist_sq) {
float co_a[3], dir_a[3], co_b[3], dir_b[3];
float lambda_a, lambda_b;
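/* Hedged sketch (assumption, not this file's exact code): the factors along both edges are
 * presumably obtained from the math-lib ray/ray helper before the candidate pair is built: */
if (isect_ray_ray_v3(co_a, dir_a, co_b, dir_b, &lambda_a, &lambda_b)) {
  /* ... validate the pair ... */
}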
data, e_a, e_b, co_a, dir_a, co_b, dir_b, lambda_a, lambda_b, pair_tmp)) {
pair[0] = pair_tmp[0];
pair[1] = pair_tmp[1];
if (index_a < index_b) {
for (int i = 0; i < parallel_tasks_num; i++) {
if (pair_stack[i] == NULL) {
data->pair_stack = pair_stack;
const int index1 = *(int *)index1_v;
const int index2 = *(int *)index2_v;
if (pair_flat[index1].lambda > pair_flat[index2].lambda) {
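/* Hedged reading: this comparator orders an edge's cut indices by their lambda, so the splits
 * are later applied in sequence from the edge's first vertex toward its second. */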
#define INTERSECT_EDGES
BMesh *bm, const char hflag, const float dist, const bool split_faces, GHash *r_targetmap)
BLI_Stack **pair_stack_vertxvert = pair_stack;
const float dist_sq = square_f(dist);
const float dist_half = dist / 2;
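/* Hedged reading: distances are compared squared to avoid square roots, and half the merge
 * distance presumably pads the BVH bounds so that two elements within `dist` of each other
 * are guaranteed to overlap. */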
int verts_act_len = 0, verts_remain_len = 0;
if (verts_remain_len) {
if (tree_verts_act || tree_verts_remain) {
if (tree_verts_act) {
if (tree_verts_act) {
if (tree_verts_remain) {
if (tree_verts_act && tree_verts_remain) {
if (pair_stack_vertxvert[i]) {
#ifdef INTERSECT_EDGES
uint vertxvert_pair_len = pair_len;
# define EDGE_ACT_TO_TEST 1
# define EDGE_REMAIN_TO_TEST 2
int edges_act_len = 0, edges_remain_len = 0;
if (edges_remain_len && (tree_edges_act || tree_verts_act)) {
if (tree_edges_act || tree_edges_remain) {
# ifdef INTERSECT_EDGES_DEBUG
# undef EDGE_ACT_TO_TEST
# undef EDGE_REMAIN_TO_TEST
if (tree_edges_act) {
if (tree_edges_remain) {
int edgexedge_pair_len = 0;
if (tree_edges_act) {
if (tree_edges_remain) {
if (pair_stack_edgexelem[i]) {
if (tree_verts_act) {
if (tree_verts_remain) {
if (tree_verts_act && tree_edges_remain) {
int edgexelem_pair_len = 0;
if (pair_stack_edgexelem[i]) {
pair_len += edgexelem_pair_len;
int edgexvert_pair_len = edgexelem_pair_len - edgexedge_pair_len;
if (edgexelem_pair_len) {
pair_array = MEM_mallocN(sizeof(*pair_array) * pair_len, __func__);
pair_iter = pair_array;
} *e_map_iter, *e_map;
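/* Hedged reading of the map layout: `e_map` appears to be one flat int buffer in which each
 * entry stores `cuts_len` followed by that many `cuts_index` values, matching the
 * `map_len += 1 + e_cuts_len` stride and the size computation below. */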
# ifdef INTERSECT_EDGES_DEBUG
int cut_edges_len = 0;
size_t e_map_size = (data.cut_edges_len * sizeof(*e_map)) +
                    (((size_t)2 * edgexedge_pair_len + edgexvert_pair_len) * sizeof(*(e_map->cuts_index)));
pair_flat = (struct EDBMSplitElem *)&pair_array[vertxvert_pair_len];
pair_flat_iter = &pair_flat[0];
uint pair_flat_len = 2 * edgexelem_pair_len;
for (i = 0; i < pair_flat_len; i++, pair_flat_iter++) {
e = pair_flat_iter->edge;
e_map_iter = (void *)&e_map->as_int[map_len];
e_map_iter->cuts_len = e_cuts_len;
e_map_iter->cuts_index[0] = i;
map_len += 1 + e_cuts_len;
for (i = 0; i < map_len;
     e_map_iter = (void *)&e_map->as_int[i], i += 1 + e_map_iter->cuts_len) {
e_map_iter->cuts_len,
sizeof(*(e_map->cuts_index)),
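/* Hedged reading: the two lines above appear to be arguments of a BLI_qsort_r() call that
 * sorts each edge's cut indices with sort_cmp_by_lambda_cb() before the edge is split. */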
float lambda, lambda_prev = 0.0f;
for (int j = 0; j < e_map_iter->cuts_len; j++) {
uint index = e_map_iter->cuts_index[j];
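/* After each split only the remainder of the original edge is left, spanning
 * [lambda_prev, 1.0]; the next cut factor is therefore re-normalized to that remaining
 * segment before the edge is split again (hedged reading of the formula below). */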
lambda = (pair_elem->lambda - lambda_prev) / (1.0f - lambda_prev);
lambda_prev = pair_elem->lambda;
pair_elem->vert = v_new;
if (pair_len && pair_array == NULL) {
pair_array = MEM_mallocN(sizeof(*pair_array) * pair_len, __func__);
pair_iter = pair_array;
pair_iter = &pair_array[0];
for (i = 0; i < pair_len; i++, pair_iter++) {
v_key = (*pair_iter)[0].vert;
v_val = (*pair_iter)[1].vert;
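/* Hedged sketch (assumption): each surviving pair is recorded in the caller's target map so
 * that `v_key` can later be welded onto `v_val`, roughly: */
BLI_ghash_insert(r_targetmap, v_key, v_val);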
pair_iter = &pair_array[0];
for (i = 0; i < pair_len; i++, pair_iter++) {
v_key = (*pair_iter)[0].vert;
v_val = (*pair_iter)[1].vert;
if (v_val != (*pair_iter)[1].vert) {
*v_val_p = (*pair_iter)[1].vert = v_val;
int edgenet_alloc_len = 0;
if (v_cut == -1 && v_cut_other == -1) {
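/* Hedged reading: pair elements are stored flat, two per pair, so toggling the low bit of the
 * index (below) switches from a cut element to its partner element in the same pair. */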
v_cut += v_cut % 2 ? -1 : 1;
va_dest = pair_flat[v_cut].vert;
BMVert *v_other_dest, *v_other = vb;
if (v_cut_other != -1) {
v_cut_other += v_cut_other % 2 ? -1 : 1;
v_other_dest = pair_flat[v_cut_other].vert;
v_other_dest = v_other;
if (va_dest == v_other_dest) {
if (edgenet_alloc_len == edgenet_len) {
edgenet_alloc_len = (edgenet_alloc_len + 1) * 2;
edgenet = MEM_reallocN(edgenet, (edgenet_alloc_len) * sizeof(*edgenet));
edgenet[edgenet_len++] = e_net;
va_dest, v_other_dest, edgenet, edgenet_len, dist);
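/* Hedged reading: these appear to be the arguments of bm_vert_pair_best_face_get(), which
 * picks the face whose extent along its normal best matches the edge-net before
 * BM_face_split_edgenet() is run on it. */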
if (edgenet_len > 1) {
if ((edgenet_len > 1) && (v_other_dest != v_other) &&
e_net = edgenet[edgenet_len - 1];
if (e_next == NULL) {
if (v_other == va) {
int face_arr_len = 0;
while (face_arr_len--) {
if (pair_stack[i]) {