124 "lattice deformVert");
142 bool changed = false;
151 if (cd_dvert_offset != -1) {
174 for (i = 0; i < me->totvert; i++, mv++, dv++) {
175 if (dv->dw && (!use_selection || (mv->flag & SELECT))) {
191 for (i = 0, bp = lt->def; i < tot; i++, bp++) {
192 if (!use_selection || (bp->f1 & SELECT)) {
211 bool changed = false;
215 for (dg = defbase->first; dg; dg = dg->next) {
236 for (i = 1; i < idx; i++) {
239 for (i = idx + 1; i < defbase_tot; i++) {
258 if (active_index > def_nr) {
295 for (i = 0, dv = dvert_array; i < dvert_tot; i++, dv++) {
357 for (a = 0, bp = lt->def; a < tot; a++, bp++, dvert++) {
358 for (i = 0; i < dvert->totweight; i++) {
359 if (dvert->dw[i].def_nr > def_nr) {
360 dvert->dw[i].def_nr--;
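The 357-360 fragment is the re-indexing pass that follows removal of a deform group: every weight entry that points at a higher group index is shifted down by one so it keeps referencing the same group. Below is a minimal standalone sketch of that pattern, using hypothetical WeightEntry/VertWeights types rather than Blender's MDeformWeight/MDeformVert.

#include <stddef.h>

typedef struct WeightEntry {
  int def_nr;   /* index of the deform group this weight belongs to */
  float weight;
} WeightEntry;

typedef struct VertWeights {
  WeightEntry *dw;
  int totweight;
} VertWeights;

/* After deleting group `removed_def_nr`, shift every higher group index down
 * by one so each remaining weight still refers to the same group. */
static void verts_shift_group_indices(VertWeights *verts, size_t vert_len, int removed_def_nr)
{
  for (size_t v = 0; v < vert_len; v++) {
    for (int i = 0; i < verts[v].totweight; i++) {
      if (verts[v].dw[i].def_nr > removed_def_nr) {
        verts[v].dw[i].def_nr--;
      }
    }
  }
}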
445 *r_map_len, sizeof(*vgroup_index_map), "defgroup index map create");
446 bool is_vgroup_remap_needed = false;
449 for (dg_src = src_defbase->first, i = 0; dg_src; dg_src = dg_src->next, i++) {
451 is_vgroup_remap_needed = is_vgroup_remap_needed || (vgroup_index_map[i] != i);
454 if (!is_vgroup_remap_needed) {
456 vgroup_index_map = NULL;
460 return vgroup_index_map;
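The 445-460 fragment builds a group index map and discards it (returning NULL) when it turns out to be the identity, so callers can skip the remap entirely. The sketch below follows that shape; the name-based matching step is an assumption (the fragment only shows the identity check and the NULL return), and defgroup_name_index is a hypothetical helper.

#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical helper: linear search for a group name, -1 if absent. */
static int defgroup_name_index(const char (*names)[64], int tot, const char *name)
{
  for (int i = 0; i < tot; i++) {
    if (strcmp(names[i], name) == 0) {
      return i;
    }
  }
  return -1;
}

/* Map each source group index to its index in the destination group list.
 * Returns NULL when the map is the identity (nothing needs remapping). */
static int *index_map_create(
    const char (*src_names)[64], int src_tot, const char (*dst_names)[64], int dst_tot)
{
  int *map = malloc(sizeof(*map) * (size_t)src_tot);
  bool remap_needed = false;

  for (int i = 0; i < src_tot; i++) {
    map[i] = defgroup_name_index(dst_names, dst_tot, src_names[i]);
    remap_needed = remap_needed || (map[i] != i);
  }
  if (!remap_needed) {
    free(map);
    map = NULL; /* identity map: caller can skip the remap */
  }
  return map;
}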
468 if (map == NULL || map_len == 0) {
473 for (int i = 0; i < dvert_len; i++, dv++) {
475 for (int j = 0; j < totweight; j++) {
477 if ((uint)def_nr < (uint)map_len && map[def_nr] != -1) {
482 dv->dw[j] = dv->dw[totweight];
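The 468-482 fragment is the apply side: the branch at 477 keeps weights whose group has a valid destination in the map, while the other path drops an entry by overwriting it with the last one (the dv->dw[j] = dv->dw[totweight] line at 482). The sketch below reuses the hypothetical VertWeights type from the earlier sketch; the actual rewrite of def_nr through the map is assumed, since that line is not part of the fragment.

/* Rewrite group indices through `map`; remove weights whose group is unmapped
 * (index out of range or map[def_nr] == -1) by swapping in the last entry. */
static void vert_weights_remap(VertWeights *dv, const int *map, int map_len)
{
  for (int j = 0; j < dv->totweight; j++) {
    const int def_nr = dv->dw[j].def_nr;
    if ((unsigned int)def_nr < (unsigned int)map_len && map[def_nr] != -1) {
      dv->dw[j].def_nr = map[def_nr];
    }
    else {
      dv->totweight--;
      dv->dw[j] = dv->dw[dv->totweight];
      j--; /* re-test the entry that was swapped into this slot */
    }
  }
}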
504 *dvert_arr = me->dvert;
510 *dvert_arr = lt->dvert;
530 bool is_locked = false;
533 bool *lock_flags = MEM_mallocN(defbase_tot * sizeof(bool), "defflags");
536 for (i = 0, defgroup = defbase->first; i < defbase_tot && defgroup;
537      defgroup = defgroup->next, i++) {
539 is_locked |= lock_flags[i];
553 bool *defgroup_validmap;
566 for (dg = defbase->first; dg; dg = dg->next) {
602 defgroup_validmap = MEM_mallocN(sizeof(*defgroup_validmap) * defbase_tot, "wpaint valid map");
605 for (dg = defbase->first, i = 0; dg; dg = dg->next, i++) {
613 return defgroup_validmap;
618 bool *dg_selection = MEM_mallocN(defbase_tot * sizeof(bool), __func__);
622 (*r_dg_flags_sel_tot) = 0;
628 for (i = 0, defgroup = defbase->first; i < defbase_tot && defgroup;
629      defgroup = defgroup->next, i++) {
632 dg_selection[i] = true;
633 (*r_dg_flags_sel_tot) += 1;
636 dg_selection[i] = false;
641 memset(dg_selection, false, sizeof(*dg_selection) * defbase_tot);
648 const bool *validmap,
651 return validmap && validmap[index] && !(lock_flags && lock_flags[index]);
655 const bool *lock_flags,
656 const bool *selected,
659 if (lock_flags == NULL) {
663 if (selected == NULL || sel_tot <= 1) {
667 for (int i = 0; i < defbase_tot; i++) {
668 if (selected[i] && lock_flags[i]) {
677 int defbase_tot, const bool *locked, const bool *deform, bool *r_locked, bool *r_unlocked)
680 if (r_unlocked != deform) {
681 memcpy(r_unlocked, deform, sizeof(bool) * defbase_tot);
684 memset(r_locked, 0, sizeof(bool) * defbase_tot);
689 for (int i = 0; i < defbase_tot; i++) {
690 bool is_locked = locked[i];
691 bool is_deform = deform[i];
693 r_locked[i] = is_deform && is_locked;
694 r_unlocked[i] = is_deform && !is_locked;
700 const bool *dg_selection,
702 int *r_dg_flags_sel_tot)
710 for (i = 0, defgroup = defbase->first; i < defbase_tot && defgroup;
711      defgroup = defgroup->next, i++) {
712 if (dg_selection[i]) {
716 i_mirr = STREQ(name_flip, defgroup->name) ? i :
719 if ((i_mirr >= 0 && i_mirr < defbase_tot) && (dg_flags_sel[i_mirr] == false)) {
720 dg_flags_sel[i_mirr] = true;
721 (*r_dg_flags_sel_tot) += 1;
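The 710-721 fragment extends a selection across mirrored group names: for each selected group a flipped-side name is computed (BLI_string_flip_side_name appears in the prototypes below) and, if a group with that name exists and is not yet selected, it is selected and counted. A simplified standalone sketch follows; flip_side_name is a hypothetical stand-in that only swaps a trailing ".L"/".R", and the linear name_index lookup is an assumption since the statement at 716 is cut off in the fragment.

#include <stdbool.h>
#include <string.h>

/* Hypothetical, simplified flip: swaps a trailing ".L"/".R"; the real helper
 * handles many more naming conventions. */
static void flip_side_name(char *r_name, const char *from_name, size_t name_len)
{
  strncpy(r_name, from_name, name_len - 1);
  r_name[name_len - 1] = '\0';
  const size_t len = strlen(r_name);
  if (len >= 2 && r_name[len - 2] == '.') {
    if (r_name[len - 1] == 'L') {
      r_name[len - 1] = 'R';
    }
    else if (r_name[len - 1] == 'R') {
      r_name[len - 1] = 'L';
    }
  }
}

static int name_index(const char (*names)[64], int tot, const char *name)
{
  for (int i = 0; i < tot; i++) {
    if (strcmp(names[i], name) == 0) {
      return i;
    }
  }
  return -1;
}

/* Select the mirror of every selected group; returns how many were added. */
static int mirror_selection(const char (*names)[64], int tot, bool *sel)
{
  int added = 0;
  for (int i = 0; i < tot; i++) {
    if (!sel[i]) {
      continue;
    }
    char name_flip[64];
    flip_side_name(name_flip, names[i], sizeof(name_flip));
    const int i_mirr = (strcmp(name_flip, names[i]) == 0) ? i : name_index(names, tot, name_flip);
    if (i_mirr >= 0 && i_mirr < tot && sel[i_mirr] == false) {
      sel[i_mirr] = true;
      added++;
    }
  }
  return added;
}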
732 bool *defgroup_validmap = NULL;
736 switch (subset_type) {
739 defgroup_validmap = MEM_mallocN(*r_defgroup_tot * sizeof(*defgroup_validmap), __func__);
740 memset(defgroup_validmap, false, *r_defgroup_tot * sizeof(*defgroup_validmap));
741 if ((def_nr_active >= 0) && (def_nr_active < *r_defgroup_tot)) {
743 defgroup_validmap[def_nr_active] = true;
758 for (i = 0; i < *r_defgroup_tot; i++) {
759 if (defgroup_validmap[i] == true) {
760 *r_subset_count += 1;
769 for (i = 0; i < *r_defgroup_tot; i++) {
770 defgroup_validmap[i] = !defgroup_validmap[i];
771 if (defgroup_validmap[i] == true) {
772 *r_subset_count += 1;
779 defgroup_validmap = MEM_mallocN(*r_defgroup_tot * sizeof(*defgroup_validmap), __func__);
780 memset(defgroup_validmap, true, *r_defgroup_tot * sizeof(*defgroup_validmap));
781 *r_subset_count = *r_defgroup_tot;
786 return defgroup_validmap;
790 const int defgroup_tot,
791 int *r_defgroup_subset_map)
794 for (i = 0; i < defgroup_tot; i++) {
795 if (defgroup_validmap[i]) {
796 r_defgroup_subset_map[j++] = i;
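The closing 794-796 fragment compacts a boolean validmap into a packed array of group indices. A minimal standalone sketch of the same compaction is shown below; returning the count of indices written is a choice made for this sketch, not necessarily how the original reports it.

#include <stdbool.h>

/* Pack the indices of all valid groups into r_subset_map; returns the count. */
static int subset_to_index_array(const bool *validmap, int defgroup_tot, int *r_subset_map)
{
  int j = 0;
  for (int i = 0; i < defgroup_tot; i++) {
    if (validmap[i]) {
      r_subset_map[j++] = i;
    }
  }
  return j;
}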
Blender kernel action and pose functionality.
struct bPoseChannel * BKE_pose_channel_find_name(const struct bPose *pose, const char *name)
bool CustomData_free_layer_active(struct CustomData *data, int type, int totelem)
void * CustomData_add_layer(struct CustomData *data, int type, eCDAllocType alloctype, void *layer, int totelem)
int CustomData_get_offset(const struct CustomData *data, int type)
void BKE_gpencil_vgroup_remove(struct Object *ob, struct bDeformGroup *defgroup)
struct ModifierData * BKE_modifiers_get_virtual_modifierlist(const struct Object *ob, struct VirtualModifierData *data)
General operations, lookup, etc. for Blender objects.
void BKE_object_batch_cache_dirty_tag(struct Object *ob)
struct Object * BKE_object_pose_armature_get(struct Object *ob)
bool BKE_object_is_in_editmode_vgroup(const struct Object *ob)
GHash * BLI_ghash_str_new_ex(const char *info, unsigned int nentries_reserve) ATTR_MALLOC ATTR_WARN_UNUSED_RESULT
void * BLI_ghash_lookup(const GHash *gh, const void *key) ATTR_WARN_UNUSED_RESULT
unsigned int BLI_ghash_len(const GHash *gh) ATTR_WARN_UNUSED_RESULT
void BLI_ghash_insert(GHash *gh, void *key, void *val)
void ** BLI_ghash_lookup_p(GHash *gh, const void *key) ATTR_WARN_UNUSED_RESULT
void BLI_ghash_free(GHash *gh, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
BLI_INLINE bool BLI_listbase_is_empty(const struct ListBase *lb)
void BLI_freelinkN(struct ListBase *listbase, void *vlink) ATTR_NONNULL(1)
int BLI_findindex(const struct ListBase *listbase, const void *vlink) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1)
int BLI_listbase_count(const struct ListBase *listbase) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1)
size_t BLI_string_flip_side_name(char *r_name, const char *from_name, bool strip_number, size_t name_len)
#define POINTER_FROM_INT(i)
Object is a sort of wrapper for general info.
#define OB_TYPE_SUPPORT_VGROUP(_type)
WT_VGROUP_BONE_DEFORM_OFF
Guarded memory (de)allocation.
#define MEM_reallocN(vmemh, len)
#define BM_ELEM_CD_GET_VOID_P(ele, offset)
#define BM_elem_flag_test(ele, hflag)
#define BM_ITER_MESH(ele, iter, bm, itype)
void *(* MEM_malloc_arrayN)(size_t len, size_t size, const char *str)
void(* MEM_freeN)(void *vmemh)
void *(* MEM_callocN)(size_t len, const char *str)
void *(* MEM_mallocN)(size_t len, const char *str)
SocketIndexByIdentifierMap * map
struct ClothSimSettings * sim_parms
struct MDeformVert * dvert
struct EditLatt * editlatt
struct BMEditMesh * edit_mesh
struct MDeformVert * dvert
struct ModifierData * next
struct ParticleSystem * next
struct bPoseChannel * next