import bpy
import mathutils
from mathutils import Vector, Matrix

import os
import sys
import random
import numpy as np
import json
import argparse


IMPORT_FUNCTIONS = {
    "obj": bpy.ops.wm.obj_import,
    "glb": bpy.ops.import_scene.gltf,
    "gltf": bpy.ops.import_scene.gltf,
    "usd": bpy.ops.import_scene.usd,
    "fbx": bpy.ops.import_scene.fbx,
    "stl": bpy.ops.import_mesh.stl,
    "usda": bpy.ops.import_scene.usda,
    "dae": bpy.ops.wm.collada_import,
    "ply": bpy.ops.import_mesh.ply,
    "abc": bpy.ops.wm.alembic_import,
    "blend": bpy.ops.wm.append,
}
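# Note: this operator table assumes a recent Blender Python API (the OBJ importer
# is exposed as bpy.ops.wm.obj_import in Blender 4.x); older releases name some
# of these importers differently (e.g. bpy.ops.import_scene.obj), so the mapping
# may need adjusting for the Blender version actually used.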


def load_object(object_path: str) -> None:
    """Loads a model with a supported file extension into the scene.

    Args:
        object_path (str): Path to the model file.

    Raises:
        ValueError: If the file extension is not supported.

    Returns:
        None
    """
    file_extension = object_path.split(".")[-1].lower()
    if file_extension not in IMPORT_FUNCTIONS:
        raise ValueError(f"Unsupported file type: {object_path}")

    import_function = IMPORT_FUNCTIONS[file_extension]

    if file_extension == "blend":
        import_function(directory=object_path, link=False)
    elif file_extension in {"glb", "gltf"}:
        import_function(filepath=object_path, merge_vertices=True)
    else:
        import_function(filepath=object_path)


def save_json(output_path, mesh_obj, armature_obj, extra=None, arm_name=False):
    """Writes the bone hierarchy to rigging.json and the per-vertex skin weights
    to skining.json inside output_path; returns the rigging file path.
    (The "skining" spelling is kept because reload_json() looks the file up
    under that name.)"""
    os.makedirs(output_path, exist_ok=True)

    total_armature_info = {}
    for obj in armature_obj:
        armature_info = {}
        armature_info["world_matrix"] = [list(row) for row in obj.matrix_world.copy()]
        translation = obj.matrix_world.translation
        for bone in obj.pose.bones:
            bone_info = {}
            bone_info["head_local"] = list(bone.head.copy())
            bone_info["head_world"] = list((obj.matrix_world.to_3x3() @ bone.head + translation).copy())

            bone_info["tail_local"] = list(bone.tail.copy())
            bone_info["tail_world"] = list((obj.matrix_world.to_3x3() @ bone.tail + translation).copy())

            if bone.parent:
                bone_info["parent"] = bone.parent.name.replace(" ", "_")
                if arm_name:
                    bone_info["parent"] = obj.name + "--" + bone_info["parent"]
            else:
                bone_info["parent"] = None
            bone_info["children"] = []
            if bone.children:
                for child in bone.children:
                    if arm_name:
                        bone_info["children"].append(obj.name + "--" + child.name.replace(" ", "_"))
                    else:
                        bone_info["children"].append(child.name.replace(" ", "_"))
            bone_name = bone.name.replace(" ", "_")
            if arm_name:
                bone_name = obj.name + "--" + bone_name
            armature_info[bone_name] = bone_info
        obj_name = obj.name.replace(" ", "_")
        total_armature_info[obj_name] = armature_info

    total_skinning_info = {}
    for obj in mesh_obj:
        vertex_groups = obj.vertex_groups

        skinning_info = {}
        skinning_info["world_matrix"] = [list(row) for row in obj.matrix_world.copy()]
        weight_info = []
        for vertex in obj.data.vertices:
            vertex_info = {}
            for group in vertex.groups:
                name = vertex_groups[group.group].name
                name = name.replace(" ", "_")
                if arm_name:
                    arm_modifier = [modifier for modifier in obj.modifiers if modifier.type == 'ARMATURE']
                    assert len(arm_modifier) == 1
                    name = arm_modifier[0].object.name + "--" + name
                weight = group.weight
                vertex_info[name] = weight
            weight_info.append(vertex_info)
        skinning_info["weight"] = weight_info
        obj_name = obj.name.replace(" ", "_")
        total_skinning_info[obj_name] = skinning_info

    rigging_file_path = os.path.join(output_path, "rigging.json")
    if extra:
        rigging_file_path = rigging_file_path.replace("rigging.json", f'rigging_{extra}.json')
    with open(rigging_file_path, "w") as f:
        json.dump(total_armature_info, f, indent=2)

    skining_file_path = os.path.join(output_path, "skining.json")
    if extra:
        skining_file_path = skining_file_path.replace("skining.json", f'skining_{extra}.json')
    with open(skining_file_path, "w") as f:
        json.dump(total_skinning_info, f, indent=2)

    return rigging_file_path
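
# For reference, the files written by save_json() (and read back by
# reload_rigging() / apply_skinning_weights()) are laid out roughly as:
#
#   rigging.json:  { "<armature>": { "world_matrix": [[...] x4],
#                                    "<bone>": { "head_local": [x, y, z],
#                                                "head_world": [x, y, z],
#                                                "tail_local": [x, y, z],
#                                                "tail_world": [x, y, z],
#                                                "parent": "<bone>" or null,
#                                                "children": ["<bone>", ...] },
#                                    ... } }
#
#   skining.json:  { "<mesh>": { "world_matrix": [[...] x4],
#                                "weight": [ { "<bone>": weight, ... },  # one dict
#                                            ... ] } }                   # per vertex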


def apply_skinning_weights(json_file):
    """Parents every mesh in the scene to the "Armature" object and rebuilds
    vertex groups / weights from the skinning JSON written by save_json()."""
    with open(json_file, "r") as f:
        skinning_data = json.load(f)

    armature_obj = bpy.data.objects.get("Armature")
    if not armature_obj:
        print("Error: Armature object 'Armature' not found.")
        return

    count = 0
    for obj in bpy.context.scene.objects:
        if obj.type == 'MESH':
            obj.parent = armature_obj
            count += 1

    print("total mesh count:", count)

    for obj in bpy.context.scene.objects:
        if obj.type == 'MESH':
            mesh_name = obj.name
            if mesh_name in skinning_data:
                skinning_info = skinning_data[mesh_name]
                if "weight" in skinning_info:
                    print("Applying skinning data for mesh:", mesh_name)
                    vertex_index = 0
                    for vertex_weight in skinning_info["weight"]:
                        for bone_name, weight_value in vertex_weight.items():
                            vertex_group = obj.vertex_groups.get(bone_name)
                            if vertex_group is None:
                                vertex_group = obj.vertex_groups.new(name=bone_name)
                                print("Vertex group created:", bone_name)
                            vertex_group.add([vertex_index], weight_value, 'REPLACE')
                        vertex_index += 1
            else:
                print("No skinning data found for mesh:", mesh_name)
    for obj in bpy.context.scene.objects:
        if obj.type == 'MESH':
            modifier = obj.modifiers.new(name="Armature", type='ARMATURE')
            modifier.object = armature_obj
            modifier.use_vertex_groups = True
            print("Armature modifier added to mesh:", obj.name)


def reload_rigging(rigging_file_path):
    """Rebuilds a single "Armature" object from the bone data in rigging.json."""
    with open(rigging_file_path, "r") as f:
        total_armature_info = json.load(f)

    bpy.ops.object.armature_add()
    armature_obj = bpy.context.object
    armature_obj.name = "Armature"

    # Remove the default bone created by armature_add().
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.armature.select_all(action='SELECT')
    bpy.ops.armature.delete()
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.mode_set(mode='EDIT')

    world_matrix = mathutils.Matrix([[1, 0, 0, 0],
                                     [0, 1, 0, 0],
                                     [0, 0, 1, 0],
                                     [0, 0, 0, 1]])
    armature_obj.matrix_world = world_matrix

    for armature_name, armature_info in total_armature_info.items():
        # First pass: create every bone at its world-space head/tail position.
        for bone_name, bone_info in armature_info.items():
            if bone_name == "world_matrix":
                continue
            bone = armature_obj.data.edit_bones.new(bone_name)
            bone.head = bone_info["head_world"]
            bone.tail = bone_info["tail_world"]

        # Second pass: wire up the parent relationships.
        for bone_name, bone_info in armature_info.items():
            if bone_name == "world_matrix":
                continue
            bone = armature_obj.data.edit_bones[bone_name]
            parent_name = bone_info["parent"]
            if parent_name:
                parent_bone = armature_obj.data.edit_bones[parent_name]
                bone.parent = parent_bone
    edit_len = len(armature_obj.data.edit_bones.keys())
    bpy.ops.object.mode_set(mode='OBJECT')
    bone_len = len(armature_obj.data.bones.keys())
    assert edit_len == bone_len, "bone count mismatch: " + str(edit_len) + " " + str(bone_len)
    bpy.ops.object.select_all(action='DESELECT')
    armature_obj.select_set(True)
    bpy.context.view_layer.objects.active = armature_obj
    print("Rigging information has been reloaded!")


def reload_json(folder_path, version=0, export=None):
    """Rebuilds the rigged scene from a processed folder: imports the mesh,
    recreates the armature from the rigging JSON and re-applies skin weights."""
    bpy.ops.wm.read_homefile(use_empty=True)
    if version == 0:
        obj_path = os.path.join(folder_path, "object.obj")
        skinning_file_path = os.path.join(folder_path, "skining.json")
        rigging_file_path = os.path.join(folder_path, "rigging.json")
    elif version == 1:
        obj_path = os.path.join(folder_path, "join.obj")
        skinning_file_path = os.path.join(folder_path, "skining_norig.json")
        rigging_file_path = os.path.join(folder_path, "rigging_norig.json")
    elif version == 2:
        obj_path = os.path.join(folder_path, "join.obj")
        skinning_file_path = os.path.join(folder_path, "skining_norig2.json")
        rigging_file_path = os.path.join(folder_path, "rigging_norig2.json")
    else:
        raise ValueError(f"Unsupported version: {version}")

    load_object(obj_path)
    reload_rigging(rigging_file_path)
    apply_skinning_weights(skinning_file_path)
    if export:
        bpy.ops.wm.save_as_mainfile(filepath=export)
    print("Done!")


def reset_scene() -> None:
    """Resets the scene to a clean state.

    Returns:
        None
    """
    for obj in bpy.data.objects:
        if obj.type not in {"CAMERA", "LIGHT"}:
            bpy.data.objects.remove(obj, do_unlink=True)

    for material in bpy.data.materials:
        bpy.data.materials.remove(material, do_unlink=True)

    for texture in bpy.data.textures:
        bpy.data.textures.remove(texture, do_unlink=True)

    for image in bpy.data.images:
        bpy.data.images.remove(image, do_unlink=True)


def save_mesh(path, mtl=False, obj_path=None):
    """Exports the scene as a triangulated OBJ. When mtl is True, the .blend is
    saved/reopened and its packed textures are unpacked to disk first so the
    exported materials can reference real texture files."""
    if mtl:
        bpy.ops.wm.save_as_mainfile(filepath=obj_path + '/object.blend')
        bpy.ops.wm.open_mainfile(filepath=obj_path + '/object.blend')
        bpy.ops.file.unpack_all(method='WRITE_LOCAL')

    bpy.ops.wm.obj_export(filepath=path, export_materials=mtl, export_uv=mtl, export_triangulated_mesh=True)


def get_root_obj(obj):
    if not obj.parent:
        return obj
    return get_root_obj(obj.parent)


def normalize(objs):
    """Uniformly rescales and translates the objects' shared root so that the
    largest bounding-box dimension becomes 1, the x/y extents are centered at
    the origin, and the lowest point sits on z = 0."""
    # Evaluate the depsgraph so vertex positions reflect modifiers etc.
    objs_eval = []
    depsgraph = bpy.context.evaluated_depsgraph_get()
    for obj in objs:
        objs_eval.append(obj.evaluated_get(depsgraph))

    vertices = []
    for obj in objs_eval:
        for v in obj.data.vertices:
            vertices.append(obj.matrix_world @ Vector((v.co.x, v.co.y, v.co.z, 1)))

    vertices = np.array(vertices)
    min_x, min_y, min_z, _ = np.min(vertices, axis=0)
    max_x, max_y, max_z, _ = np.max(vertices, axis=0)

    scale_x = 1 / (max_x - min_x)
    scale_y = 1 / (max_y - min_y)
    scale_z = 1 / (max_z - min_z)
    scale_min = min(scale_x, scale_y, scale_z)

    assert scale_min < 1e6

    translate_x = - (max_x + min_x) / 2 * scale_min
    translate_y = - (max_y + min_y) / 2 * scale_min
    translate_z = - min_z * scale_min

    trans = Matrix.Translation((translate_x, translate_y, translate_z))

    scale = Matrix.Scale(scale_min, 4, (1, 0, 0)) @ Matrix.Scale(scale_min, 4, (0, 1, 0)) @ Matrix.Scale(scale_min, 4, (0, 0, 1))

    # The transform is applied once to the common root, so all objs are assumed
    # to live under the same hierarchy.
    root = get_root_obj(objs[0])

    root.matrix_world = trans @ scale @ root.matrix_world

    bpy.context.view_layer.update()
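
# Worked example for normalize() (illustrative numbers): a model spanning
# x in [-2, 2], y in [0, 1], z in [1, 3] has extents (4, 1, 2), so
# scale_min = 1/4; after the transform it spans x in [-0.5, 0.5],
# y in [-0.125, 0.125], z in [0, 0.5] -- the largest dimension is 1 and the
# model rests on the z = 0 plane.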


def remesh(objs, target=5000):
    """Iteratively decimates the heaviest meshes (skipping any with shape keys)
    until the total vertex count drops below target or 20 iterations pass."""
    num_v = {}
    for obj in objs:
        num_v[obj] = len(obj.data.vertices)

    num_v_sort = sorted(num_v.items(), key=lambda x: x[1], reverse=True)

    total_v = sum([num_v[obj] for obj in num_v])

    iters = 0
    while total_v > target and iters < 20:
        # Pick the largest meshes until they cover at least half of all vertices.
        reduce = []
        for obj, v in num_v_sort:
            reduce.append(obj)
            if sum([num_v[oo] for oo in reduce]) > 0.5 * total_v:
                break
        for obj in reduce:
            # The Decimate modifier cannot be applied to meshes with shape keys.
            if obj.data.shape_keys is not None:
                num_v.pop(obj)
                continue

            ratio = 0.5

            bpy.context.view_layer.objects.active = obj
            bpy.ops.object.modifier_add(type='DECIMATE')
            bpy.context.object.modifiers["Decimate"].ratio = ratio
            bpy.ops.object.modifier_apply(modifier="Decimate")

            num_v[obj] = len(obj.data.vertices)
        total_v = sum([num_v[obj] for obj in num_v])
        num_v_sort = sorted(num_v.items(), key=lambda x: x[1], reverse=True)

        iters += 1


def get_parents(obj):
    if not obj.parent:
        return [obj.name]
    parents = get_parents(obj.parent)
    parents.append(obj.name)
    return parents


def check(objs, arm):
    """Sanity check: every mesh that carries skin weights must have exactly one
    Armature modifier. Returns 1 on success, -2 otherwise."""
    for obj in objs:
        vertex_groups = obj.vertex_groups

        for vertex in obj.data.vertices:
            for group in vertex.groups:
                name = vertex_groups[group.group].name
                name = name.replace(" ", "_")
                arm_modifier = [modifier for modifier in obj.modifiers if modifier.type == 'ARMATURE']
                if len(arm_modifier) != 1:
                    print("unexpected armature modifier count:", len(arm_modifier))
                    return -2

    return 1


def delete(objs):
    """Removes meshes that have no vertex groups (i.e. carry no skin weights)
    and returns the mesh objects remaining in the scene."""
    for obj in objs:
        vertex_groups = obj.vertex_groups
        if not vertex_groups:
            bpy.data.objects.remove(obj)

    meshes = []
    for obj in bpy.context.scene.objects:
        if obj.type == "MESH":
            meshes.append(obj)

    return meshes


def merge_mesh(folder_path, export=None, save_join=True):
    """Reloads the processed folder, joins all meshes into one, optionally
    re-exports the joined OBJ, and prunes armature bones that have no matching
    vertex group. Returns ([mesh], [armature]) or (None, None) on failure."""
    bpy.ops.wm.read_homefile(use_empty=True)
    try:
        reload_json(folder_path)
    except Exception:
        print("Error in reloading json file")
        os.system(f"rm -r {folder_path}")
        return None, None

    bpy.ops.object.select_all(action='DESELECT')
    if export:
        bpy.ops.wm.save_as_mainfile(filepath='reload_' + export)

    meshes = []
    for obj in bpy.context.scene.objects:
        if obj.type == "MESH":
            bpy.context.view_layer.objects.active = obj
            obj.select_set(True)
            meshes.append(obj)
    print("meshes length", len(meshes))

    bpy.ops.object.join()
    if export:
        bpy.ops.wm.save_as_mainfile(filepath='join_' + export)

    meshes = []
    for obj in bpy.context.scene.objects:
        if obj.type == "MESH":
            meshes.append(obj)
    if len(meshes) != 1:
        bpy.ops.wm.save_as_mainfile(filepath='join_f.blend')
    assert len(meshes) == 1

    if save_join:
        obj_path = os.path.join(folder_path, "object.obj")
        bpy.ops.wm.obj_export(filepath=obj_path, export_materials=False, export_uv=False, export_triangulated_mesh=True)

    total_armature_count = 0
    armature_obj = []
    mesh_obj = []
    for obj in bpy.context.scene.objects:
        if obj.type == "ARMATURE":
            total_armature_count += 1
            armature_obj.append(obj)
        if obj.type == "MESH":
            mesh_obj.append(obj)
    if total_armature_count == 0:
        print("No rigging information for the file:", folder_path + "\n")
        return None, None

    # Delete every bone that has no vertex group (i.e. no skin weights) on the
    # joined mesh, reconnecting its children so the hierarchy stays intact.
    vertex_group_name = [group.name for group in mesh_obj[0].vertex_groups]
    bpy.context.view_layer.objects.active = armature_obj[0]
    bpy.ops.object.mode_set(mode='EDIT')
    edit_bones = armature_obj[0].data.edit_bones
    bone_delete = set([bone.name for bone in edit_bones]) - set(vertex_group_name)
    print(f"Deleting {len(bone_delete)} bones")
    for bone in bone_delete:
        if edit_bones[bone].parent is None:
            # Root bone: with at most one child, just drop it; otherwise promote
            # the child closest to the children's centroid and reparent the
            # remaining children to it.
            num_children = len(edit_bones[bone].children)
            if num_children <= 1:
                edit_bones.remove(edit_bones[bone])
                continue
            if num_children > 1:
                center = mathutils.Vector((0, 0, 0))
                for child in edit_bones[bone].children:
                    center += child.head
                center /= num_children
                min_dist = 1e9
                for child in edit_bones[bone].children:
                    dist = (child.head - center).length
                    if dist < min_dist:
                        min_dist = dist
                        min_child = child
                for child in edit_bones[bone].children:
                    if child != min_child:
                        child.parent = min_child
                edit_bones.remove(edit_bones[bone])
                continue
            continue

        # Non-root bone: hand its children to its parent, then remove it.
        bone_obj = edit_bones[bone]
        for child in bone_obj.children:
            child.parent = bone_obj.parent

        edit_bones.remove(edit_bones[bone])
    bpy.ops.object.mode_set(mode='OBJECT')

    if export:
        bpy.ops.wm.save_as_mainfile(filepath='delete_' + export)

    mesh_obj = []
    armature_obj = []
    for obj in bpy.context.scene.objects:
        if obj.type == "MESH":
            mesh_obj.append(obj)
        if obj.type == "ARMATURE":
            armature_obj.append(obj)
    assert len(mesh_obj) == 1
    assert len(armature_obj) == 1

    return mesh_obj, armature_obj
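
# Note: merge_mesh() operates on the folder produced by process(): when
# save_join is True it overwrites object.obj with the joined, triangulated
# mesh, and it deletes the whole folder (rm -r) if the reload step fails.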


def process(file_path, obj_path=None, stamp=None, tex=False):
    """Full pipeline for one input file: import, optionally jump to a pose
    frame, drop unskinned meshes, export rigging/skinning JSON and an OBJ,
    merge meshes and prune unused bones, normalize the result, and export the
    final JSON/OBJ. Returns 1 on success or a negative error code."""
    reset_scene()
    load_object(file_path)

    if 'glTF_not_exported' in bpy.data.collections:
        print('DELETE glTF_not_exported')
        bpy.data.collections.remove(bpy.data.collections['glTF_not_exported'])

    if stamp is not None:
        bpy.context.scene.frame_set(stamp)
        print(f'Set the current frame to {stamp}')

    bpy.context.view_layer.update()

    mesh_obj = []
    armature_obj = []
    for obj in bpy.context.scene.objects:
        if obj.type == "ARMATURE":
            armature_obj.append(obj)

            armature_obj[-1].data.pose_position = 'POSE'
        if obj.type == "MESH":
            mesh_obj.append(obj)

    # Drop meshes without vertex groups; bail out if nothing skinned remains.
    mesh_obj = delete(mesh_obj)
    if len(mesh_obj) == 0:
        return -1

    save_json(obj_path, mesh_obj, armature_obj, arm_name=True)

    if not tex:
        save_mesh(obj_path + '/object.obj')
    else:
        save_mesh(obj_path + '/object.obj', mtl=True, obj_path=obj_path)

    mesh_obj, armature_obj = merge_mesh(obj_path)
    if mesh_obj is None or armature_obj is None:
        return -2

    try:
        normalize(mesh_obj)
    except Exception:
        os.system(f"rm -r {obj_path}")
        return -3

    save_json(obj_path, mesh_obj, armature_obj)

    if not tex:
        save_mesh(obj_path + '/object.obj')
    else:
        save_mesh(obj_path + '/object.obj', mtl=True, obj_path=obj_path)

    return 1


if __name__ == '__main__':

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--object_path",
        type=str,
        required=True,
        help="Path to the object file",
    )
    parser.add_argument(
        "--output_dir",
        type=str,
        required=True,
        help="Path to the directory where the processed mesh and rigging/skinning metadata will be saved.",
    )
    parser.add_argument(
        "--stamp",
        type=int,
        required=False,
        help="Frame number to pose the armature at before exporting.",
    )
    parser.add_argument(
        "--tex",
        action="store_true",
        help="Save the texture.",
    )
    # Only the arguments after "--" are meant for this script when it is run
    # through Blender's command line.
    argv = sys.argv[sys.argv.index("--") + 1 :]
    args = parser.parse_args(argv)

    os.makedirs(args.output_dir, exist_ok=True)
    stamp = args.stamp if args.stamp else None
    print(f'Stamp: {stamp}')
    result = process(args.object_path, obj_path=args.output_dir, stamp=stamp, tex=args.tex)
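
# Example invocation (illustrative; the script filename and paths are
# hypothetical) -- run headless through Blender so bpy is available:
#
#   blender --background --python process_rig.py -- \
#       --object_path /data/model.glb --output_dir /data/out --tex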