diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..69c5b310a90ffc7e7ca980429f2173184baea330
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,21 @@
+car_models/*
+output/*
+output*/*
+
+mh_models/*
+!mh_models/blender.*
+
+backgrounds/*
+!default_green.png
+
+.idea/
+
+*.pyc
+*.blend1
+*.xlsx
+*.bmp
+*.png
+*.jpg
+*.jpeg
+
+!assets/
\ No newline at end of file
diff --git a/README.md b/README.md
index f26411aafc53473a1a505760037b308f4f6cf3a0..3abe384015368c99c39f4a6bcdcbb3fd39a12382 100644
--- a/README.md
+++ b/README.md
@@ -1,92 +1,106 @@
-# synthetic_drivers
+<h1 style="text-align:center">
+Synthetic Driver Image Generation for Human Pose-Related Tasks
+</h1>
+<div style="text-align:center">
+<h3>
+<a href="https://liris.cnrs.fr/page-membre/romain-guesdon">Romain Guesdon</a>,
+<a href="https://liris.cnrs.fr/page-membre/carlos-crispim-junior">Carlos Crispim-Junior</a>,
+<a href="https://liris.cnrs.fr/page-membre/laure-tougne">Laure Tougne Rodet</a>
+<br>
+<br>
+International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications (VISAPP)
+</h3>
+</div>
+# Table of contents
+- [Overview](#overview)
+- [Installation](#installation)
+- [Citation](#citation)
+- [Acknowledgments](#acknowledgments)
+# Overview
+This repository contains the materials presented in the paper
+[Synthetic Driver Image Generation for Human Pose-Related Tasks]().
+We provide scripts and guidance to generate your own images, along with a link to download our dataset of 200k driver images.
-## Getting started
-To make it easy for you to get started with GitLab, here's a list of recommended next steps.
+# Installation
+## Requirements
+#### Blender
+This script has been developed and run with Blender 3.2.0. Blender can be downloaded [here](https://www.blender.org/download/).
+The following add-ons should also be enabled in Edit > Preferences > Add-ons:
+- MHX2: http://download.tuxfamily.org/makehuman/plugins/mhx-blender-latest.zip
+- Import images as planes
+- Lighting: Sun Position
-Already a pro? Just edit this README.md and make it your own. Want to make it easy? [Use the template at the bottom](#editing-this-readme)!
+#### MakeHuman
+Human models were generated with [MakeHuman Community](http://www.makehumancommunity.org/content/downloads.html). We used a modified skeleton rig that you can find in the `mh_models/` directory of this repo. Once MakeHuman is installed, copy the two "blender.*" files into `MAKEHUMAN-COMMUNITY-ROOT\makehuman\data\rigs`.
+We also highly recommend installing the [MassProduce](http://www.makehumancommunity.org/content/plugins.html) plugin to quickly generate several models.
-## Add your files
+## Models
+#### Human models
+To generate human models with MakeHuman, first set:
+- in Files > Export: Mesh format = MakeHuman Exchange (.mhx2), Scale units = meter
+- in Pose / Animate > Skeleton: Rig presets = Blender
-- [ ] [Create](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#create-a-file) or [upload](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#upload-a-file) files
-- [ ] [Add files using the command line](https://docs.gitlab.com/ee/gitlab-basics/add-file.html#add-a-file-using-the-command-line) or push an existing Git repository with the following command:
+Then, you can either manually modify your model or use MassProduce in the Community tab after setting all the random parameters, and export it as MHX2.
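+Once a model is exported, you can quickly check that it imports correctly into Blender. A minimal sketch, assuming the MHX2 add-on listed above is installed (the module and operator names below are assumptions based on the standard MHX2 plugin, and the file path is only an example):
+```python
+import bpy
+
+# Enable the MHX2 importer (module name may differ depending on the plugin version).
+bpy.ops.preferences.addon_enable(module="import_runtime_mhx2")
+
+# Import one exported model to verify that the rig and textures load as expected.
+bpy.ops.import_scene.makehuman_mhx2(filepath="mh_models/exports/example_model.mhx2")
+```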
+By default, you should place the mhx2 files in `mh_models/exports` and the associated texture files in `mh_models/exports/textures`.
-```
-cd existing_repo
-git remote add origin https://gitlab.liris.cnrs.fr/aura_autobehave/synthetic_drivers.git
-git branch -M main
-git push -uf origin main
-```
-
-## Integrate with your tools
-
-- [ ] [Set up project integrations](https://gitlab.liris.cnrs.fr/aura_autobehave/synthetic_drivers/-/settings/integrations)
-
-## Collaborate with your team
-
-- [ ] [Invite team members and collaborators](https://docs.gitlab.com/ee/user/project/members/)
-- [ ] [Create a new merge request](https://docs.gitlab.com/ee/user/project/merge_requests/creating_merge_requests.html)
-- [ ] [Automatically close issues from merge requests](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#closing-issues-automatically)
-- [ ] [Enable merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/)
-- [ ] [Automatically merge when pipeline succeeds](https://docs.gitlab.com/ee/user/project/merge_requests/merge_when_pipeline_succeeds.html)
-
-## Test and Deploy
-
-Use the built-in continuous integration in GitLab.
+#### Car models
+You can use 3D car models of your choice. Each car model should be imported into an empty Blender file, with its location and rotation set to 0 and a real-size scale.
+The origin of the model should be placed at the driver's seat, since it is used to position the driver. You can set an Empty Arrows object as the parent in Blender to achieve this, as shown below:
-- [ ] [Get started with GitLab CI/CD](https://docs.gitlab.com/ee/ci/quick_start/index.html)
-- [ ] [Analyze your code for known vulnerabilities with Static Application Security Testing(SAST)](https://docs.gitlab.com/ee/user/application_security/sast/)
-- [ ] [Deploy to Kubernetes, Amazon EC2, or Amazon ECS using Auto Deploy](https://docs.gitlab.com/ee/topics/autodevops/requirements.html)
-- [ ] [Use pull-based deployments for improved Kubernetes management](https://docs.gitlab.com/ee/user/clusters/agent/)
-- [ ] [Set up protected environments](https://docs.gitlab.com/ee/ci/environments/protected_environments.html)
+<img src="assets/car_origin_blender.png" alt="car_origin_blender" width="200"/>
-***
+Furthermore, you can add empty meshes such as cylinders or cubes to force the hands to be placed inside or outside of them. To do this, rename them "IN_..." or "OUT_...". We recommend placing an "OUT_" cylinder around the steering wheel and an "IN_" box to delimit the inside of the car cockpit.
-# Editing this README
+Finally, you can add targets for the wrists that will be used for the manual poses. To do this, place Empty Arrows objects at the desired location and rotation, named "Target_R_..." or "Target_L_...", such as:
-When you're ready to make this README your own, just edit this file and use the handy template below (or feel free to structure it however you want - this is just a starting point!). Thank you to [makeareadme.com](https://www.makeareadme.com/) for this template.
+<img src="assets/target_examples.png" alt="target_examples" height="200"/>
-## Suggestions for a good README
-Every project is different, so consider which of these sections apply to yours. The sections used in the template are suggestions for most open source projects. Also keep in mind that while a README can be too long and detailed, too long is better than too short. If you think your README is too long, consider utilizing another form of documentation rather than cutting out information.
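+For reference, the generation script finds these helper empties purely by their names. A condensed sketch of the lookups, adapted from `global_script.py` and `scripts/human.py` in this repository (the wrapper function is ours, added for illustration):
+```python
+import bpy
+
+def find_car_helpers(car: bpy.types.Object):
+    """Collect the name-based helper empties attached to a car model."""
+    # Wrist targets used for the manual ("drive") poses, keyed by side ('l' / 'r').
+    targets = {side: [ch for ch in car.children if f'Target_{side.upper()}' in ch.name]
+               for side in 'lr'}
+    # Empty meshes used as Shrinkwrap bounds to keep the hands inside / outside.
+    bounds = [ch for ch in car.children_recursive
+              if ch.name.startswith(('IN_', 'OUT_'))]
+    return targets, bounds
+```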
+Save each .blend file in a different folder with the required textures, and place these folders in a `car_models` folder.
-## Name
-Choose a self-explaining name for your project.
+#### Backgrounds
+Background images should be high-resolution images of 1800x800 pixels, placed in the `backgrounds` folder. The image name should start with either "day_..." or "night_...", depending on the time of day it depicts.
-## Description
-Let people know what your project can do specifically. Provide context and add a link to any reference visitors might be unfamiliar with. A list of Features or a Background subsection can also be added here. If there are alternatives to your project, this is a good place to list differentiating factors.
+# Run
+To generate scenes, open the `base.blend` file in Blender. Then, go to the Scripting tab and open `global_script.py`. Here are the different parameters you might want to change in this file (their default assignments are shown in the excerpt at the end of this section):
+- line 15: the absolute path to the cloned repository
+- lines 77-84: relative paths to the car .blend files, with the object name that will be given to each car model in the generated scene
+- line 97: path to the human models
+- line 186: relative path to the output folder
+- line 204: number of different scenes (combinations of person, car, and background) to generate
+- line 205: number of poses per scene
-## Badges
-On some READMEs, you may see small images that convey metadata, such as whether or not all the tests are passing for the project. You can use Shields to add some to your README. Many services also have instructions for adding a badge.
+Be aware that the last poses of each scene will be targeted poses (if you put targets in your car models). Therefore, if you set the number of poses too low, no random pose will be generated.
-## Visuals
-Depending on what you are making, it can be a good idea to include screenshots or even a video (you'll frequently see GIFs rather than actual videos). Tools like ttygif can help, but check out Asciinema for a more sophisticated method.
+Then run the script from the Scripting tab. Depending on the number of scenes and poses requested, it can take from several minutes to several hours. During this time, Blender will not be usable.
-## Installation
-Within a particular ecosystem, there may be a common way of installing things, such as using Yarn, NuGet, or Homebrew. However, consider the possibility that whoever is reading your README is a novice and would like more guidance. Listing specific steps helps remove ambiguity and gets people to using your project as quickly as possible. If it only runs in a specific context like a particular programming language version or operating system or has dependencies that have to be installed manually, also add a Requirements subsection.
+When the script stops, you should end up with images in the output folder. You can also manipulate the last generated scene in the Layout tab, where each pose corresponds to a timestamp on the timeline.
-## Usage
-Use examples liberally, and show the expected output if you can. It's helpful to have inline the smallest example of usage that you can demonstrate, while providing links to more sophisticated examples if they are too long to reasonably include in the README.
+<img src="assets/scene_complete.png" alt="scene_complete" height="400" />
-## Support
-Tell people where they can go to for help. It can be any combination of an issue tracker, a chat room, an email address, etc.
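+For reference, the corresponding assignments in `global_script.py` look as follows with the default values shipped in this repository (excerpts, not a standalone script; the line numbers above may drift if you edit the file):
+```python
+WORKING_DIR = r"synthetic_drivers"                 # line 15: path to the cloned repository
+human_loader = HumanLoader('mh_models/exports')    # line 97: folder containing the .mhx2 models
+fp = abs_path(r"output_temp")                      # line 186: output folder
+nb_scene = 1                                       # line 204: number of scenes to generate
+nb_pose = 3                                        # line 205: number of poses per scene
+```
+If you prefer not to keep the Blender interface open, running the script headless with `blender base.blend --background --python global_script.py` should also work, although it was developed and tested from the Scripting tab.
+
+Besides the rendered images, the script writes one ';'-separated CSV of 2D keypoints and one of 3D keypoints per scene in the `annots_2D` and `annots_3D` subfolders of the output directory. A minimal loading sketch, assuming the default `output_temp` output folder (this reader is ours, not part of the repository):
+```python
+import csv
+import glob
+import os
+
+out_dir = "output_temp"  # default output folder set in global_script.py
+for path in glob.glob(os.path.join(out_dir, "annots_2D", "annotations_*.csv")):
+    with open(path, newline="") as f:
+        reader = csv.reader(f, delimiter=";")
+        header = next(reader)                   # e.g. 'hand_l_head_x', 'hand_l_head_y', ..., 'nose_x', 'nose_y'
+        for pose_id, row in enumerate(reader):  # one row per rendered pose
+            keypoints = dict(zip(header, map(float, row)))
+            print(os.path.basename(path), pose_id, keypoints["nose_x"], keypoints["nose_y"])
+```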
+# Generated dataset +Our dataset with 200k images used in our paper can be downloaded from [here](http://dionysos.univ-lyon2.fr/~ccrispim/SyntheDrivers/synthe_drivers-dataset.zip). -## Roadmap -If you have ideas for releases in the future, it is a good idea to list them in the README. - -## Contributing -State if you are open to contributions and what your requirements are for accepting them. - -For people who want to make changes to your project, it's helpful to have some documentation on how to get started. Perhaps there is a script that they should run or some environment variables that they need to set. Make these steps explicit. These instructions could also be useful to your future self. - -You can also document commands to lint the code or run tests. These steps help to ensure high code quality and reduce the likelihood that the changes inadvertently break something. Having instructions for running tests is especially helpful if it requires external setup, such as starting a Selenium server for testing in a browser. - -## Authors and acknowledgment -Show your appreciation to those who have contributed to the project. - -## License -For open source projects, say how it is licensed. +# Citation +If you use our network or our code, please cite: +``` +@InProceedings{Guesdon_2023_Visapp, + author = {Guesdon, Romain and Crispim-Junior, Carlos and Tougne Rodet, Laure}, + title = {Synthetic Driver Image Generation for Human Pose-Related Tasks}, + booktitle={Proceedings of the 18th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications - To be published,}, + year={2023}, + pages={}, + publisher={SciTePress}, + organization={INSTICC}, +} +``` -## Project status -If you have run out of energy or time for your project, put a note at the top of the README saying that development has slowed down or stopped completely. Someone may choose to fork your project or volunteer to step in as a maintainer or owner, allowing your project to keep going. You can also make an explicit request for maintainers. +# Acknowledgments +This work was supported by the Pack Ambition Recherche 2019 funding of the French AURA Region in +the context of the AutoBehave project. 
+<div style="text-align:center"> +<img style="margin-right: 20px" src="assets/logo_liris.png" alt="LIRIS logo" height="75" width="160"/> +<img style="margin-left: 20px" src="assets/logo_ra.png" alt="RA logo" height="60" width="262"/> +</div> diff --git a/assets/.gitkeep b/assets/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/assets/.gitkeep @@ -0,0 +1 @@ + diff --git a/assets/car_origin_blender.png b/assets/car_origin_blender.png new file mode 100644 index 0000000000000000000000000000000000000000..2be5d873b1ac311282904ea231ba56e4821f09a2 Binary files /dev/null and b/assets/car_origin_blender.png differ diff --git a/assets/logo_liris.png b/assets/logo_liris.png new file mode 100644 index 0000000000000000000000000000000000000000..37143f9d88e379ff7c6314eaa23b069f7ce997ef Binary files /dev/null and b/assets/logo_liris.png differ diff --git a/assets/logo_ra.png b/assets/logo_ra.png new file mode 100644 index 0000000000000000000000000000000000000000..6ceef305fd77646ce549c8eabbc6b34c0f048c7a Binary files /dev/null and b/assets/logo_ra.png differ diff --git a/assets/scene_complete.png b/assets/scene_complete.png new file mode 100644 index 0000000000000000000000000000000000000000..ff0ea40b637778c142e885ae8cc020e60ab95d10 Binary files /dev/null and b/assets/scene_complete.png differ diff --git a/assets/target_examples.png b/assets/target_examples.png new file mode 100644 index 0000000000000000000000000000000000000000..2b1bbd0dd81b1cd62a063dcecad761cc456a6e20 Binary files /dev/null and b/assets/target_examples.png differ diff --git a/backgrounds/default_green.png b/backgrounds/default_green.png new file mode 100644 index 0000000000000000000000000000000000000000..0a1e32695b6ab23ac09ef282db514e8dd2f03111 Binary files /dev/null and b/backgrounds/default_green.png differ diff --git a/base.blend b/base.blend new file mode 100644 index 0000000000000000000000000000000000000000..1affda0022474049e55f1e4be232221164edacdb Binary files /dev/null and b/base.blend differ diff --git a/global_script.py b/global_script.py new file mode 100644 index 0000000000000000000000000000000000000000..84c1ebe94adc8e0c5cb201dd76e5bf8c1fa48c22 --- /dev/null +++ b/global_script.py @@ -0,0 +1,383 @@ +import random +import glob +import json +import os +import sys +import shutil +from collections import OrderedDict +import time + +import bpy +from mathutils import Vector + +print('#' * 30) + +WORKING_DIR = r"synthetic_drivers" +os.chdir(WORKING_DIR) +sys.path.append(WORKING_DIR) + +from scripts import random_pose +from scripts import utils +from scripts import camera_proj +from scripts import human + +import importlib + +importlib.reload(random_pose) +importlib.reload(utils) +importlib.reload(camera_proj) +importlib.reload(human) + +from scripts.human import HumanLoader + + +def abs_path(rel_path): + return os.path.join(os.path.dirname(os.path.dirname(__file__)), rel_path) + + +random.seed() + +C = bpy.context +D = bpy.data + +CONTINUE = False + +# Clean scene +try: + bpy.ops.object.mode_set(mode='OBJECT') + utils.unselect_all() +except RuntimeError: + pass + +for col in D.collections: + D.collections.remove(col) + +for bpy_data_iter in ( + D.objects, + D.meshes, + D.lights, + D.cameras, + D.armatures, + D.images, +): + for id_data in bpy_data_iter: + bpy_data_iter.remove(id_data) + +for ob in D.objects: + C.scene.collection.objects.unlink(ob) + + +def main(): + # Import car + car_collection = D.collections.new("Cars") + 
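+    # The 'Cars' collection is linked to the scene, then each car .blend listed
+    # below is loaded as a library; the object named 'Car' in every file is
+    # renamed to its key ('SUV', 'Red', ...) so a car can be picked at random
+    # for each generated scene.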
C.scene.collection.children.link(car_collection) + + cars = [] + + for src_path, name in { + r"car_models\suv_car\car.blend": 'SUV', + r"car_models\red_car\red.blend": 'Red', + r"car_models\pickup_car\pickup.blend": 'PickUp', + r"car_models\family_car\family_car.blend": 'Family', + r"car_models\coupe_car\coupe_car.blend": 'Coupe', + r"car_models\truck\truck_open.blend": 'Truck', + + }.items(): + with D.libraries.load(abs_path(src_path)) as (data_from, data_to): + data_to.objects = data_from.objects + + for obj in data_to.objects: + car_collection.objects.link(obj) + + D.objects['Car'].name = name + cars.append(D.objects[name]) + car_picker = utils.Randomizer(cars) + + # import humans + human_loader = HumanLoader('mh_models/exports') + + # Creation scene + # add camera + C.scene.render.engine = 'BLENDER_EEVEE' + + camera_data = D.cameras.new(name='Camera') + camera_data.type = 'PERSP' + camera_data.lens_unit = 'FOV' + camera_data.angle = utils.r(68) + + C.scene.render.resolution_x = 320 + C.scene.render.resolution_y = int(0.75 * C.scene.render.resolution_x) + + camera_object = D.objects.new('Camera', camera_data) + C.scene.collection.objects.link(camera_object) + camera_object.rotation_mode = 'XYZ' + + camera_object.location = [-.97, -0.1, 0.68] + camera_rotation_0 = utils.r(Vector([73, 8, -82])) + camera_object.rotation_euler = camera_rotation_0 + + # set background + back_folder = abs_path("backgrounds") + back_imgs = {} + for key in ['night', 'day']: + list_imgs = glob.glob(os.path.join(back_folder, f'{key}_*')) + back_imgs[key] = [] + for img in list_imgs: + img_name = os.path.basename(img) + bpy.ops.image.open(filepath=img, directory=back_folder, + files=[{"name": img_name}], relative_path=False, show_multiview=False) + back_imgs[key].append(img_name) + + # Create image holder + bpy.ops.import_image.to_plane( + directory=back_folder, + files=[{"name": "default_green.png"}], + shader='SHADELESS', + use_transparency=False, + offset=False, + height=round(10 / 2.25, 1), + align_axis="X-" + ) + + image_holder = C.active_object + image_holder.name = 'Image_holder' + image_holder.location = (4, 1.5, 1.3) + image_holder.rotation_euler.z = utils.r(-95) + image_holder.active_material.shadow_method = 'NONE' + + # add light + sun_collection = D.collections.new("Sun") + C.scene.collection.children.link(sun_collection) + + light_params = { + 'day': { + 'energy_bounds': (10, 35), + 'back_color_bounds': (0.4, 0.65), + 'sun_color': (1, 1, 1) + }, + 'night': { + 'energy_bounds': (5, 15), + 'back_color_bounds': (0.15, 0.25), + 'sun_color': (1, 0.5, 0.2) + } + + } + + light_data = D.lights.new(name="Sun", type='SUN') + light_data.energy = 20 + light_object = D.objects.new(name="Sun", object_data=light_data) + + sun_collection.objects.link(light_object) + light_object.location = (5, 0, 3) + + bpy.data.worlds["World"].node_tree.nodes["Background.001"].inputs[1].default_value = 1.2 + + # Sun position + C.scene.sun_pos_properties.usage_mode = 'NORMAL' + C.scene.sun_pos_properties.sun_object = light_object + C.scene.sun_pos_properties.object_collection = sun_collection + C.scene.sun_pos_properties.object_collection_type = 'DIURNAL' + C.scene.sun_pos_properties.co_parser = "41°22′14″N 2°09′00″E" + C.scene.sun_pos_properties.sun_distance = 3 + C.scene.sun_pos_properties.use_day_of_year = True + C.scene.sun_pos_properties.year = 2022 + C.scene.sun_pos_properties.day_of_year = 182 + + fp = abs_path(r"output_temp") + fp_img = os.path.join(fp, 'images') + fp_ann_2D = os.path.join(fp, 'annots_2D') + fp_ann_3D = 
os.path.join(fp, 'annots_3D') + + info_path = os.path.join(fp, 'infos.json') + cam_path = os.path.join(fp, 'cameras.json') + scenes_ids = OrderedDict() + if not CONTINUE or not os.path.isfile(info_path): + if os.path.isdir(fp): + shutil.rmtree(fp) + os.mkdir(fp) + + os.mkdir(fp_img) + os.mkdir(fp_ann_2D) + os.mkdir(fp_ann_3D) + + frame_rate = 25 + nb_scene = 1 + nb_pose = 3 + + human_loader.max_len = min(human_loader.max_len, nb_scene) + ratio_conf_man = int(nb_scene / len(human_loader.human_paths)) + C.scene.frame_end = int(frame_rate * (nb_pose - 0.5)) + + if CONTINUE: + try: + with open(info_path) as f_info: + scenes_ids = json.load(f_info, object_pairs_hook=OrderedDict)['id_max_scenes'] + + human_loader.human_paths = [hp for hp in human_loader.human_paths if hp not in scenes_ids] + except FileNotFoundError: + pass + + man = None + for sc in range(ratio_conf_man * len(scenes_ids), nb_scene): + # Random car + car = car_picker() + car_targets = {side: [ch for ch in car.children if f'Target_{side.upper()}' in ch.name] for side in 'lr'} + nb_targets = len(car_targets['l']) * len(car_targets['r']) + # Random personne + if ratio_conf_man < 1: + if not sc % 10: + human_loader.load_next() + man = human_loader(car=car) + else: + if not sc % ratio_conf_man: + man = human_loader.next(car=car) + else: + # human.set_bounds(man, car) + man = man(car=car) + + man_model = man.model + + human_path = human_loader.paths[man_model] + scenes_ids.setdefault(human_path, -1) + scenes_ids[human_path] += 1 + + # Random time + C.scene.sun_pos_properties.north_offset = utils.r(random.randint(-179, 180)) + time_day = random.randint(8, 18) + C.scene.sun_pos_properties.time = time_day + if random.random() < 4 / 5: + day_night = 'day' + else: + day_night = 'night' + + light_param = light_params[day_night] + light_data.energy = random.randint(*light_param['energy_bounds']) + light_data.color = light_param['sun_color'] + back_val = random.uniform(*light_param['back_color_bounds']) + bpy.data.worlds["World"].node_tree.nodes["Background.001"].inputs[0].default_value = \ + (back_val, back_val, back_val, 1) + + # Random background + back_img = random.choice(back_imgs[day_night]) + image_holder.active_material.node_tree.nodes['Image Texture'].image = D.images[back_img] + image_holder.location.y = 1.5 + random.uniform(-0.3, 0.3) + + # Camera movement + camera_object.rotation_euler = camera_rotation_0 + utils.r(Vector([random.randint(-2, 2), 0, 0])) + + C.scene.render.filepath = fp + C.scene.render.image_settings.file_format = 'PNG' + C.scene.camera = camera_object + + P, K, RT = camera_proj.get_3x4_P_matrix_from_blender(camera_object) + + file_root_name = f'{list(scenes_ids).index(human_path)}_{scenes_ids[human_path]}' + + if os.path.isfile(cam_path): + with open(cam_path, 'r') as f_cam: + previous_cameras = f_cam.read() + if previous_cameras: + previous_cameras = json.loads(previous_cameras) + else: + previous_cameras = {} + else: + previous_cameras = {} + with open(cam_path, 'w') as f_cam: + previous_cameras[file_root_name] = { + 'P': utils.mat_to_list(P), + 'K': utils.mat_to_list(K), + 'RT': utils.mat_to_list(RT), + } + json.dump(previous_cameras, f_cam, indent=4) + + man_model.animation_data_clear() + # Exemple: 150k / 200 / 2 = 1500 poses + with open(os.path.join(fp_ann_2D, f'annotations_{file_root_name}.csv'), 'w') as annot_file_2D, \ + open(os.path.join(fp_ann_3D, f'annotations_{file_root_name}.csv'), 'w') as annot_file_3D: + bone_lbls = list(man_model.pose.bones.keys()) + bone_lbls = [ + lbl for bone in bone_lbls for 
lbl in [bone + k for k in ['_head', '_tail']] if '_IK' not in lbl + ] + face_lbls = ['nose', 'eye_l', 'eye_r', 'ear_l', 'ear_r'] + full_lbls = bone_lbls + face_lbls + annot_file_2D.write( + ';'.join([lbl for bone in full_lbls for lbl in [bone + k for k in ['_x', '_y']]]) + '\n') + annot_file_3D.write( + ';'.join( + [lbl for bone in full_lbls for lbl in [bone + k for k in ['_X', '_Y', '_Z']]]) + '\n') + + for po in range(nb_pose): + C.scene.frame_set(po * frame_rate) + use_targets = nb_pose - po - 1 < nb_targets + + # use_targets = False + human.switch_constraints(man_model, enable=not use_targets) + if nb_pose < nb_targets or not use_targets: + id_targets = None + else: + id_targets = {'l': (nb_pose - po - 1) % len(car_targets['l']), + 'r': (nb_pose - po - 1) // len(car_targets['l'])} + random_pose.random_pose_ik(man_model, targets=car_targets if use_targets else None, id_targets=id_targets) + + bpy.ops.object.mode_set(mode='OBJECT') + + man_model.keyframe_insert(data_path="location", index=-1) + man_model.keyframe_insert(data_path="rotation_euler", index=-1) + + bpy.ops.object.mode_set(mode='POSE') + + for bone in man_model.pose.bones: + bone.keyframe_insert(data_path="rotation_euler", index=-1) + if bone.name[-3:] == '_IK': + bone.keyframe_insert(data_path="location", index=-1) + + bpy.ops.object.mode_set(mode='OBJECT') + + # set output path so render won't get overwritten + C.scene.render.filepath = os.path.join(fp_img, + f"{file_root_name}_{po}" + (f'_drive' if use_targets else '')) + bpy.ops.render.render(write_still=True) # render still + + annotations_2D = [] + annotations_3D = [] + for lbl in bone_lbls: + if '_tail' in lbl: + bone_3d = utils.get_tail_pose(lbl[:-5], man_model) + else: + bone_3d = utils.get_head_pose(lbl[:-5], man_model) + annotations_3D.append(f"{bone_3d[0]:.3f};{bone_3d[1]:.3f};{bone_3d[2]:.3f}") + + bone_2d = P @ bone_3d + bone_2d /= bone_2d[-1] + annotations_2D.append(f"{bone_2d[0]:.2f};{bone_2d[1]:.2f}") + + for lbl, bone_3d in human.get_face(man_model).items(): + annotations_3D.append(f"{bone_3d[0]:.3f};{bone_3d[1]:.3f};{bone_3d[2]:.3f}") + + bone_2d = P @ bone_3d + bone_2d /= bone_2d[-1] + annotations_2D.append(f"{bone_2d[0]:.2f};{bone_2d[1]:.2f}") + + annot_file_2D.write(';'.join(annotations_2D) + '\n') + annot_file_3D.write(';'.join(annotations_3D) + '\n') + + with open(info_path, 'w') as f_infos: + json.dump({ + 'models': list(scenes_ids), + 'id_max_scenes': scenes_ids + }, f_infos, indent=4) + + if sc * nb_pose % 4000 == 3999: + time.sleep(150) + + utils.select_only(man_model) + bpy.ops.object.mode_set(mode='POSE') + + print('Done', '#' * 25) + + +if __name__ == '__main__': + try: + main() + except utils.StopError: + pass diff --git a/mh_models/blender.mhskel b/mh_models/blender.mhskel new file mode 100644 index 0000000000000000000000000000000000000000..a2a3e36b3c37424a3ebca938eefa897d460c15e6 --- /dev/null +++ b/mh_models/blender.mhskel @@ -0,0 +1,1576 @@ +{ + "description": "Game engine rig with basic fame markers, to put in AppData/Local/makehuman-community/makehuman/data/rigs", + "bones": { + + "Root": { + "head": "Root____head", + "parent": null, + "reference": null, + "roll": -1.4193105697631836, + "tail": "Root____tail" + }, + "ball_l": { + "head": "ball_l____head", + "parent": "foot_l", + "reference": ["toe1-1.L"], + "roll": -0.013643646612763405, + "weights_reference": ["toe1-1.L","toe1-2.L","toe1-3.L","toe2-1.L","toe2-2.L","toe2-3.L","toe3-1.L","toe3-2.L","toe3-3.L","toe4-1.L","toe4-2.L","toe4-3.L","toe5-1.L","toe5-2.L","toe5-3.L"], + "tail": 
"ball_l____tail" + }, + "ball_r": { + "head": "ball_r____head", + "parent": "foot_r", + "reference": ["toe1-1.R"], + "roll": 0.013643646612763405, + "weights_reference": ["toe1-1.R","toe1-2.R","toe1-3.R","toe2-1.R","toe2-2.R","toe2-3.R","toe3-1.R","toe3-2.R","toe3-3.R","toe4-1.R","toe4-2.R","toe4-3.R","toe5-1.R","toe5-2.R","toe5-3.R"], + "tail": "ball_r____tail" + }, + "calf_l": { + "head": "calf_l____head", + "parent": "thigh_l", + "reference": ["lowerleg01.L","lowerleg02.L"], + "roll": 0.0, + "tail": "calf_l____tail" + }, + "calf_r": { + "head": "calf_r____head", + "parent": "thigh_r", + "reference": ["lowerleg01.R","lowerleg02.R"], + "roll": 0.0, + "tail": "calf_r____tail" + }, + "clavicle_l": { + "head": "clavicle_l____head", + "parent": "spine_03", + "reference": ["clavicle.L","shoulder01.L"], + "roll": 3.1415927410125732, + "tail": "clavicle_l____tail" + }, + "clavicle_r": { + "head": "clavicle_r____head", + "parent": "spine_03", + "reference": ["clavicle.R","shoulder01.R"], + "roll": -3.1415927410125732, + "tail": "clavicle_r____tail" + }, + "foot_l": { + "head": "foot_l____head", + "parent": "calf_l", + "reference": ["foot.L"], + "roll": 0.20731236040592194, + "tail": "foot_l____tail" + }, + "foot_r": { + "head": "foot_r____head", + "parent": "calf_r", + "reference": ["foot.R"], + "roll": -0.20731236040592194, + "tail": "foot_r____tail" + }, + "hand_l": { + "head": "hand_l____head", + "parent": "lowerarm_l", + "reference": ["wrist.L","metacarpal2.L"], + "roll": -0.5689927339553833, + "tail": "hand_l____tail", + "weights_reference": ["wrist.L","metacarpal1.L","metacarpal2.L","metacarpal3.L","metacarpal4.L"] + }, + "hand_r": { + "head": "hand_r____head", + "parent": "lowerarm_r", + "reference": ["wrist.R","metacarpal2.R"], + "roll": 0.5689927339553833, + "tail": "hand_r____tail", + "weights_reference": ["wrist.R","metacarpal1.R","metacarpal2.R","metacarpal3.R","metacarpal4.R"] + }, + "head": { + "head": "head____head", + "parent": "neck_01", + "reference": ["head"], + "roll": -3.3251599390604625e-11, + "tail": "head____tail" + }, + "index_01_l": { + "head": "index_01_l____head", + "parent": "hand_l", + "reference": ["finger2-1.L"], + "roll": -0.7064877152442932, + "tail": "index_01_l____tail" + }, + "index_01_r": { + "head": "index_01_r____head", + "parent": "hand_r", + "reference": ["finger2-1.R"], + "roll": 0.7064877152442932, + "tail": "index_01_r____tail" + }, + "index_02_l": { + "head": "index_02_l____head", + "parent": "index_01_l", + "reference": ["finger2-2.L"], + "roll": -1.0015888214111328, + "tail": "index_02_l____tail" + }, + "index_02_r": { + "head": "index_02_r____head", + "parent": "index_01_r", + "reference": ["finger2-2.R"], + "roll": 1.0015888214111328, + "tail": "index_02_r____tail" + }, + "index_03_l": { + "head": "index_03_l____head", + "parent": "index_02_l", + "reference": ["finger2-3.L"], + "roll": -1.0090924501419067, + "tail": "index_03_l____tail" + }, + "index_03_r": { + "head": "index_03_r____head", + "parent": "index_02_r", + "reference": ["finger2-3.R"], + "roll": 1.0090924501419067, + "tail": "index_03_r____tail" + }, + "lowerarm_l": { + "head": "lowerarm_l____head", + "parent": "upperarm_l", + "reference": ["lowerarm01.L","lowerarm02.L"], + "roll": 1.6184003353118896, + "tail": "lowerarm_l____tail" + }, + "lowerarm_r": { + "head": "lowerarm_r____head", + "parent": "upperarm_r", + "reference": ["lowerarm01.R","lowerarm02.R"], + "roll": -1.6184003353118896, + "tail": "lowerarm_r____tail" + }, + "middle_01_l": { + "head": "middle_01_l____head", + 
"parent": "hand_l", + "reference": ["finger3-1.L"], + "roll": -0.18284985423088074, + "tail": "middle_01_l____tail" + }, + "middle_01_r": { + "head": "middle_01_r____head", + "parent": "hand_r", + "reference": ["finger3-1.R"], + "roll": 0.18284985423088074, + "tail": "middle_01_r____tail" + }, + "middle_02_l": { + "head": "middle_02_l____head", + "parent": "middle_01_l", + "reference": ["finger3-2.L"], + "roll": -0.6355217695236206, + "tail": "middle_02_l____tail" + }, + "middle_02_r": { + "head": "middle_02_r____head", + "parent": "middle_01_r", + "reference": ["finger3-2.R"], + "roll": 0.6355217695236206, + "tail": "middle_02_r____tail" + }, + "middle_03_l": { + "head": "middle_03_l____head", + "parent": "middle_02_l", + "reference": ["finger3-3.L"], + "roll": -0.595303475856781, + "tail": "middle_03_l____tail" + }, + "middle_03_r": { + "head": "middle_03_r____head", + "parent": "middle_02_r", + "reference": ["finger3-3.R"], + "roll": 0.595303475856781, + "tail": "middle_03_r____tail" + }, + "neck_01": { + "head": "neck_01____head", + "parent": "spine_03", + "reference": ["neck01","neck02","neck03"], + "roll": 0.0, + "tail": "neck_01____tail" + }, + "pelvis": { + "head": "pelvis____head", + "parent": "Root", + "reference": ["root", "spine05"], + "weights_reference": ["root", "spine05","pelvis.R","pelvis.L"], + "roll": -4.466459702090475e-10, + "tail": "pelvis____tail" + }, + "pinky_01_l": { + "head": "pinky_01_l____head", + "parent": "hand_l", + "reference": ["finger5-1.L"], + "roll": -0.17082808911800385, + "tail": "pinky_01_l____tail" + }, + "pinky_01_r": { + "head": "pinky_01_r____head", + "parent": "hand_r", + "reference": ["finger5-1.R"], + "roll": 0.17082808911800385, + "tail": "pinky_01_r____tail" + }, + "pinky_02_l": { + "head": "pinky_02_l____head", + "parent": "pinky_01_l", + "reference": ["finger5-2.L"], + "roll": -0.5542842149734497, + "tail": "pinky_02_l____tail" + }, + "pinky_02_r": { + "head": "pinky_02_r____head", + "parent": "pinky_01_r", + "reference": ["finger5-2.R"], + "roll": 0.5542842149734497, + "tail": "pinky_02_r____tail" + }, + "pinky_03_l": { + "head": "pinky_03_l____head", + "parent": "pinky_02_l", + "reference": ["finger5-3.L"], + "roll": -0.5284829139709473, + "tail": "pinky_03_l____tail" + }, + "pinky_03_r": { + "head": "pinky_03_r____head", + "parent": "pinky_02_r", + "reference": ["finger5-3.R"], + "roll": 0.5284829139709473, + "tail": "pinky_03_r____tail" + }, + "ring_01_l": { + "head": "ring_01_l____head", + "parent": "hand_l", + "reference": ["finger4-1.L"], + "roll": 0.1281958520412445, + "tail": "ring_01_l____tail" + }, + "ring_01_r": { + "head": "ring_01_r____head", + "parent": "hand_r", + "reference": ["finger4-1.R"], + "roll": -0.1281958520412445, + "tail": "ring_01_r____tail" + }, + "ring_02_l": { + "head": "ring_02_l____head", + "parent": "ring_01_l", + "reference": ["finger4-2.L"], + "roll": -0.4322725832462311, + "tail": "ring_02_l____tail" + }, + "ring_02_r": { + "head": "ring_02_r____head", + "parent": "ring_01_r", + "reference": ["finger4-2.R"], + "roll": 0.4322725832462311, + "tail": "ring_02_r____tail" + }, + "ring_03_l": { + "head": "ring_03_l____head", + "parent": "ring_02_l", + "reference": ["finger4-3.L"], + "roll": -0.3292544186115265, + "tail": "ring_03_l____tail" + }, + "ring_03_r": { + "head": "ring_03_r____head", + "parent": "ring_02_r", + "reference": ["finger4-3.R"], + "roll": 0.3292544186115265, + "tail": "ring_03_r____tail" + }, + "spine_01": { + "head": "spine_01____head", + "parent": "pelvis", + "reference": ["spine04"], + 
"roll": 7.760130871226067e-10, + "tail": "spine_01____tail" + }, + "spine_02": { + "head": "spine_02____head", + "parent": "spine_01", + "reference": ["spine03"], + "roll": -2.192008373884128e-16, + "tail": "spine_02____tail" + }, + "spine_03": { + "head": "spine_03____head", + "parent": "spine_02", + "reference": ["spine01","spine02"], + "roll": -6.143906006172983e-08, + "tail": "spine_03____tail" + }, + "thigh_l": { + "head": "thigh_l____head", + "parent": "pelvis", + "reference": ["pelvis.L","upperleg01.L","upperleg02.L"], + "weights_reference": ["upperleg01.L","upperleg02.L"], + "roll": 1.9113411903381348, + "tail": "thigh_l____tail" + }, + "thigh_r": { + "head": "thigh_r____head", + "parent": "pelvis", + "reference": ["pelvis.R","upperleg01.R","upperleg02.R"], + "weights_reference": ["upperleg01.R","upperleg02.R"], + "roll": -1.9113411903381348, + "tail": "thigh_r____tail" + }, + "thumb_01_l": { + "head": "thumb_01_l____head", + "parent": "hand_l", + "reference": ["finger1-1.L"], + "roll": -0.9263502955436707, + "tail": "thumb_01_l____tail" + }, + "thumb_01_r": { + "head": "thumb_01_r____head", + "parent": "hand_r", + "reference": ["finger1-1.R"], + "roll": 0.927350640296936, + "tail": "thumb_01_r____tail" + }, + "thumb_02_l": { + "head": "thumb_02_l____head", + "parent": "thumb_01_l", + "reference": ["finger1-2.L"], + "roll": -0.061334121972322464, + "tail": "thumb_02_l____tail" + }, + "thumb_02_r": { + "head": "thumb_02_r____head", + "parent": "thumb_01_r", + "reference": ["finger1-2.R"], + "roll": 0.061334121972322464, + "tail": "thumb_02_r____tail" + }, + "thumb_03_l": { + "head": "thumb_03_l____head", + "parent": "thumb_02_l", + "reference": ["finger1-3.L"], + "roll": -0.08036887645721436, + "tail": "thumb_03_l____tail" + }, + "thumb_03_r": { + "head": "thumb_03_r____head", + "parent": "thumb_02_r", + "reference": ["finger1-3.R"], + "roll": 0.08036887645721436, + "tail": "thumb_03_r____tail" + }, + "upperarm_l": { + "head": "upperarm_l____head", + "parent": "clavicle_l", + "reference": ["upperarm01.L","upperarm02.L"], + "roll": 3.1391799449920654, + "tail": "upperarm_l____tail" + }, + "upperarm_r": { + "head": "upperarm_r____head", + "parent": "clavicle_r", + "reference": ["upperarm01.R","upperarm02.R"], + "roll": -3.1391799449920654, + "tail": "upperarm_r____tail" + }, + "ear_l": { + "head": "ear_l____head", + "parent": "head", + "reference": null, + "roll": 0.5, + "tail": "ear_l____tail" + }, + "ear_r": { + "head": "ear_r____head", + "parent": "head", + "reference": null, + "roll": -0.5, + "tail": "ear_r____tail" + }, + "eye_l": { + "head": "eye_l____head", + "parent": "head", + "reference": ["eye.L"], + "roll": 0.04707758128643036, + "tail": "eye_l____tail" + }, + "eye_r": { + "head": "eye_r____head", + "parent": "head", + "reference": ["eye.R"], + "roll": -0.04707758128643036, + "tail": "eye_r____tail" + }, + "chin": { + "head": "chin____head", + "parent": "head", + "reference": null, + "roll": -3.141592502593994, + "tail": "chin____tail" + }, + "nose": { + "head": "nose____head", + "parent": "head", + "reference": null, + "roll": -1.695007085800171e-06, + "tail": "nose____tail" + } + }, + "joints": { + + "Root____head": [ + 19151, + 19152, + 19155, + 19156 + ], + "Root____tail": [ + 19150, + 19153, + 19154, + 19157 + ], + "ball_l____head": [ + 13670, + 13671, + 13672, + 13673, + 13674, + 13675, + 13676, + 13677 + ], + "ball_l____tail": [ + 13726, + 13727, + 13728, + 13729, + 13730, + 13731, + 13732, + 13733 + ], + "ball_r____head": [ + 14110, + 14111, + 14112, + 14113, + 
14114, + 14115, + 14116, + 14117 + ], + "ball_r____tail": [ + 14166, + 14167, + 14168, + 14169, + 14170, + 14171, + 14172, + 14173 + ], + "calf_l____head": [ + 13838, + 13839, + 13840, + 13841, + 13842, + 13843, + 13844, + 13845 + ], + "calf_l____tail": [ + 13830, + 13831, + 13832, + 13833, + 13834, + 13835, + 13836, + 13837 + ], + "calf_r____head": [ + 14278, + 14279, + 14280, + 14281, + 14282, + 14283, + 14284, + 14285 + ], + "calf_r____tail": [ + 14270, + 14271, + 14272, + 14273, + 14274, + 14275, + 14276, + 14277 + ], + "clavicle_l____head": [ + 14054, + 14055, + 14056, + 14057, + 14058, + 14059, + 14060, + 14061 + ], + "clavicle_l____tail": [ + 14046, + 14047, + 14048, + 14049, + 14050, + 14051, + 14052, + 14053 + ], + "clavicle_r____head": [ + 14494, + 14495, + 14496, + 14497, + 14498, + 14499, + 14500, + 14501 + ], + "clavicle_r____tail": [ + 14486, + 14487, + 14488, + 14489, + 14490, + 14491, + 14492, + 14493 + ], + "middle_01_r____head": [ + 14406, + 14407, + 14408, + 14409, + 14410, + 14411, + 14412, + 14413 + ], + "middle_01_r____tail": [ + 14366, + 14367, + 14368, + 14369, + 14370, + 14371, + 14372, + 14373 + ], + "foot_l____head": [ + 13830, + 13831, + 13832, + 13833, + 13834, + 13835, + 13836, + 13837 + ], + "foot_l____tail": [ + 13670, + 13671, + 13672, + 13673, + 13674, + 13675, + 13676, + 13677 + ], + "foot_r____head": [ + 14270, + 14271, + 14272, + 14273, + 14274, + 14275, + 14276, + 14277 + ], + "foot_r____tail": [ + 14110, + 14111, + 14112, + 14113, + 14114, + 14115, + 14116, + 14117 + ], + "hand_l____head": [ + 14030, + 14031, + 14032, + 14033, + 14034, + 14035, + 14036, + 14037 + ], + "hand_l____tail": [ + 14014, + 14015, + 14016, + 14017, + 14018, + 14019, + 14020, + 14021 + ], + "hand_r____head": [ + 14470, + 14471, + 14472, + 14473, + 14474, + 14475, + 14476, + 14477 + ], + "hand_r____tail": [ + 14454, + 14455, + 14456, + 14457, + 14458, + 14459, + 14460, + 14461 + ], + "head____head": [ + 14070, + 14071, + 14072, + 14073, + 14074, + 14075, + 14076, + 14077 + ], + "head____tail": [ + 14566, + 14567, + 14568, + 14569, + 14570, + 14571, + 14572, + 14573 + ], + "index_01_l____head": [ + 13974, + 13975, + 13976, + 13977, + 13978, + 13979, + 13980, + 13981 + ], + "index_01_l____tail": [ + 13918, + 13919, + 13920, + 13921, + 13922, + 13923, + 13924, + 13925 + ], + "index_01_r____head": [ + 14414, + 14415, + 14416, + 14417, + 14418, + 14419, + 14420, + 14421 + ], + "index_01_r____tail": [ + 14358, + 14359, + 14360, + 14361, + 14362, + 14363, + 14364, + 14365 + ], + "index_02_l____head": [ + 13918, + 13919, + 13920, + 13921, + 13922, + 13923, + 13924, + 13925 + ], + "index_02_l____tail": [ + 13910, + 13911, + 13912, + 13913, + 13914, + 13915, + 13916, + 13917 + ], + "index_02_r____head": [ + 14358, + 14359, + 14360, + 14361, + 14362, + 14363, + 14364, + 14365 + ], + "index_02_r____tail": [ + 14350, + 14351, + 14352, + 14353, + 14354, + 14355, + 14356, + 14357 + ], + "index_03_l____head": [ + 13910, + 13911, + 13912, + 13913, + 13914, + 13915, + 13916, + 13917 + ], + "index_03_l____tail": [ + 13854, + 13855, + 13856, + 13857, + 13858, + 13859, + 13860, + 13861 + ], + "index_03_r____head": [ + 14350, + 14351, + 14352, + 14353, + 14354, + 14355, + 14356, + 14357 + ], + "index_03_r____tail": [ + 14294, + 14295, + 14296, + 14297, + 14298, + 14299, + 14300, + 14301 + ], + "lowerarm_l____head": [ + 14038, + 14039, + 14040, + 14041, + 14042, + 14043, + 14044, + 14045 + ], + "lowerarm_l____tail": [ + 14030, + 14031, + 14032, + 14033, + 14034, + 14035, + 14036, + 14037 + ], + 
"lowerarm_r____head": [ + 14478, + 14479, + 14480, + 14481, + 14482, + 14483, + 14484, + 14485 + ], + "lowerarm_r____tail": [ + 14470, + 14471, + 14472, + 14473, + 14474, + 14475, + 14476, + 14477 + ], + "middle_01_l____head": [ + 13966, + 13967, + 13968, + 13969, + 13970, + 13971, + 13972, + 13973 + ], + "middle_01_l____tail": [ + 13926, + 13927, + 13928, + 13929, + 13930, + 13931, + 13932, + 13933 + ], + "middle_02_l____head": [ + 13926, + 13927, + 13928, + 13929, + 13930, + 13931, + 13932, + 13933 + ], + "middle_02_l____tail": [ + 13902, + 13903, + 13904, + 13905, + 13906, + 13907, + 13908, + 13909 + ], + "middle_02_r____head": [ + 14366, + 14367, + 14368, + 14369, + 14370, + 14371, + 14372, + 14373 + ], + "middle_02_r____tail": [ + 14342, + 14343, + 14344, + 14345, + 14346, + 14347, + 14348, + 14349 + ], + "middle_03_l____head": [ + 13902, + 13903, + 13904, + 13905, + 13906, + 13907, + 13908, + 13909 + ], + "middle_03_l____tail": [ + 13862, + 13863, + 13864, + 13865, + 13866, + 13867, + 13868, + 13869 + ], + "middle_03_r____head": [ + 14342, + 14343, + 14344, + 14345, + 14346, + 14347, + 14348, + 14349 + ], + "middle_03_r____tail": [ + 14302, + 14303, + 14304, + 14305, + 14306, + 14307, + 14308, + 14309 + ], + "neck_01____head": [ + 14534, + 14535, + 14536, + 14537, + 14538, + 14539, + 14540, + 14541 + ], + "neck_01____tail": [ + 14070, + 14071, + 14072, + 14073, + 14074, + 14075, + 14076, + 14077 + ], + "pelvis____head": [ + 13622, + 13623, + 13624, + 13625, + 13626, + 13627, + 13628, + 13629 + ], + "pelvis____tail": [ + 13630, + 13631, + 13632, + 13633, + 13634, + 13635, + 13636, + 13637 + ], + "pinky_01_l____head": [ + 13950, + 13951, + 13952, + 13953, + 13954, + 13955, + 13956, + 13957 + ], + "pinky_01_l____tail": [ + 13942, + 13943, + 13944, + 13945, + 13946, + 13947, + 13948, + 13949 + ], + "pinky_01_r____head": [ + 14390, + 14391, + 14392, + 14393, + 14394, + 14395, + 14396, + 14397 + ], + "pinky_01_r____tail": [ + 14382, + 14383, + 14384, + 14385, + 14386, + 14387, + 14388, + 14389 + ], + "pinky_02_l____head": [ + 13942, + 13943, + 13944, + 13945, + 13946, + 13947, + 13948, + 13949 + ], + "pinky_02_l____tail": [ + 13886, + 13887, + 13888, + 13889, + 13890, + 13891, + 13892, + 13893 + ], + "pinky_02_r____head": [ + 14382, + 14383, + 14384, + 14385, + 14386, + 14387, + 14388, + 14389 + ], + "pinky_02_r____tail": [ + 14326, + 14327, + 14328, + 14329, + 14330, + 14331, + 14332, + 14333 + ], + "pinky_03_l____head": [ + 13886, + 13887, + 13888, + 13889, + 13890, + 13891, + 13892, + 13893 + ], + "pinky_03_l____tail": [ + 13878, + 13879, + 13880, + 13881, + 13882, + 13883, + 13884, + 13885 + ], + "pinky_03_r____head": [ + 14326, + 14327, + 14328, + 14329, + 14330, + 14331, + 14332, + 14333 + ], + "pinky_03_r____tail": [ + 14318, + 14319, + 14320, + 14321, + 14322, + 14323, + 14324, + 14325 + ], + "ring_01_l____head": [ + 13958, + 13959, + 13960, + 13961, + 13962, + 13963, + 13964, + 13965 + ], + "ring_01_l____tail": [ + 13934, + 13935, + 13936, + 13937, + 13938, + 13939, + 13940, + 13941 + ], + "ring_01_r____head": [ + 14398, + 14399, + 14400, + 14401, + 14402, + 14403, + 14404, + 14405 + ], + "ring_01_r____tail": [ + 14374, + 14375, + 14376, + 14377, + 14378, + 14379, + 14380, + 14381 + ], + "ring_02_l____head": [ + 13934, + 13935, + 13936, + 13937, + 13938, + 13939, + 13940, + 13941 + ], + "ring_02_l____tail": [ + 13894, + 13895, + 13896, + 13897, + 13898, + 13899, + 13900, + 13901 + ], + "ring_02_r____head": [ + 14374, + 14375, + 14376, + 14377, + 14378, + 14379, + 14380, + 14381 + 
], + "ring_02_r____tail": [ + 14334, + 14335, + 14336, + 14337, + 14338, + 14339, + 14340, + 14341 + ], + "ring_03_l____head": [ + 13894, + 13895, + 13896, + 13897, + 13898, + 13899, + 13900, + 13901 + ], + "ring_03_l____tail": [ + 13870, + 13871, + 13872, + 13873, + 13874, + 13875, + 13876, + 13877 + ], + "ring_03_r____head": [ + 14334, + 14335, + 14336, + 14337, + 14338, + 14339, + 14340, + 14341 + ], + "ring_03_r____tail": [ + 14310, + 14311, + 14312, + 14313, + 14314, + 14315, + 14316, + 14317 + ], + "spine_01____head": [ + 13630, + 13631, + 13632, + 13633, + 13634, + 13635, + 13636, + 13637 + ], + "spine_01____tail": [ + 13638, + 13639, + 13640, + 13641, + 13642, + 13643, + 13644, + 13645 + ], + "spine_02____head": [ + 13638, + 13639, + 13640, + 13641, + 13642, + 13643, + 13644, + 13645 + ], + "spine_02____tail": [ + 13646, + 13647, + 13648, + 13649, + 13650, + 13651, + 13652, + 13653 + ], + "spine_03____head": [ + 13646, + 13647, + 13648, + 13649, + 13650, + 13651, + 13652, + 13653 + ], + "spine_03____tail": [ + 13654, + 13655, + 13656, + 13657, + 13658, + 13659, + 13660, + 13661 + ], + "thigh_l____head": [ + 13846, + 13847, + 13848, + 13849, + 13850, + 13851, + 13852, + 13853 + ], + "thigh_l____tail": [ + 13838, + 13839, + 13840, + 13841, + 13842, + 13843, + 13844, + 13845 + ], + "thigh_r____head": [ + 14286, + 14287, + 14288, + 14289, + 14290, + 14291, + 14292, + 14293 + ], + "thigh_r____tail": [ + 14278, + 14279, + 14280, + 14281, + 14282, + 14283, + 14284, + 14285 + ], + "thumb_01_l____head": [ + 14006, + 14007, + 14008, + 14009, + 14010, + 14011, + 14012, + 14013 + ], + "thumb_01_l____tail": [ + 13998, + 13999, + 14000, + 14001, + 14002, + 14003, + 14004, + 14005 + ], + "thumb_01_r____head": [ + 14446, + 14447, + 14448, + 14449, + 14450, + 14451, + 14452, + 14453 + ], + "thumb_01_r____tail": [ + 14438, + 14439, + 14440, + 14441, + 14442, + 14443, + 14444, + 14445 + ], + "thumb_02_l____head": [ + 13998, + 13999, + 14000, + 14001, + 14002, + 14003, + 14004, + 14005 + ], + "thumb_02_l____tail": [ + 13990, + 13991, + 13992, + 13993, + 13994, + 13995, + 13996, + 13997 + ], + "thumb_02_r____head": [ + 14438, + 14439, + 14440, + 14441, + 14442, + 14443, + 14444, + 14445 + ], + "thumb_02_r____tail": [ + 14430, + 14431, + 14432, + 14433, + 14434, + 14435, + 14436, + 14437 + ], + "thumb_03_l____head": [ + 13990, + 13991, + 13992, + 13993, + 13994, + 13995, + 13996, + 13997 + ], + "thumb_03_l____tail": [ + 13982, + 13983, + 13984, + 13985, + 13986, + 13987, + 13988, + 13989 + ], + "thumb_03_r____head": [ + 14430, + 14431, + 14432, + 14433, + 14434, + 14435, + 14436, + 14437 + ], + "thumb_03_r____tail": [ + 14422, + 14423, + 14424, + 14425, + 14426, + 14427, + 14428, + 14429 + ], + "upperarm_l____head": [ + 14046, + 14047, + 14048, + 14049, + 14050, + 14051, + 14052, + 14053 + ], + "upperarm_l____tail": [ + 14038, + 14039, + 14040, + 14041, + 14042, + 14043, + 14044, + 14045 + ], + "upperarm_r____head": [ + 14486, + 14487, + 14488, + 14489, + 14490, + 14491, + 14492, + 14493 + ], + "upperarm_r____tail": [ + 14478, + 14479, + 14480, + 14481, + 14482, + 14483, + 14484, + 14485 + ], + "ear_l____head": [ + 14070, + 14071, + 14072, + 14073, + 14074, + 14075, + 14076, + 14077 + ], + "ear_l____tail": [ + 12289 + ], + "ear_r____head": [ + 14070, + 14071, + 14072, + 14073, + 14074, + 14075, + 14076, + 14077 + ], + "ear_r____tail": [ + 5692 + ],"eye_l____head": [ + 13606, + 13607, + 13608, + 13609, + 13610, + 13611, + 13612, + 13613 + ], + "eye_l____tail": [ + 14086, + 14087, + 14088, + 14089, + 
14090, + 14091, + 14092, + 14093 + ], + "eye_r____head": [ + 13614, + 13615, + 13616, + 13617, + 13618, + 13619, + 13620, + 13621 + ], + "eye_r____tail": [ + 14518, + 14519, + 14520, + 14521, + 14522, + 14523, + 14524, + 14525 + ], + "chin____head": [ + 14590, + 14591, + 14592, + 14593, + 14594, + 14595, + 14596, + 14597 + ], + "chin____tail": [ + 5171 + ], + "nose____head": [ + 14590, + 14591, + 14592, + 14593, + 14594, + 14595, + 14596, + 14597 + ], + "nose____tail": [ + 5054 + ] + }, + "license": "GNU Affero General Public License 3", + "name": "Blender", + "tags": ["mocap"], + "version": 110 +} \ No newline at end of file diff --git a/mh_models/blender.thumb b/mh_models/blender.thumb new file mode 100644 index 0000000000000000000000000000000000000000..6f3fe1470195c3da011f49ca70844563dea838e6 Binary files /dev/null and b/mh_models/blender.thumb differ diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scripts/camera_proj.py b/scripts/camera_proj.py new file mode 100644 index 0000000000000000000000000000000000000000..8e8192457cd2bd1a8da5e6f0f0693f394c76b1ee --- /dev/null +++ b/scripts/camera_proj.py @@ -0,0 +1,143 @@ +import bpy +from mathutils import Matrix, Vector + + +# --------------------------------------------------------------- +# 3x4 P matrix from Blender camera +# --------------------------------------------------------------- + +# BKE_camera_sensor_size +def get_sensor_size(sensor_fit, sensor_x, sensor_y): + if sensor_fit == 'VERTICAL': + return sensor_y + return sensor_x + + +# BKE_camera_sensor_fit +def get_sensor_fit(sensor_fit, size_x, size_y): + if sensor_fit == 'AUTO': + if size_x >= size_y: + return 'HORIZONTAL' + else: + return 'VERTICAL' + return sensor_fit + + +# Build intrinsic camera parameters from Blender camera data +# +# See notes on this in +# blender.stackexchange.com/questions/15102/what-is-blenders-camera-projection-matrix-model +# as well as +# https://blender.stackexchange.com/a/120063/3581 +def get_calibration_matrix_K_from_blender(camd): + if camd.type != 'PERSP': + raise ValueError('Non-perspective cameras not supported') + scene = bpy.context.scene + f_in_mm = camd.lens + scale = scene.render.resolution_percentage / 100 + resolution_x_in_px = scale * scene.render.resolution_x + resolution_y_in_px = scale * scene.render.resolution_y + sensor_size_in_mm = get_sensor_size(camd.sensor_fit, camd.sensor_width, camd.sensor_height) + sensor_fit = get_sensor_fit( + camd.sensor_fit, + scene.render.pixel_aspect_x * resolution_x_in_px, + scene.render.pixel_aspect_y * resolution_y_in_px + ) + pixel_aspect_ratio = scene.render.pixel_aspect_y / scene.render.pixel_aspect_x + if sensor_fit == 'HORIZONTAL': + view_fac_in_px = resolution_x_in_px + else: + view_fac_in_px = pixel_aspect_ratio * resolution_y_in_px + pixel_size_mm_per_px = sensor_size_in_mm / f_in_mm / view_fac_in_px + s_u = 1 / pixel_size_mm_per_px + s_v = 1 / pixel_size_mm_per_px / pixel_aspect_ratio + + # Parameters of intrinsic calibration matrix K + u_0 = resolution_x_in_px / 2 - camd.shift_x * view_fac_in_px + v_0 = resolution_y_in_px / 2 + camd.shift_y * view_fac_in_px / pixel_aspect_ratio + skew = 0 # only use rectangular pixels + + K = Matrix( + ((s_u, skew, u_0), + (0, s_v, v_0), + (0, 0, 1))) + return K + + +# Returns camera rotation and translation matrices from Blender. +# +# There are 3 coordinate systems involved: +# 1. 
The World coordinates: "world" +# - right-handed +# 2. The Blender camera coordinates: "bcam" +# - x is horizontal +# - y is up +# - right-handed: negative z look-at direction +# 3. The desired computer vision camera coordinates: "cv" +# - x is horizontal +# - y is down (to align to the actual pixel coordinates +# used in digital images) +# - right-handed: positive z look-at direction +def get_3x4_RT_matrix_from_blender(cam): + # bcam stands for blender camera + R_bcam2cv = Matrix( + ((1, 0, 0), + (0, -1, 0), + (0, 0, -1))) + + # Transpose since the rotation is object rotation, + # and we want coordinate rotation + # R_world2bcam = cam.rotation_euler.to_matrix().transposed() + # T_world2bcam = -1*R_world2bcam @ location + # + # Use matrix_world instead to account for all constraints + location, rotation = cam.matrix_world.decompose()[0:2] + R_world2bcam = rotation.to_matrix().transposed() + + # Convert camera location to translation vector used in coordinate changes + # T_world2bcam = -1*R_world2bcam @ cam.location + # Use location from matrix_world to account for constraints: + T_world2bcam = -1 * R_world2bcam @ location + + # Build the coordinate transform matrix from world to computer vision camera + R_world2cv = R_bcam2cv @ R_world2bcam + T_world2cv = R_bcam2cv @ T_world2bcam + + # put into 3x4 matrix + RT = Matrix(( + R_world2cv[0][:] + (T_world2cv[0],), + R_world2cv[1][:] + (T_world2cv[1],), + R_world2cv[2][:] + (T_world2cv[2],) + )) + return RT + + +def get_3x4_P_matrix_from_blender(cam): + bpy.context.view_layer.update() + K = get_calibration_matrix_K_from_blender(cam.data) + RT = get_3x4_RT_matrix_from_blender(cam) + return K @ RT, K, RT + + +# ---------------------------------------------------------- +if __name__ == "__main__": + # Insert your camera name here + cam = bpy.data.objects['Camera'] + P, K, RT = get_3x4_P_matrix_from_blender(cam) + print("K") + print(K) + print("RT") + print(RT) + print("P") + print(P) + + print("==== 3D Cursor projection ====") + pc = P @ bpy.context.scene.cursor.location + pc /= pc[2] + print("Projected cursor location") + print(pc) + + # Bonus code: save the 3x4 P matrix into a plain text file + # Don't forget to import numpy for this + # nP = numpy.matrix(P) + # numpy.savetxt("/tmp/P3x4.txt", nP) # to select precision, use e.g. 
fmt='%.2f diff --git a/scripts/human.py b/scripts/human.py new file mode 100644 index 0000000000000000000000000000000000000000..051390136869f913252e445d2b105b5f786fad47 --- /dev/null +++ b/scripts/human.py @@ -0,0 +1,381 @@ +import json +import glob +import os + +from mathutils import Vector, Matrix +import colorsys + +from scripts import utils + +import importlib + +importlib.reload(utils) +from scripts.utils import * + +D = bpy.data + +# Load clothes list +with open(r"mh_models\mh_mass_produce.json") as f: + data = json.load(f) + hairs = [name.replace(' ', '_') for name in data['hair']['allNames']] + tops = [] + bottoms = [] + suits = [] + for name, cloth in data['clothes']['clothesInfos'].items(): + if cloth["maleUpper"] or cloth["femaleUpper"] or cloth["mixedUpper"]: + tops.append(name.replace(' ', '_')) + if cloth["maleLower"] or cloth["femaleLower"] or cloth["mixedLower"]: + bottoms.append(name.replace(' ', '_')) + if cloth["maleFull"] or cloth["femaleFull"] or cloth["mixedFull"]: + bottoms.append(name.replace(' ', '_')) + + +class Human: + def __init__(self, model): + self.model = model + self.name = self.model.name + + self.init_textures() + self.init_bones() + self.top = self.init_color_clothes(tops + suits) + self.bot = self.init_color_clothes(bottoms + suits) + + self.hairs = self.init_color_hairs(hairs) + + def init_color_clothes(self, clothes): + mat = None + for obj in self.model.children: + for clo in clothes: + if clo.lower() in obj.name.lower(): + if not len(obj.material_slots): + break + utils.select_only(obj) + mat = obj.material_slots[0].material + mat.use_nodes = True + obj.active_material = mat + nodes = mat.node_tree.nodes + node_ramp = nodes.new("ShaderNodeValToRGB") + node_ramp.color_ramp.elements.remove(node_ramp.color_ramp.elements[0]) + node_ramp.color_ramp.elements[0].position = 1. + node_ramp.color_ramp.color_mode = 'HSV' + + input_links = nodes['Principled BSDF'].inputs[0].links + nodes["Principled BSDF"].inputs["Roughness"].default_value = max( + nodes["Principled BSDF"].inputs["Roughness"].default_value, 0.35) + + if len(input_links): + img_node = nodes['Principled BSDF'].inputs[0].links[0].from_node + + node_mix = nodes.new("ShaderNodeMixRGB") + node_mix.blend_type = 'MIX' + node_mix.inputs[0].default_value = 0.5 + + mat.node_tree.links.new(img_node.outputs[0], node_mix.inputs[1]) + mat.node_tree.links.new(node_ramp.outputs[0], node_mix.inputs[2]) + mat.node_tree.links.new(node_mix.outputs[0], nodes['Principled BSDF'].inputs[0]) + else: + mat.node_tree.links.new(node_ramp.outputs[0], nodes['Principled BSDF'].inputs[0]) + + node_ramp.color_ramp.elements[0].color = random_HSV() + node_ramp.color_ramp.elements[0].color = random_HSV() + break + + return mat + + def init_color_hairs(self, hairs): + mat = None + for obj in self.model.children: + for hair in hairs: + if hair.lower() in obj.name.lower(): + if not len(obj.material_slots): + break + + utils.select_only(obj) + mat = obj.material_slots[0].material + mat.use_nodes = True + obj.active_material = mat + nodes = mat.node_tree.nodes + + input_links = nodes['Principled BSDF'].inputs[0].links + nodes["Principled BSDF"].inputs["Roughness"].default_value = max( + nodes["Principled BSDF"].inputs["Roughness"].default_value, 0.5) + + if len(input_links): + img_node = nodes['Principled BSDF'].inputs[0].links[0].from_node + + node_hsv = nodes.new("ShaderNodeHueSaturation") + node_hsv.inputs[1].default_value = 0. 
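+                        # Desaturate the original hair texture, then ADD a random
+                        # tint so that each generated model gets a distinct hair colour.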
+ + node_mix = nodes.new("ShaderNodeMixRGB") + node_mix.blend_type = 'ADD' + node_mix.inputs[0].default_value = 1. + node_mix.inputs[2].default_value = random_color_hair() + + mat.node_tree.links.new(img_node.outputs[0], node_hsv.inputs[4]) + mat.node_tree.links.new(node_hsv.outputs[0], node_mix.inputs[1]) + mat.node_tree.links.new(node_mix.outputs[0], nodes['Principled BSDF'].inputs[0]) + + break + return mat + + def init_bones(self): + armature = bpy.data.armatures[self.model.name] + bpy.ops.object.mode_set(mode='EDIT') + for s in ('l', 'r'): + # Disconnect clavicle + armature.edit_bones[f'clavicle_{s}'].use_connect = False + + # Create IK + hand = armature.edit_bones[f'hand_{s}'] + hand_ik = armature.edit_bones.new(f'hand_{s}_IK') + utils.select_only_edit_bone(armature, hand_ik) + hand_ik.length = hand.length + bpy.ops.transform.resize(value=Vector((1.5, 1.5, 1.5))) + + bpy.ops.object.mode_set(mode='POSE') + ik_constraints = { + 'upperarm': [(-45, 120), (-90, 90), (-80, 80)], + 'lowerarm': [(-30, 120), None, None], + 'clavicle': [None, None, None] + } + for s in ('l', 'r'): + lowerarm = self.model.pose.bones[f'lowerarm_{s}'] + lowerarm.constraints.new('IK') + lowerarm.constraints['IK'].target = self.model + lowerarm.constraints['IK'].subtarget = f'hand_{s}_IK' + lowerarm.constraints['IK'].chain_count = 2 + + hand = self.model.pose.bones[f'hand_{s}'] + hand.constraints.new("COPY_ROTATION") + hand.constraints['Copy Rotation'].enabled = False + hand.constraints['Copy Rotation'].target = self.model + hand.constraints['Copy Rotation'].subtarget = f'hand_{s}_IK' + + for name, consts in ik_constraints.items(): + bone = self.model.pose.bones[f'{name}_{s}'] + for axe, const in zip(('x', 'y', 'z'), consts): + if const is None: + setattr(bone, f'lock_ik_{axe}', True) + else: + setattr(bone, f'lock_ik_{axe}', False) + setattr(bone, f'use_ik_limit_{axe}', True) + setattr(bone, f'ik_min_{axe}', utils.r(const[0])) + setattr(bone, f'ik_max_{axe}', utils.r(const[1])) + + self.model.pose.bones[f'hand_{s}_IK'].location = Vector((0, 1.2, 0.25)) + + for i in '123': + self.model.pose.bones[f'spine_0{i}'].lock_ik_x = False + self.model.pose.bones[f'spine_0{i}'].use_ik_limit_x = True + self.model.pose.bones[f'spine_0{i}'].ik_min_x = utils.r(20) if i == '3' else 0 + self.model.pose.bones[f'spine_0{i}'].lock_ik_z = True + + bpy.ops.object.mode_set(mode='OBJECT') + + def init_textures(self): + for o in self.model.children: + if not o.type == 'MESH': + continue + if 'Mesh' not in o.name: + o.name = o.name.split(':')[-1].lower() + "Mesh" + if "eye" in o.name or "high" in o.name: + continue + try: + for hair in hairs: + if hair.lower().replace(' ', '_') in o.name.lower(): + o.active_material.blend_method = 'HASHED' + break + else: + o.active_material.blend_method = 'OPAQUE' + + except AttributeError: + continue + + def refresh(self, car=None): + for cloth, name in ((self.top, 'Top'), (self.bot, 'Bot')): + if cloth is not None: + cloth.node_tree.nodes["ColorRamp"].color_ramp.elements[0].color = random_HSV() + cloth.node_tree.nodes["Mix"].inputs[0].default_value = random.uniform(0.2, 0.6) + else: + print(name, self.model.name) + + if "Mix" in self.hairs.node_tree.nodes: + self.hairs.node_tree.nodes["Mix"].inputs[2].default_value = random_color_hair() + else: + self.hairs.node_tree.nodes["Principled BSDF"].inputs[0].default_value = random_color_hair() + print(car) + set_bounds(self.model, car) + + +def get_face(model): + # Compute approximate coordinates of face markers + previous_mode = bpy.context.mode + 
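+    # Switch to object mode to evaluate the marker positions; the caller's original mode is restored at the end of the function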
bpy.ops.object.mode_set(mode='OBJECT') + matrix_world = model.matrix_world.copy() + + face_3D = {} + for marker in ['nose', 'eye_l', 'eye_r', 'ear_l', 'ear_r']: + if 'eye' in marker: + marker_loc = model.pose.bones[marker].head * 0.65 + model.pose.bones[marker].tail * 0.35 + else: + marker_loc = model.pose.bones[marker].tail + face_3D[marker] = matrix_world @ marker_loc + + bpy.ops.object.mode_set(mode=previous_mode) + return face_3D + + +def random_HSV(): + color_hsv = [random.random() for _ in range(3)] + color_hsv[1] *= 0.8 # threshold saturation + rgb_color = colorsys.hsv_to_rgb(*color_hsv) + + return list(rgb_color) + [1, ] + + +def random_color_hair(): + color_hsv = [random.uniform(0.06, 0.12), 0.7, random.random() ** 3 * 0.8] + rgb_color = colorsys.hsv_to_rgb(*color_hsv) + + return list(rgb_color) + [1, ] + + +def switch_constraints(model, enable=False): + for s in 'lr': + hand_ik = model.pose.bones[f'hand_{s}_IK'] + for constr in hand_ik.constraints: + constr.enabled = enable + model.pose.bones[f'hand_{s}'].constraints['Copy Rotation'].enabled = not enable + model.pose.bones[f'lowerarm_{s}'].constraints['IK'].chain_count = 2 + + +def set_bounds(model, car=None): + set_shrinkwraps(model, car) + + +def set_shrinkwraps(model, car=None): + original_mode = bpy.context.mode + utils.select_only(model) + bpy.ops.object.mode_set(mode='POSE') + + out_objects = None + if car is not None: + out_objects = [ch for ch in car.children_recursive if (ch.name[:4] == 'OUT_' or ch.name[:3] == 'IN_')] + + for s in 'lr': + bone = model.pose.bones[f'hand_{s}_IK'] + [bone.constraints.remove(constr) for constr in bone.constraints if 'Shrinkwrap' in constr.name] + if car is None: + continue + for obj in out_objects: + constr = bone.constraints.new('SHRINKWRAP') + constr.target = obj + constr.wrap_mode = 'OUTSIDE' if obj.name[:4] == 'OUT_' else 'INSIDE' + if 'Back' in obj.name: + constr.distance = model.pose.bones['lowerarm_l'].length * model.scale.x / obj.scale.y * 1 + if 'Side' in obj.name: + constr.distance = model.pose.bones[ + 'lowerarm_l'].length * model.scale.x / obj.scale.z * 0.2 + else: + constr.distance = model.pose.bones['hand_l'].length * model.scale.x / obj.scale.z * 1.5 + + bpy.ops.object.mode_set(mode=original_mode) + + +class HumanLoader: + def __init__(self, dir_path, max_len=10): + self.human_paths = glob.glob(os.path.join(dir_path, '*.mhx2')) + random.shuffle(self.human_paths) + self.paths = {} + self.humans = {} + self.max_len = max_len + self.start_loaded = 0 + self.current_idx = -1 + + self.collection = D.collections.new("Humans") + C.scene.collection.children.link(self.collection) + + self.picker = None + # self.load_next() + + def load_next(self): + if self.max_len >= len(self.human_paths): + # If asked too much + self.start_loaded = 0 + self.max_len = len(self.human_paths) + if len(self.paths) == len(self.human_paths): + # If everything already loaded + return + else: + end_loaded = len(self.human_paths) + else: + end_loaded = self.start_loaded + self.max_len + + human_paths = (self.human_paths * 2)[self.start_loaded:end_loaded] + human_paths = list({k: None for k in human_paths}) + + already_loaded = [hp for hp in human_paths if hp in self.paths.values()] + self.paths = {k: v for k, v in self.paths.items() if v in already_loaded} + self.humans = {k: v for k, v in self.humans.items() if k in already_loaded} + clear_humans(exceptions=list(self.paths)) + for human_path in human_paths: + if human_path in already_loaded: + continue + 
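+            # Import the MHX2 file through the MakeHuman add-on; the newly created armature becomes the active object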
bpy.ops.import_scene.makehuman_mhx2(filepath=os.path.abspath(human_path)) + + model = C.active_object + self.paths[model] = human_path + self.move_human(model) + armature = D.armatures[model.name] + armature.show_axes = True + armature.display_type = "OCTAHEDRAL" + model.show_in_front = False + self.humans[human_path] = Human(model) + + self.picker = utils.Randomizer(list(self.humans.values())) + self.start_loaded = end_loaded % len(self.human_paths) + + def next(self, car=None): + self.current_idx += 1 + if not self.current_idx % self.max_len: + self.load_next() + self.current_idx = 0 + choice = self.picker.get(self.current_idx) + choice.refresh(car=car) + return choice + + def move_human(self, obj): + for ch in obj.children: + self.move_human(ch) + + if obj.name not in self.collection.objects: + for col in obj.users_collection: + if col not in [C.collection, self.collection.objects]: + D.collections.remove(col) + self.collection.objects.link(obj) + + utils.select_only(obj) + + def __call__(self, car=None): + choice = self.picker() + choice.refresh(car=car) + return choice + + +def clear_humans(exceptions=[]): + collection = D.collections["Humans"] + exceptions = exceptions + [ch for obj in exceptions for ch in obj.children_recursive] + + for obj in collection.objects: + if obj in exceptions: + continue + for ch in obj.children_recursive + [obj]: + obj_data = ch.data + if isinstance(obj_data, bpy.types.Armature): + D.armatures.remove(obj_data) + elif isinstance(obj_data, bpy.types.Mesh): + D.meshes.remove(obj_data) + try: + D.objects.remove(obj_data) + except ReferenceError: + pass diff --git a/scripts/random_pose.py b/scripts/random_pose.py new file mode 100644 index 0000000000000000000000000000000000000000..4c27112ba74aadb0fe9ece8ecc55153cc70b2a2c --- /dev/null +++ b/scripts/random_pose.py @@ -0,0 +1,217 @@ +from math import cos, sin, pi, acos + +from mathutils import Vector + +from scripts import utils + +import importlib + +importlib.reload(utils) +from scripts.utils import * + + +def default_rots(): + return { + 'hand_r': (0, 0, 0), + 'hand_l': (0, 0, 0), + 'thigh_r': (r(-70), r(-5 - random.random() * 15), 0), + 'thigh_l': (r(-70), r(5 + random.random() * 15), 0), + 'calf_r': (r(20 + random.random() * 50), r(30), 0), + 'calf_l': (r(20 + random.random() * 50), r(-30), 0) + } + + +def bounds(): + values = { + 'spine_03': [[0, 40], 45, None], + 'upperarm_l': [None, 10, 10], + 'upperarm_r': [None, 25, 20], + 'neck_01': [[0, 40], 0, 10], + 'lowerarm_X': [[-25, 50], [0, 35], None] + } + + temp = values.copy() + for k, v in temp.items(): + if '_X' in k: + values.update({k.replace('X', u): v for u in ['l', 'r']}) + del (values[k]) + + for k, v in values.items(): + values[k] = [[-a, a] if isinstance(a, int) else a for a in v] + + return values + + +def get_angles(angs): + new_angs = [] + for ang in angs: + if isinstance(ang, list): + new_angs.append(random.randint(*ang)) + elif isinstance(ang, int): + new_angs.append(random.randint(-ang, ang)) + elif ang is None: + new_angs.append(0) + + return Vector(r(new_angs)) + + +def reset_subject(subject): + bpy.ops.object.mode_set(mode='POSE') + subject.location = [0, 0, 0] + subject.rotation_euler = [0, 0, 0] + + for bone in subject.pose.bones: + bone.rotation_mode = 'XYZ' + if bone.name in ["Root", "pelvis"]: + continue + if '_IK' in bone.name: + bone.location = (0, 1.2, 0.25) + else: + bone.location = (0, 0, 0) + bone.rotation_euler = (0, 0, 0) + + bpy.ops.object.mode_set(mode='OBJECT') + subject.scale = [0.9] * 3 + sit_height = 
utils.get_head_pose('pelvis', subject).z * 0.88 + subject.location = [0, -0.04, - sit_height] + subject.rotation_euler = r([-16, 0, 0]) + + +def hand_pose(pose, side, grasp=None): + if grasp is None: + hand_ratio = random.uniform(0.1, 0.8) + elif grasp is True: + hand_ratio = random.uniform(0.5, 0.8) + elif grasp is False: + hand_ratio = random.uniform(0.05, 0.15) + else: + hand_ratio = 0 + + for finger in ['thumb', 'index', 'middle', 'ring', 'pinky']: + angles = r([40, 40, 40]) if finger == 'thumb' else r([70, 90, 40]) + solo_ratio = random.uniform(0.7, 1) * hand_ratio + for i in range(3): + pose.bones[f'{finger}_{i + 1:02}_{side}'].rotation_euler.x = angles[i] * solo_ratio + + +def random_pose_ik(subject, auto_ik=False, targets=None, id_targets=None): + """ + 1- reset and fix legs + 2- randomize back + 2b- bend back when too much twisted + 3- randomize neck (backward proportional to back bending) + 4- move arms with IK + :param subject: subject Object + :param auto_ik: use auto_ik option + :param targets: choose among fixed wrist targets + :return: + """ + # 1 + bpy.ops.object.mode_set(mode='OBJECT') + + pose = subject.pose + select_only(subject) + + def rota(bone): + return Vector(pose.bones[bone].rotation_euler) + + reset_subject(subject) + arm_length = dist(get_head_pose('upperarm_l', subject), get_head_pose('hand_l', subject)) + + base_rots = default_rots() + matrix_world = C.active_object.matrix_world.copy() + bpy.ops.object.mode_set(mode='POSE') + + bounds_vals = bounds() + for bone, angles in base_rots.items(): + pose.bones[bone].rotation_euler = angles + + if targets is None: + # 2 + pose.bones['spine_03'].rotation_euler = get_angles(bounds_vals['spine_03']) + + # 2b Compensate for shoulder in seat by bending back to front + pose.bones['spine_01'].rotation_euler = r( + Vector((random.randint(0, 10) + max(d(abs(rota('spine_03').y)) - 20, 0) / 2, 0, + 0))) + rota('spine_01') + + # 3 + pose.bones['neck_01'].rotation_euler = ( + get_angles(bounds_vals['neck_01']) + + get_angles( + [[d((rota('spine_01').x + rota('spine_03').x) * -0.5), 0], None, None]) + + Vector((0, random.uniform(0, 0.5) * rota('spine_03').y, 0)) + ) + + else: + pose.bones['spine_03'].rotation_euler = get_angles([[5, 20], 15, None]) + pose.bones['neck_01'].rotation_euler = get_angles([[5, 25], 0, 10]) + + pose.use_auto_ik = auto_ik + + for s in ['l', 'r']: + # Disconnect clavicle + armature = bpy.data.armatures[subject.name] + + if auto_ik: + pose_bone = pose.bones[f'hand_{s}'] + else: + pose_bone = pose.bones[f'hand_{s}_IK'] + + bone = pose_bone.bone + select_only_bone(armature, bone) + + if targets is None: + target = Vector() + shoulder_pose = get_head_pose(f'upperarm_{s}', subject) + + back_forward_angle = rota('spine_03').x + rota('spine_01').x - r(30) # 0 = straight + + phi = random.uniform( + max((r(-160) if s == 'r' else r(-100)) + rota('spine_03').y, r(-160)), + min((r(-80) if s == 'r' else r(-40)) + rota('spine_03').y, r(-20)) + ) + theta_bound = 0.8 + costheta = random.uniform(max(-0.8, -cos(theta_bound - back_forward_angle - rota('neck_01').x)), + min(0.8, cos(theta_bound + back_forward_angle + rota('neck_01').x))) + + theta = acos(costheta) + + min_arm_factor = 0.2 + max(sin(back_forward_angle), 0) + u = random.uniform(min_arm_factor, 1) * arm_length + + target.x = u * sin(theta) * cos(phi) + target.y = u * sin(theta) * sin(phi) + target.z = u * cos(theta) + + target += shoulder_pose + + hand_pose(pose, side=s) + else: + if id_targets is None: + target = random.choice(targets[s]) + else: + try: + target 
= targets[s][id_targets[s]] + except IndexError as err: + print(targets[s], id_targets, s) + raise (err) + pose.bones[f'hand_{s}_IK'].rotation_euler = Vector((0, 0, 0)) + pose.bones[f'hand_{s}_IK'].rotation_euler = ( + ((matrix_world @ pose.bones[f'hand_{s}_IK'].matrix).inverted() @ target.matrix_world).to_euler()) + hand_pose(pose, side=s, grasp="_close" in target.name) + + target = target.location + + location = get_head_pose(bone.name, subject) + bpy.ops.transform.translate(value=target - location) + + if targets is not None: + for s in 'lr': + subject.pose.bones[f'lowerarm_{s}'].constraints['IK'].chain_count = 6 + + bpy.ops.object.mode_set(mode='OBJECT') + + +if __name__ == '__main__': + subject = get_object('Subject') diff --git a/scripts/utils.py b/scripts/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..0c4bcb4eedeb0bec31f1f6dd26f4e84fbd5422f7 --- /dev/null +++ b/scripts/utils.py @@ -0,0 +1,163 @@ +import random +from math import pi + +import bpy +import numpy as np + + +# Maths utils +def rd(a, d): + if type(a) in [float, int]: + if d: + return int(180 * a / pi) + else: + return pi * a / 180. + else: + try: + iterator = iter(a) + except TypeError as te: + raise ValueError('Cant convert to radians ', a) + return type(a)([r(k) for k in a]) + + +def r(a): + return rd(a, d=False) + + +def d(a): + return rd(a, d=True) + + +def dist(a, b): + return sum([k ** 2 for k in (a - b)]) ** 0.5 + + +# Blender utils +C = bpy.context +D = bpy.data + + +def hide_object(obj, hide=True): + for o in obj.children: + hide_object(o, hide=hide) + obj.hide_set(hide) + obj.hide_select = hide + if 'Plane' in obj.name or 'OUT_' in obj.name or 'IN_' in obj.name: + obj.hide_render = True + obj.hide_set(True) + else: + obj.hide_render = hide + + +def get_object(name): + return bpy.data.objects[name] + + +def unselect_all(): + select_only(None) + + +def select_only(obj=None): + for ob in bpy.data.objects: + ob.select_set(False) + + if obj is None: + return + + obj.select_set(True) + bpy.context.view_layer.objects.active = obj + + +def select_only_bone(armature, bone=None): + for bo in armature.bones: + bo.select = False + + if bone is None: + return + + bone.select = True + + +def select_only_edit_bone(armature, bone=None): + for bo in armature.edit_bones: + bo.select = False + bo.select_head = False + bo.select_tail = False + + if bone is None: + return + + bone.select = True + bone.select_head = True + bone.select_tail = True + armature.edit_bones.active = bone + + +def _get_bone_pose(bone, struct, side): + if isinstance(struct, str): + struct = get_object(struct) + + current_mode = bpy.context.mode + bpy.ops.object.mode_set(mode='OBJECT') + matrix_world = struct.matrix_world + bpy.ops.object.mode_set(mode=current_mode) + if side == "tail": + side_3d = struct.pose.bones[bone].tail + else: + side_3d = struct.pose.bones[bone].head + + return (matrix_world @ side_3d.to_4d()).to_3d() + + +def get_head_pose(bone, struct='Subject'): + return _get_bone_pose(bone, struct, side='head') + + +def get_tail_pose(bone, struct='Subject'): + return _get_bone_pose(bone, struct, side='tail') + + +# Other utils +def mat_to_list(mat): + return np.array(mat).tolist() + + +class Randomizer: + def __init__(self, objects): + self.objects = objects + for obj in self: + hide_object(obj) + + def to_list(self): + return [obj if isinstance(obj, bpy.types.Object) else obj.model for obj in self.objects] + + def __iter__(self): + return iter(self.to_list()) + + def __getitem__(self, item): + return 
self.to_list()[item] + + def __len__(self): + return len(self.objects) + + def get(self, pick_idx): + pick = self.objects[pick_idx] + # pick = pick if isinstance(pick, bpy.types.Object) else pick(*args, **kwargs) + self.swap_object(self[pick_idx]) + return pick + + def __call__(self): + pick_idx = random.randint(0, len(self) - 1) + return self.get(pick_idx) + + def swap_object(self, obj=None): + for o in self: + if not o.hide_get(): + hide_object(o) + + if obj is not None: + hide_object(obj, hide=False) + + +class StopError(Exception): + pass
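For orientation, here is a minimal sketch of how the pieces added in this diff can be combined to produce 2D keypoint annotations: it projects the approximate face markers returned by `get_face` with the projection matrix computed by `get_3x4_P_matrix_from_blender`. It is not part of the repository and rests on several assumptions: it runs inside Blender from the repository root, the scene contains a camera object named `Camera` and an imported MHX2 model named `Subject` that uses the modified rig, and the projection helpers are importable as `scripts.camera` (a placeholder for the actual script file name).

```python
# Minimal sketch, not part of the repository. Assumptions: run inside Blender from
# the repository root, camera object "Camera", imported MHX2 armature "Subject"
# using the modified rig, and the projection helpers importable as scripts.camera
# (placeholder module name -- adjust to the actual file).
import bpy

from scripts.camera import get_3x4_P_matrix_from_blender  # placeholder module path
from scripts.human import get_face

cam = bpy.data.objects["Camera"]
subject = bpy.data.objects["Subject"]

# Full projection matrix P = K @ RT (world coordinates -> pixel coordinates)
P, _, _ = get_3x4_P_matrix_from_blender(cam)

face_2d = {}
for marker, loc_3d in get_face(subject).items():
    p = P @ loc_3d.to_4d()                        # homogeneous projection of (x, y, z, 1)
    face_2d[marker] = (p[0] / p[2], p[1] / p[2])  # divide by depth to get pixel coordinates

print(face_2d)
```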